/* SPDX-License-Identifier: GPL-2.0 */
/*
 * MT regs definitions, follows on from mipsregs.h
 * Copyright (C) 2004 - 2005 MIPS Technologies, Inc.  All rights reserved.
 * Elizabeth Clarke et al.
 *
 */
#ifndef _ASM_MIPSMTREGS_H
#define _ASM_MIPSMTREGS_H

#include <asm/mipsregs.h>
#include <asm/war.h>

#ifndef __ASSEMBLY__

/*
 * C macros
 */

#define read_c0_mvpcontrol()		__read_32bit_c0_register($0, 1)
#define write_c0_mvpcontrol(val)	__write_32bit_c0_register($0, 1, val)

#define read_c0_mvpconf0()		__read_32bit_c0_register($0, 2)
#define read_c0_mvpconf1()		__read_32bit_c0_register($0, 3)

#define read_c0_vpecontrol()		__read_32bit_c0_register($1, 1)
#define write_c0_vpecontrol(val)	__write_32bit_c0_register($1, 1, val)

#define read_c0_vpeconf0()		__read_32bit_c0_register($1, 2)
#define write_c0_vpeconf0(val)		__write_32bit_c0_register($1, 2, val)

#define read_c0_vpeconf1()		__read_32bit_c0_register($1, 3)
#define write_c0_vpeconf1(val)		__write_32bit_c0_register($1, 3, val)

#define read_c0_tcstatus()		__read_32bit_c0_register($2, 1)
#define write_c0_tcstatus(val)		__write_32bit_c0_register($2, 1, val)

#define read_c0_tcbind()		__read_32bit_c0_register($2, 2)

#define write_c0_tchalt(val)		__write_32bit_c0_register($2, 4, val)

#define read_c0_tccontext()		__read_32bit_c0_register($2, 5)
#define write_c0_tccontext(val)		__write_32bit_c0_register($2, 5, val)

#else /* Assembly */
/*
 * Macros for use in assembly language code
 */

#define CP0_MVPCONTROL		$0, 1
#define CP0_MVPCONF0		$0, 2
#define CP0_MVPCONF1		$0, 3
#define CP0_VPECONTROL		$1, 1
#define CP0_VPECONF0		$1, 2
#define CP0_VPECONF1		$1, 3
#define CP0_YQMASK		$1, 4
#define CP0_VPESCHEDULE		$1, 5
#define CP0_VPESCHEFBK		$1, 6
#define CP0_TCSTATUS		$2, 1
#define CP0_TCBIND		$2, 2
#define CP0_TCRESTART		$2, 3
#define CP0_TCHALT		$2, 4
#define CP0_TCCONTEXT		$2, 5
#define CP0_TCSCHEDULE		$2, 6
#define CP0_TCSCHEFBK		$2, 7
#define CP0_SRSCONF0		$6, 1
#define CP0_SRSCONF1		$6, 2
#define CP0_SRSCONF2		$6, 3
#define CP0_SRSCONF3		$6, 4
#define CP0_SRSCONF4		$6, 5

#endif

/* MVPControl fields */
#define MVPCONTROL_EVP		(_ULCAST_(1))

#define MVPCONTROL_VPC_SHIFT	1
#define MVPCONTROL_VPC		(_ULCAST_(1) << MVPCONTROL_VPC_SHIFT)

#define MVPCONTROL_STLB_SHIFT	2
#define MVPCONTROL_STLB		(_ULCAST_(1) << MVPCONTROL_STLB_SHIFT)


/* MVPConf0 fields */
#define MVPCONF0_PTC_SHIFT	0
#define MVPCONF0_PTC		(_ULCAST_(0xff))
#define MVPCONF0_PVPE_SHIFT	10
#define MVPCONF0_PVPE		(_ULCAST_(0xf) << MVPCONF0_PVPE_SHIFT)
#define MVPCONF0_TCA_SHIFT	15
#define MVPCONF0_TCA		(_ULCAST_(1) << MVPCONF0_TCA_SHIFT)
#define MVPCONF0_PTLBE_SHIFT	16
#define MVPCONF0_PTLBE		(_ULCAST_(0x3ff) << MVPCONF0_PTLBE_SHIFT)
#define MVPCONF0_TLBS_SHIFT	29
#define MVPCONF0_TLBS		(_ULCAST_(1) << MVPCONF0_TLBS_SHIFT)
#define MVPCONF0_M_SHIFT	31
#define MVPCONF0_M		(_ULCAST_(0x1) << MVPCONF0_M_SHIFT)


/* config3 fields */
#define CONFIG3_MT_SHIFT	2
#define CONFIG3_MT		(_ULCAST_(1) << CONFIG3_MT_SHIFT)


/* VPEControl fields (per VPE) */
#define VPECONTROL_TARGTC	(_ULCAST_(0xff))

#define VPECONTROL_TE_SHIFT	15
#define VPECONTROL_TE		(_ULCAST_(1) << VPECONTROL_TE_SHIFT)
#define VPECONTROL_EXCPT_SHIFT	16
#define VPECONTROL_EXCPT	(_ULCAST_(0x7) << VPECONTROL_EXCPT_SHIFT)

/* Thread Exception Codes for EXCPT field */
#define THREX_TU		0
#define THREX_TO		1
#define THREX_IYQ		2
#define THREX_GSX		3
#define THREX_YSCH		4
#define THREX_GSSCH		5

#define VPECONTROL_GSI_SHIFT	20
#define VPECONTROL_GSI		(_ULCAST_(1) << VPECONTROL_GSI_SHIFT)
#define VPECONTROL_YSI_SHIFT	21
#define VPECONTROL_YSI		(_ULCAST_(1) << VPECONTROL_YSI_SHIFT)

/* VPEConf0 fields (per VPE) */
#define VPECONF0_VPA_SHIFT	0
#define VPECONF0_VPA		(_ULCAST_(1) << VPECONF0_VPA_SHIFT)
#define VPECONF0_MVP_SHIFT	1
#define VPECONF0_MVP		(_ULCAST_(1) << VPECONF0_MVP_SHIFT)
#define VPECONF0_XTC_SHIFT	21
#define VPECONF0_XTC		(_ULCAST_(0xff) << VPECONF0_XTC_SHIFT)

/* VPEConf1 fields (per VPE) */
#define VPECONF1_NCP1_SHIFT	0
#define VPECONF1_NCP1		(_ULCAST_(0xff) << VPECONF1_NCP1_SHIFT)
#define VPECONF1_NCP2_SHIFT	10
#define VPECONF1_NCP2		(_ULCAST_(0xff) << VPECONF1_NCP2_SHIFT)
#define VPECONF1_NCX_SHIFT	20
#define VPECONF1_NCX		(_ULCAST_(0xff) << VPECONF1_NCX_SHIFT)

/* TCStatus fields (per TC) */
#define TCSTATUS_TASID		(_ULCAST_(0xff))
#define TCSTATUS_IXMT_SHIFT	10
#define TCSTATUS_IXMT		(_ULCAST_(1) << TCSTATUS_IXMT_SHIFT)
#define TCSTATUS_TKSU_SHIFT	11
#define TCSTATUS_TKSU		(_ULCAST_(3) << TCSTATUS_TKSU_SHIFT)
#define TCSTATUS_A_SHIFT	13
#define TCSTATUS_A		(_ULCAST_(1) << TCSTATUS_A_SHIFT)
#define TCSTATUS_DA_SHIFT	15
#define TCSTATUS_DA		(_ULCAST_(1) << TCSTATUS_DA_SHIFT)
#define TCSTATUS_DT_SHIFT	20
#define TCSTATUS_DT		(_ULCAST_(1) << TCSTATUS_DT_SHIFT)
#define TCSTATUS_TDS_SHIFT	21
#define TCSTATUS_TDS		(_ULCAST_(1) << TCSTATUS_TDS_SHIFT)
#define TCSTATUS_TSST_SHIFT	22
#define TCSTATUS_TSST		(_ULCAST_(1) << TCSTATUS_TSST_SHIFT)
#define TCSTATUS_RNST_SHIFT	23
#define TCSTATUS_RNST		(_ULCAST_(3) << TCSTATUS_RNST_SHIFT)
/* Codes for RNST */
#define TC_RUNNING		0
#define TC_WAITING		1
#define TC_YIELDING		2
#define TC_GATED		3

#define TCSTATUS_TMX_SHIFT	27
#define TCSTATUS_TMX		(_ULCAST_(1) << TCSTATUS_TMX_SHIFT)
/* TCStatus TCU bits can use same definitions/offsets as CU bits in Status */

/* TCBind */
#define TCBIND_CURVPE_SHIFT	0
#define TCBIND_CURVPE		(_ULCAST_(0xf))

#define TCBIND_CURTC_SHIFT	21

#define TCBIND_CURTC		(_ULCAST_(0xff) << TCBIND_CURTC_SHIFT)

/* TCHalt */
#define TCHALT_H		(_ULCAST_(1))

#ifndef __ASSEMBLY__

static inline unsigned core_nvpes(void)
{
	unsigned conf0;

	if (!cpu_has_mipsmt)
		return 1;

	conf0 = read_c0_mvpconf0();
	return ((conf0 & MVPCONF0_PVPE) >> MVPCONF0_PVPE_SHIFT) + 1;
}

static inline unsigned int dvpe(void)
{
	int res = 0;

	__asm__ __volatile__(
	"	.set	push						\n"
	"	.set	noreorder					\n"
	"	.set	noat						\n"
	"	.set	mips32r2					\n"
	"	.word	0x41610001		# dvpe $1		\n"
	"	move	%0, $1						\n"
	"	ehb							\n"
	"	.set	pop						\n"
	: "=r" (res));

	instruction_hazard();

	return res;
}

static inline void __raw_evpe(void)
{
	__asm__ __volatile__(
	"	.set	push						\n"
	"	.set	noreorder					\n"
	"	.set	noat						\n"
	"	.set	mips32r2					\n"
	"	.word	0x41600021		# evpe			\n"
	"	ehb							\n"
	"	.set	pop						\n");
}

/*
 * Re-enable virtual processor execution if the state previously saved
 * by dvpe() indicates it was enabled; pass EVPE_ENABLE to force it on.
 */

#define EVPE_ENABLE MVPCONTROL_EVP

static inline void evpe(int previous)
{
	if ((previous & MVPCONTROL_EVP))
		__raw_evpe();
}
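
/*
 * Usage sketch (illustrative addition, not part of the original header):
 * callers typically save the value returned by dvpe(), do their work with
 * the other VPEs disabled, and hand that value back to evpe() so execution
 * is only re-enabled if it was enabled to begin with.  EVPE_ENABLE can be
 * passed instead to force it on.  The surrounding context is hypothetical.
 *
 *	unsigned int vpflags;
 *
 *	vpflags = dvpe();	// disable VPEs, remember the old EVP bit
 *	// ... manipulate VPE/TC state here ...
 *	evpe(vpflags);		// restore the previous EVP state
 */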

static inline unsigned int dmt(void)
{
	int res;

	__asm__ __volatile__(
	"	.set	push						\n"
	"	.set	mips32r2					\n"
	"	.set	noat						\n"
	"	.word	0x41610BC1			# dmt $1	\n"
	"	ehb							\n"
	"	move	%0, $1						\n"
	"	.set	pop						\n"
	: "=r" (res));

	instruction_hazard();

	return res;
}

static inline void __raw_emt(void)
{
	__asm__ __volatile__(
	"	.set	push						\n"
	"	.set	noreorder					\n"
	"	.set	mips32r2					\n"
	"	.word	0x41600be1			# emt		\n"
	"	ehb							\n"
	"	.set	pop");
}

/*
 * Re-enable multi-threaded execution if the state previously saved by
 * dmt() indicates it was enabled; pass EMT_ENABLE to force it on.
 */

#define EMT_ENABLE VPECONTROL_TE

static inline void emt(int previous)
{
	if ((previous & EMT_ENABLE))
		__raw_emt();
}
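
/*
 * Usage sketch (illustrative addition, not part of the original header):
 * dmt()/emt() follow the same save/restore pattern as dvpe()/evpe(), but
 * act on the TE bit of the current VPE's VPEControl register.  EMT_ENABLE
 * can be passed to force multi-threading back on.
 *
 *	unsigned int mtflags;
 *
 *	mtflags = dmt();	// halt the other TCs on this VPE
 *	// ... single-threaded section ...
 *	emt(mtflags);		// resume them only if TE was previously set
 */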

static inline void ehb(void)
{
	__asm__ __volatile__(
	"	.set	push					\n"
	"	.set	mips32r2				\n"
	"	ehb						\n"
	"	.set	pop					\n");
}

#define mftc0(rt, sel)							\
({									\
	unsigned long	__res;						\
									\
	__asm__ __volatile__(						\
	"	.set	push					\n"	\
	"	.set	mips32r2				\n"	\
	"	.set	noat					\n"	\
	"	# mftc0 $1, $" #rt ", " #sel "			\n"	\
	"	.word	0x41000800 | (" #rt " << 16) | " #sel " \n"	\
	"	move	%0, $1					\n"	\
	"	.set	pop					\n"	\
	: "=r" (__res));						\
									\
	__res;								\
})

#define mftgpr(rt)							\
({									\
	unsigned long __res;						\
									\
	__asm__ __volatile__(						\
	"	.set	push					\n"	\
	"	.set	noat					\n"	\
	"	.set	mips32r2				\n"	\
	"	# mftgpr $1," #rt "				\n"	\
	"	.word	0x41000820 | (" #rt " << 16)		\n"	\
	"	move	%0, $1					\n"	\
	"	.set	pop					\n"	\
	: "=r" (__res));						\
									\
	__res;								\
})

#define mftr(rt, u, sel)						\
({									\
	unsigned long __res;						\
									\
	__asm__ __volatile__(						\
	"	mftr	%0, " #rt ", " #u ", " #sel "		\n"	\
	: "=r" (__res));						\
									\
	__res;								\
})

#define mttgpr(rd, v)							\
do {									\
	__asm__ __volatile__(						\
	"	.set	push					\n"	\
	"	.set	mips32r2				\n"	\
	"	.set	noat					\n"	\
	"	move	$1, %0					\n"	\
	"	# mttgpr $1, " #rd "				\n"	\
	"	.word	0x41810020 | (" #rd " << 11)		\n"	\
	"	.set	pop					\n"	\
	: : "r" (v));							\
} while (0)

#define mttc0(rd, sel, v)						\
({									\
	__asm__ __volatile__(						\
	"	.set	push					\n"	\
	"	.set	mips32r2				\n"	\
	"	.set	noat					\n"	\
	"	move	$1, %0					\n"	\
	"	# mttc0 %0," #rd ", " #sel "			\n"	\
	"	.word	0x41810000 | (" #rd " << 11) | " #sel " \n"	\
	"	.set	pop					\n"	\
	:								\
	: "r" (v));							\
})


#define mttr(rd, u, sel, v)						\
({									\
	__asm__ __volatile__(						\
	"mttr	%0," #rd ", " #u ", " #sel				\
	: : "r" (v));							\
})


#define settc(tc)							\
do {									\
	write_c0_vpecontrol((read_c0_vpecontrol()&~VPECONTROL_TARGTC) | (tc)); \
	ehb();								\
} while (0)


/* you *must* set the target tc (settc) before trying to use these */
#define read_vpe_c0_vpecontrol()	mftc0(1, 1)
#define write_vpe_c0_vpecontrol(val)	mttc0(1, 1, val)
#define read_vpe_c0_vpeconf0()		mftc0(1, 2)
#define write_vpe_c0_vpeconf0(val)	mttc0(1, 2, val)
#define read_vpe_c0_vpeconf1()		mftc0(1, 3)
#define write_vpe_c0_vpeconf1(val)	mttc0(1, 3, val)
#define read_vpe_c0_count()		mftc0(9, 0)
#define write_vpe_c0_count(val)		mttc0(9, 0, val)
#define read_vpe_c0_status()		mftc0(12, 0)
#define write_vpe_c0_status(val)	mttc0(12, 0, val)
#define read_vpe_c0_cause()		mftc0(13, 0)
#define write_vpe_c0_cause(val)		mttc0(13, 0, val)
#define read_vpe_c0_config()		mftc0(16, 0)
#define write_vpe_c0_config(val)	mttc0(16, 0, val)
#define read_vpe_c0_config1()		mftc0(16, 1)
#define write_vpe_c0_config1(val)	mttc0(16, 1, val)
#define read_vpe_c0_config7()		mftc0(16, 7)
#define write_vpe_c0_config7(val)	mttc0(16, 7, val)
#define read_vpe_c0_ebase()		mftc0(15, 1)
#define write_vpe_c0_ebase(val)		mttc0(15, 1, val)
#define write_vpe_c0_compare(val)	mttc0(11, 0, val)
#define read_vpe_c0_badvaddr()		mftc0(8, 0)
#define read_vpe_c0_epc()		mftc0(14, 0)
#define write_vpe_c0_epc(val)		mttc0(14, 0, val)


/* TC */
#define read_tc_c0_tcstatus()		mftc0(2, 1)
#define write_tc_c0_tcstatus(val)	mttc0(2, 1, val)
#define read_tc_c0_tcbind()		mftc0(2, 2)
#define write_tc_c0_tcbind(val)		mttc0(2, 2, val)
#define read_tc_c0_tcrestart()		mftc0(2, 3)
#define write_tc_c0_tcrestart(val)	mttc0(2, 3, val)
#define read_tc_c0_tchalt()		mftc0(2, 4)
#define write_tc_c0_tchalt(val)		mttc0(2, 4, val)
#define read_tc_c0_tccontext()		mftc0(2, 5)
#define write_tc_c0_tccontext(val)	mttc0(2, 5, val)

/* GPR */
#define read_tc_gpr_sp()		mftgpr(29)
#define write_tc_gpr_sp(val)		mttgpr(29, val)
#define read_tc_gpr_gp()		mftgpr(28)
#define write_tc_gpr_gp(val)		mttgpr(28, val)
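
/*
 * Usage sketch (illustrative addition, not part of the original header):
 * mftc0/mttc0/mftgpr/mttgpr operate on whichever TC is named in
 * VPEControl.TargTC, so a TC must be selected with settc() first,
 * normally with VPE/MT execution disabled around the whole sequence.
 * The TC number and stack value below are purely example placeholders.
 *
 *	unsigned int vpflags = dvpe();
 *
 *	settc(1);					// target TC 1
 *	write_tc_c0_tchalt(TCHALT_H);			// halt it first
 *	write_tc_gpr_sp(example_stack_pointer);		// hypothetical value
 *	write_tc_c0_tchalt(read_tc_c0_tchalt() & ~TCHALT_H);
 *	evpe(vpflags);
 */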

__BUILD_SET_C0(mvpcontrol)

#endif /* Not __ASSEMBLY__ */

#endif /* _ASM_MIPSMTREGS_H */