/* SPDX-License-Identifier: GPL-2.0 */
/*
 * ultra.S: Don't expand these all over the place...
 *
 * Copyright (C) 1997, 2000, 2008 David S. Miller (davem@davemloft.net)
 */

#include <linux/pgtable.h>
#include <asm/asi.h>
#include <asm/page.h>
#include <asm/spitfire.h>
#include <asm/mmu_context.h>
#include <asm/mmu.h>
#include <asm/pil.h>
#include <asm/head.h>
#include <asm/thread_info.h>
#include <asm/cacheflush.h>
#include <asm/hypervisor.h>
#include <asm/cpudata.h>

	/* Basically, most of the Spitfire vs. Cheetah madness
	 * has to do with the fact that Cheetah does not support
	 * IMMU flushes out of the secondary context.  Someone needs
	 * to throw a south lake birthday party for the folks
	 * in Microelectronics who refused to fix this shit.
	 */

	/* This file is meant to be read efficiently by the CPU, not humans.
	 * Try not to fuck this up for anyone...
	 */
	.text
	.align		32
	.globl		__flush_tlb_mm
__flush_tlb_mm:		/* 19 insns */
	/* %o0=(ctx & TAG_CONTEXT_BITS), %o1=SECONDARY_CONTEXT */
	ldxa		[%o1] ASI_DMMU, %g2
	cmp		%g2, %o0
	bne,pn		%icc, __spitfire_flush_tlb_mm_slow
	 mov		0x50, %g3
	stxa		%g0, [%g3] ASI_DMMU_DEMAP
	stxa		%g0, [%g3] ASI_IMMU_DEMAP
	sethi		%hi(KERNBASE), %g3
	flush		%g3
	retl
	 nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

	.align		32
	.globl		__flush_tlb_page
__flush_tlb_page:	/* 22 insns */
	/* %o0 = context, %o1 = vaddr */
	rdpr		%pstate, %g7
	andn		%g7, PSTATE_IE, %g2
	wrpr		%g2, %pstate
	mov		SECONDARY_CONTEXT, %o4
	ldxa		[%o4] ASI_DMMU, %g2
	stxa		%o0, [%o4] ASI_DMMU
	andcc		%o1, 1, %g0
	andn		%o1, 1, %o3
	be,pn		%icc, 1f
	 or		%o3, 0x10, %o3
	stxa		%g0, [%o3] ASI_IMMU_DEMAP
1:	stxa		%g0, [%o3] ASI_DMMU_DEMAP
	membar		#Sync
	stxa		%g2, [%o4] ASI_DMMU
	sethi		%hi(KERNBASE), %o4
	flush		%o4
	retl
	 wrpr		%g7, 0x0, %pstate
	nop
	nop
	nop
	nop

	.align		32
	.globl		__flush_tlb_pending
__flush_tlb_pending:	/* 27 insns */
	/* %o0 = context, %o1 = nr, %o2 = vaddrs[] */
	rdpr		%pstate, %g7
	sllx		%o1, 3, %o1
	andn		%g7, PSTATE_IE, %g2
	wrpr		%g2, %pstate
	mov		SECONDARY_CONTEXT, %o4
	ldxa		[%o4] ASI_DMMU, %g2
	stxa		%o0, [%o4] ASI_DMMU
1:	sub		%o1, (1 << 3), %o1
	ldx		[%o2 + %o1], %o3
	andcc		%o3, 1, %g0
	andn		%o3, 1, %o3
	be,pn		%icc, 2f
	 or		%o3, 0x10, %o3
	stxa		%g0, [%o3] ASI_IMMU_DEMAP
2:	stxa		%g0, [%o3] ASI_DMMU_DEMAP
	membar		#Sync
	brnz,pt		%o1, 1b
	 nop
	stxa		%g2, [%o4] ASI_DMMU
	sethi		%hi(KERNBASE), %o4
	flush		%o4
	retl
	 wrpr		%g7, 0x0, %pstate
	nop
	nop
	nop
	nop

	.align		32
	.globl		__flush_tlb_kernel_range
__flush_tlb_kernel_range:	/* 31 insns */
	/* %o0=start, %o1=end */
	cmp		%o0, %o1
	be,pn		%xcc, 2f
	 sub		%o1, %o0, %o3
	srlx		%o3, 18, %o4
	brnz,pn		%o4, __spitfire_flush_tlb_kernel_range_slow
	 sethi		%hi(PAGE_SIZE), %o4
	sub		%o3, %o4, %o3
	or		%o0, 0x20, %o0		! Nucleus
1:	stxa		%g0, [%o0 + %o3] ASI_DMMU_DEMAP
	stxa		%g0, [%o0 + %o3] ASI_IMMU_DEMAP
	membar		#Sync
	brnz,pt		%o3, 1b
	 sub		%o3, %o4, %o3
2:	sethi		%hi(KERNBASE), %o3
	flush		%o3
	retl
	 nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

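	/* Slow path for __flush_tlb_kernel_range on Spitfire: walk all
	 * 64 I-TLB and D-TLB entries and clear every entry that is not
	 * locked (_PAGE_L_4U).
	 */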
__spitfire_flush_tlb_kernel_range_slow:
	mov		63 * 8, %o4
1:	ldxa		[%o4] ASI_ITLB_DATA_ACCESS, %o3
	andcc		%o3, 0x40, %g0			/* _PAGE_L_4U */
	bne,pn		%xcc, 2f
	 mov		TLB_TAG_ACCESS, %o3
	stxa		%g0, [%o3] ASI_IMMU
	stxa		%g0, [%o4] ASI_ITLB_DATA_ACCESS
	membar		#Sync
2:	ldxa		[%o4] ASI_DTLB_DATA_ACCESS, %o3
	andcc		%o3, 0x40, %g0
	bne,pn		%xcc, 2f
	 mov		TLB_TAG_ACCESS, %o3
	stxa		%g0, [%o3] ASI_DMMU
	stxa		%g0, [%o4] ASI_DTLB_DATA_ACCESS
	membar		#Sync
2:	sub		%o4, 8, %o4
	brgez,pt	%o4, 1b
	 nop
	retl
	 nop

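	/* Slow path for __flush_tlb_mm, entered when the context in %o0
	 * is not the current secondary context: with interrupts disabled,
	 * temporarily install %o0 as the secondary context, demap it from
	 * both MMUs, then restore the previous context the caller saved
	 * in %g2.
	 */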
__spitfire_flush_tlb_mm_slow:
	rdpr		%pstate, %g1
	wrpr		%g1, PSTATE_IE, %pstate
	stxa		%o0, [%o1] ASI_DMMU
	stxa		%g0, [%g3] ASI_DMMU_DEMAP
	stxa		%g0, [%g3] ASI_IMMU_DEMAP
	flush		%g6
	stxa		%g2, [%o1] ASI_DMMU
	sethi		%hi(KERNBASE), %o1
	flush		%o1
	retl
	 wrpr		%g1, 0, %pstate

/*
 * The following code flushes one page_size worth.
 */
	.section .kprobes.text, "ax"
	.align		32
	.globl		__flush_icache_page
__flush_icache_page:	/* %o0 = phys_page */
	srlx		%o0, PAGE_SHIFT, %o0
	sethi		%hi(PAGE_OFFSET), %g1
	sllx		%o0, PAGE_SHIFT, %o0
	sethi		%hi(PAGE_SIZE), %g2
	ldx		[%g1 + %lo(PAGE_OFFSET)], %g1
	add		%o0, %g1, %o0
1:	subcc		%g2, 32, %g2
	bne,pt		%icc, 1b
	 flush		%o0 + %g2
	retl
	 nop

#ifdef DCACHE_ALIASING_POSSIBLE

#if (PAGE_SHIFT != 13)
#error only page shift of 13 is supported by dcache flush
#endif

#define DTAG_MASK 0x3

	/* This routine is Spitfire specific so the hardcoded
	 * D-cache size and line-size are OK.
	 */
	.align		64
	.globl		__flush_dcache_page
__flush_dcache_page:	/* %o0=kaddr, %o1=flush_icache */
	sethi		%hi(PAGE_OFFSET), %g1
	ldx		[%g1 + %lo(PAGE_OFFSET)], %g1
	sub		%o0, %g1, %o0			! physical address
	srlx		%o0, 11, %o0			! make D-cache TAG
	sethi		%hi(1 << 14), %o2		! D-cache size
	sub		%o2, (1 << 5), %o2		! D-cache line size
1:	ldxa		[%o2] ASI_DCACHE_TAG, %o3	! load D-cache TAG
	andcc		%o3, DTAG_MASK, %g0		! Valid?
	be,pn		%xcc, 2f			! Nope, branch
	 andn		%o3, DTAG_MASK, %o3		! Clear valid bits
	cmp		%o3, %o0			! TAG match?
	bne,pt		%xcc, 2f			! Nope, branch
	 nop
	stxa		%g0, [%o2] ASI_DCACHE_TAG	! Invalidate TAG
	membar		#Sync
2:	brnz,pt		%o2, 1b
	 sub		%o2, (1 << 5), %o2		! D-cache line size

	/* The I-cache does not snoop local stores so we
	 * better flush that too when necessary.
	 */
	brnz,pt		%o1, __flush_icache_page
	 sllx		%o0, 11, %o0
	retl
	 nop

#endif /* DCACHE_ALIASING_POSSIBLE */

	.previous

	/* Cheetah specific versions, patched at boot time. */
__cheetah_flush_tlb_mm: /* 19 insns */
	rdpr		%pstate, %g7
	andn		%g7, PSTATE_IE, %g2
	wrpr		%g2, 0x0, %pstate
	wrpr		%g0, 1, %tl
	mov		PRIMARY_CONTEXT, %o2
	mov		0x40, %g3
	ldxa		[%o2] ASI_DMMU, %g2
	srlx		%g2, CTX_PGSZ1_NUC_SHIFT, %o1
	sllx		%o1, CTX_PGSZ1_NUC_SHIFT, %o1
	or		%o0, %o1, %o0	/* Preserve nucleus page size fields */
	stxa		%o0, [%o2] ASI_DMMU
	stxa		%g0, [%g3] ASI_DMMU_DEMAP
	stxa		%g0, [%g3] ASI_IMMU_DEMAP
	stxa		%g2, [%o2] ASI_DMMU
	sethi		%hi(KERNBASE), %o2
	flush		%o2
	wrpr		%g0, 0, %tl
	retl
	 wrpr		%g7, 0x0, %pstate

__cheetah_flush_tlb_page:	/* 22 insns */
	/* %o0 = context, %o1 = vaddr */
	rdpr		%pstate, %g7
	andn		%g7, PSTATE_IE, %g2
	wrpr		%g2, 0x0, %pstate
	wrpr		%g0, 1, %tl
	mov		PRIMARY_CONTEXT, %o4
	ldxa		[%o4] ASI_DMMU, %g2
	srlx		%g2, CTX_PGSZ1_NUC_SHIFT, %o3
	sllx		%o3, CTX_PGSZ1_NUC_SHIFT, %o3
	or		%o0, %o3, %o0	/* Preserve nucleus page size fields */
	stxa		%o0, [%o4] ASI_DMMU
	andcc		%o1, 1, %g0
	be,pn		%icc, 1f
	 andn		%o1, 1, %o3
	stxa		%g0, [%o3] ASI_IMMU_DEMAP
1:	stxa		%g0, [%o3] ASI_DMMU_DEMAP
	membar		#Sync
	stxa		%g2, [%o4] ASI_DMMU
	sethi		%hi(KERNBASE), %o4
	flush		%o4
	wrpr		%g0, 0, %tl
	retl
	 wrpr		%g7, 0x0, %pstate

__cheetah_flush_tlb_pending:	/* 27 insns */
	/* %o0 = context, %o1 = nr, %o2 = vaddrs[] */
	rdpr		%pstate, %g7
	sllx		%o1, 3, %o1
	andn		%g7, PSTATE_IE, %g2
	wrpr		%g2, 0x0, %pstate
	wrpr		%g0, 1, %tl
	mov		PRIMARY_CONTEXT, %o4
	ldxa		[%o4] ASI_DMMU, %g2
	srlx		%g2, CTX_PGSZ1_NUC_SHIFT, %o3
	sllx		%o3, CTX_PGSZ1_NUC_SHIFT, %o3
	or		%o0, %o3, %o0	/* Preserve nucleus page size fields */
	stxa		%o0, [%o4] ASI_DMMU
1:	sub		%o1, (1 << 3), %o1
	ldx		[%o2 + %o1], %o3
	andcc		%o3, 1, %g0
	be,pn		%icc, 2f
	 andn		%o3, 1, %o3
	stxa		%g0, [%o3] ASI_IMMU_DEMAP
2:	stxa		%g0, [%o3] ASI_DMMU_DEMAP
	membar		#Sync
	brnz,pt		%o1, 1b
	 nop
	stxa		%g2, [%o4] ASI_DMMU
	sethi		%hi(KERNBASE), %o4
	flush		%o4
	wrpr		%g0, 0, %tl
	retl
	 wrpr		%g7, 0x0, %pstate

__cheetah_flush_tlb_kernel_range:	/* 31 insns */
	/* %o0=start, %o1=end */
	cmp		%o0, %o1
	be,pn		%xcc, 2f
	 sub		%o1, %o0, %o3
	srlx		%o3, 18, %o4
	brnz,pn		%o4, 3f
	 sethi		%hi(PAGE_SIZE), %o4
	sub		%o3, %o4, %o3
	or		%o0, 0x20, %o0		! Nucleus
1:	stxa		%g0, [%o0 + %o3] ASI_DMMU_DEMAP
	stxa		%g0, [%o0 + %o3] ASI_IMMU_DEMAP
	membar		#Sync
	brnz,pt		%o3, 1b
	 sub		%o3, %o4, %o3
2:	sethi		%hi(KERNBASE), %o3
	flush		%o3
	retl
	 nop
3:	mov		0x80, %o4
	stxa		%g0, [%o4] ASI_DMMU_DEMAP
	membar		#Sync
	stxa		%g0, [%o4] ASI_IMMU_DEMAP
	membar		#Sync
	retl
	 nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

#ifdef DCACHE_ALIASING_POSSIBLE
__cheetah_flush_dcache_page: /* 11 insns */
	sethi		%hi(PAGE_OFFSET), %g1
	ldx		[%g1 + %lo(PAGE_OFFSET)], %g1
	sub		%o0, %g1, %o0
	sethi		%hi(PAGE_SIZE), %o4
1:	subcc		%o4, (1 << 5), %o4
	stxa		%g0, [%o0 + %o4] ASI_DCACHE_INVALIDATE
	membar		#Sync
	bne,pt		%icc, 1b
	 nop
	retl		/* I-cache flush never needed on Cheetah, see callers. */
	 nop
#endif /* DCACHE_ALIASING_POSSIBLE */

	/* Hypervisor specific versions, patched at boot time.  */
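	/* Report a sun4v TLB hypercall failure taken at trap level 0:
	 * on entry %o0 holds the hypervisor error status and %o1 the
	 * identifier of the failed call; both are handed to
	 * hypervisor_tlbop_error().
	 */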
__hypervisor_tlb_tl0_error:
	save		%sp, -192, %sp
	mov		%i0, %o0
	call		hypervisor_tlbop_error
	 mov		%i1, %o1
	ret
	 restore

__hypervisor_flush_tlb_mm: /* 19 insns */
	mov		%o0, %o2	/* ARG2: mmu context */
	mov		0, %o0		/* ARG0: CPU lists unimplemented */
	mov		0, %o1		/* ARG1: CPU lists unimplemented */
	mov		HV_MMU_ALL, %o3	/* ARG3: flags */
	mov		HV_FAST_MMU_DEMAP_CTX, %o5
	ta		HV_FAST_TRAP
	brnz,pn		%o0, 1f
	 mov		HV_FAST_MMU_DEMAP_CTX, %o1
	retl
	 nop
1:	sethi		%hi(__hypervisor_tlb_tl0_error), %o5
	jmpl		%o5 + %lo(__hypervisor_tlb_tl0_error), %g0
	 nop
	nop
	nop
	nop
	nop
	nop
	nop

__hypervisor_flush_tlb_page: /* 22 insns */
	/* %o0 = context, %o1 = vaddr */
	mov		%o0, %g2
	mov		%o1, %o0              /* ARG0: vaddr + IMMU-bit */
	mov		%g2, %o1	      /* ARG1: mmu context */
	mov		HV_MMU_ALL, %o2	      /* ARG2: flags */
	srlx		%o0, PAGE_SHIFT, %o0
	sllx		%o0, PAGE_SHIFT, %o0
	ta		HV_MMU_UNMAP_ADDR_TRAP
	brnz,pn		%o0, 1f
	 mov		HV_MMU_UNMAP_ADDR_TRAP, %o1
	retl
	 nop
1:	sethi		%hi(__hypervisor_tlb_tl0_error), %o2
	jmpl		%o2 + %lo(__hypervisor_tlb_tl0_error), %g0
	 nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

__hypervisor_flush_tlb_pending: /* 27 insns */
	/* %o0 = context, %o1 = nr, %o2 = vaddrs[] */
	sllx		%o1, 3, %g1
	mov		%o2, %g2
	mov		%o0, %g3
1:	sub		%g1, (1 << 3), %g1
	ldx		[%g2 + %g1], %o0      /* ARG0: vaddr + IMMU-bit */
	mov		%g3, %o1	      /* ARG1: mmu context */
	mov		HV_MMU_ALL, %o2	      /* ARG2: flags */
	srlx		%o0, PAGE_SHIFT, %o0
	sllx		%o0, PAGE_SHIFT, %o0
	ta		HV_MMU_UNMAP_ADDR_TRAP
	brnz,pn		%o0, 1f
	 mov		HV_MMU_UNMAP_ADDR_TRAP, %o1
	brnz,pt		%g1, 1b
	 nop
	retl
	 nop
1:	sethi		%hi(__hypervisor_tlb_tl0_error), %o2
	jmpl		%o2 + %lo(__hypervisor_tlb_tl0_error), %g0
	 nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

__hypervisor_flush_tlb_kernel_range: /* 31 insns */
	/* %o0=start, %o1=end */
	cmp		%o0, %o1
	be,pn		%xcc, 2f
	 sub		%o1, %o0, %g2
	srlx		%g2, 18, %g3
	brnz,pn		%g3, 4f
	 mov		%o0, %g1
	sethi		%hi(PAGE_SIZE), %g3
	sub		%g2, %g3, %g2
1:	add		%g1, %g2, %o0	/* ARG0: virtual address */
	mov		0, %o1		/* ARG1: mmu context */
	mov		HV_MMU_ALL, %o2	/* ARG2: flags */
	ta		HV_MMU_UNMAP_ADDR_TRAP
	brnz,pn		%o0, 3f
	 mov		HV_MMU_UNMAP_ADDR_TRAP, %o1
	brnz,pt		%g2, 1b
	 sub		%g2, %g3, %g2
2:	retl
	 nop
3:	sethi		%hi(__hypervisor_tlb_tl0_error), %o2
	jmpl		%o2 + %lo(__hypervisor_tlb_tl0_error), %g0
	 nop
4:	mov		0, %o0		/* ARG0: CPU lists unimplemented */
	mov		0, %o1		/* ARG1: CPU lists unimplemented */
	mov		0, %o2		/* ARG2: mmu context == nucleus */
	mov		HV_MMU_ALL, %o3	/* ARG3: flags */
	mov		HV_FAST_MMU_DEMAP_CTX, %o5
	ta		HV_FAST_TRAP
	brnz,pn		%o0, 3b
	 mov		HV_FAST_MMU_DEMAP_CTX, %o1
	retl
	 nop

#ifdef DCACHE_ALIASING_POSSIBLE
	/* XXX Niagara and friends have an 8K cache, so no aliasing is
	 * XXX possible, but nothing explicit in the Hypervisor API
	 * XXX guarantees this.
	 */
__hypervisor_flush_dcache_page:	/* 2 insns */
	retl
	 nop
#endif

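	/* tlb_patch_one: copy %o2 instruction words from [%o1] to [%o0],
	 * flushing the I-cache for each patched word.  Used by the boot
	 * time patching routines below; the count in %o2 must match the
	 * "nn insns" annotations of both source and destination.
	 */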
tlb_patch_one:
1:	lduw		[%o1], %g1
	stw		%g1, [%o0]
	flush		%o0
	subcc		%o2, 1, %o2
	add		%o1, 4, %o1
	bne,pt		%icc, 1b
	 add		%o0, 4, %o0
	retl
	 nop

#ifdef CONFIG_SMP
	/* These are all called by the slaves of a cross call, at
	 * trap level 1, with interrupts fully disabled.
	 *
	 * Register usage:
	 *   %g5	mm->context	(all tlb flushes)
	 *   %g1	address arg 1	(tlb page and range flushes)
	 *   %g7	address arg 2	(tlb range flush only)
	 *
	 *   %g6	scratch 1
	 *   %g2	scratch 2
	 *   %g3	scratch 3
	 *   %g4	scratch 4
	 */
	.align		32
	.globl		xcall_flush_tlb_mm
xcall_flush_tlb_mm:	/* 24 insns */
	mov		PRIMARY_CONTEXT, %g2
	ldxa		[%g2] ASI_DMMU, %g3
	srlx		%g3, CTX_PGSZ1_NUC_SHIFT, %g4
	sllx		%g4, CTX_PGSZ1_NUC_SHIFT, %g4
	or		%g5, %g4, %g5	/* Preserve nucleus page size fields */
	stxa		%g5, [%g2] ASI_DMMU
	mov		0x40, %g4
	stxa		%g0, [%g4] ASI_DMMU_DEMAP
	stxa		%g0, [%g4] ASI_IMMU_DEMAP
	stxa		%g3, [%g2] ASI_DMMU
	retry
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

	.globl		xcall_flush_tlb_page
xcall_flush_tlb_page:	/* 20 insns */
	/* %g5=context, %g1=vaddr */
	mov		PRIMARY_CONTEXT, %g4
	ldxa		[%g4] ASI_DMMU, %g2
	srlx		%g2, CTX_PGSZ1_NUC_SHIFT, %g4
	sllx		%g4, CTX_PGSZ1_NUC_SHIFT, %g4
	or		%g5, %g4, %g5
	mov		PRIMARY_CONTEXT, %g4
	stxa		%g5, [%g4] ASI_DMMU
	andcc		%g1, 0x1, %g0
	be,pn		%icc, 2f
	 andn		%g1, 0x1, %g5
	stxa		%g0, [%g5] ASI_IMMU_DEMAP
2:	stxa		%g0, [%g5] ASI_DMMU_DEMAP
	membar		#Sync
	stxa		%g2, [%g4] ASI_DMMU
	retry
	nop
	nop
	nop
	nop
	nop

	.globl		xcall_flush_tlb_kernel_range
xcall_flush_tlb_kernel_range:	/* 44 insns */
	sethi		%hi(PAGE_SIZE - 1), %g2
	or		%g2, %lo(PAGE_SIZE - 1), %g2
	andn		%g1, %g2, %g1
	andn		%g7, %g2, %g7
	sub		%g7, %g1, %g3
	srlx		%g3, 18, %g2
	brnz,pn		%g2, 2f
	 sethi		%hi(PAGE_SIZE), %g2
	sub		%g3, %g2, %g3
	or		%g1, 0x20, %g1		! Nucleus
1:	stxa		%g0, [%g1 + %g3] ASI_DMMU_DEMAP
	stxa		%g0, [%g1 + %g3] ASI_IMMU_DEMAP
	membar		#Sync
	brnz,pt		%g3, 1b
	 sub		%g3, %g2, %g3
	retry
2:	mov		63 * 8, %g1
1:	ldxa		[%g1] ASI_ITLB_DATA_ACCESS, %g2
	andcc		%g2, 0x40, %g0			/* _PAGE_L_4U */
	bne,pn		%xcc, 2f
	 mov		TLB_TAG_ACCESS, %g2
	stxa		%g0, [%g2] ASI_IMMU
	stxa		%g0, [%g1] ASI_ITLB_DATA_ACCESS
	membar		#Sync
2:	ldxa		[%g1] ASI_DTLB_DATA_ACCESS, %g2
	andcc		%g2, 0x40, %g0
	bne,pn		%xcc, 2f
	 mov		TLB_TAG_ACCESS, %g2
	stxa		%g0, [%g2] ASI_DMMU
	stxa		%g0, [%g1] ASI_DTLB_DATA_ACCESS
	membar		#Sync
2:	sub		%g1, 8, %g1
	brgez,pt	%g1, 1b
	 nop
	retry
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

	/* This runs in a very controlled environment, so we do
	 * not need to worry about BH races etc.
	 */
	.globl		xcall_sync_tick
xcall_sync_tick:

661:	rdpr		%pstate, %g2
	wrpr		%g2, PSTATE_IG | PSTATE_AG, %pstate
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	nop
	nop
	.previous

	rdpr		%pil, %g2
	wrpr		%g0, PIL_NORMAL_MAX, %pil
	sethi		%hi(109f), %g7
	b,pt		%xcc, etrap_irq
109:	 or		%g7, %lo(109b), %g7
#ifdef CONFIG_TRACE_IRQFLAGS
	call		trace_hardirqs_off
	 nop
#endif
	call		smp_synchronize_tick_client
	 nop
	b		rtrap_xcall
	 ldx		[%sp + PTREGS_OFF + PT_V9_TSTATE], %l1

	.globl		xcall_fetch_glob_regs
xcall_fetch_glob_regs:
	sethi		%hi(global_cpu_snapshot), %g1
	or		%g1, %lo(global_cpu_snapshot), %g1
	__GET_CPUID(%g2)
	sllx		%g2, 6, %g3
	add		%g1, %g3, %g1
	rdpr		%tstate, %g7
	stx		%g7, [%g1 + GR_SNAP_TSTATE]
	rdpr		%tpc, %g7
	stx		%g7, [%g1 + GR_SNAP_TPC]
	rdpr		%tnpc, %g7
	stx		%g7, [%g1 + GR_SNAP_TNPC]
	stx		%o7, [%g1 + GR_SNAP_O7]
	stx		%i7, [%g1 + GR_SNAP_I7]
	/* Don't try this at home kids... */
	rdpr		%cwp, %g3
	sub		%g3, 1, %g7
	wrpr		%g7, %cwp
	mov		%i7, %g7
	wrpr		%g3, %cwp
	stx		%g7, [%g1 + GR_SNAP_RPC]
	sethi		%hi(trap_block), %g7
	or		%g7, %lo(trap_block), %g7
	sllx		%g2, TRAP_BLOCK_SZ_SHIFT, %g2
	add		%g7, %g2, %g7
	ldx		[%g7 + TRAP_PER_CPU_THREAD], %g3
	stx		%g3, [%g1 + GR_SNAP_THREAD]
	retry

	.globl		xcall_fetch_glob_pmu
xcall_fetch_glob_pmu:
	sethi		%hi(global_cpu_snapshot), %g1
	or		%g1, %lo(global_cpu_snapshot), %g1
	__GET_CPUID(%g2)
	sllx		%g2, 6, %g3
	add		%g1, %g3, %g1
	rd		%pic, %g7
	stx		%g7, [%g1 + (4 * 8)]
	rd		%pcr, %g7
	stx		%g7, [%g1 + (0 * 8)]
	retry

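	/* _n4 variant of the PMU snapshot: the PIC counters are read
	 * directly through ASI_PIC, while the performance control
	 * registers are fetched one at a time via the
	 * HV_FAST_VT_GET_PERFREG hypervisor call.
	 */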
	.globl		xcall_fetch_glob_pmu_n4
xcall_fetch_glob_pmu_n4:
	sethi		%hi(global_cpu_snapshot), %g1
	or		%g1, %lo(global_cpu_snapshot), %g1
	__GET_CPUID(%g2)
	sllx		%g2, 6, %g3
	add		%g1, %g3, %g1

	ldxa		[%g0] ASI_PIC, %g7
	stx		%g7, [%g1 + (4 * 8)]
	mov		0x08, %g3
	ldxa		[%g3] ASI_PIC, %g7
	stx		%g7, [%g1 + (5 * 8)]
	mov		0x10, %g3
	ldxa		[%g3] ASI_PIC, %g7
	stx		%g7, [%g1 + (6 * 8)]
	mov		0x18, %g3
	ldxa		[%g3] ASI_PIC, %g7
	stx		%g7, [%g1 + (7 * 8)]

	mov		%o0, %g2
	mov		%o1, %g3
	mov		%o5, %g7

	mov		HV_FAST_VT_GET_PERFREG, %o5
	mov		3, %o0
	ta		HV_FAST_TRAP
	stx		%o1, [%g1 + (3 * 8)]
	mov		HV_FAST_VT_GET_PERFREG, %o5
	mov		2, %o0
	ta		HV_FAST_TRAP
	stx		%o1, [%g1 + (2 * 8)]
	mov		HV_FAST_VT_GET_PERFREG, %o5
	mov		1, %o0
	ta		HV_FAST_TRAP
	stx		%o1, [%g1 + (1 * 8)]
	mov		HV_FAST_VT_GET_PERFREG, %o5
	mov		0, %o0
	ta		HV_FAST_TRAP
	stx		%o1, [%g1 + (0 * 8)]

	mov		%g2, %o0
	mov		%g3, %o1
	mov		%g7, %o5

	retry

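	/* Cheetah cross call variant of the kernel range flush:
	 * %g1=start, %g7=end.  Ranges larger than 256K fall back to a
	 * single "demap all" operation (offset 0x80) on both MMUs.
	 */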
__cheetah_xcall_flush_tlb_kernel_range:	/* 44 insns */
	sethi		%hi(PAGE_SIZE - 1), %g2
	or		%g2, %lo(PAGE_SIZE - 1), %g2
	andn		%g1, %g2, %g1
	andn		%g7, %g2, %g7
	sub		%g7, %g1, %g3
	srlx		%g3, 18, %g2
	brnz,pn		%g2, 2f
	 sethi		%hi(PAGE_SIZE), %g2
	sub		%g3, %g2, %g3
	or		%g1, 0x20, %g1		! Nucleus
1:	stxa		%g0, [%g1 + %g3] ASI_DMMU_DEMAP
	stxa		%g0, [%g1 + %g3] ASI_IMMU_DEMAP
	membar		#Sync
	brnz,pt		%g3, 1b
	 sub		%g3, %g2, %g3
	retry
2:	mov		0x80, %g2
	stxa		%g0, [%g2] ASI_DMMU_DEMAP
	membar		#Sync
	stxa		%g0, [%g2] ASI_IMMU_DEMAP
	membar		#Sync
	retry
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

#ifdef DCACHE_ALIASING_POSSIBLE
	.align		32
	.globl		xcall_flush_dcache_page_cheetah
xcall_flush_dcache_page_cheetah: /* %g1 == physical page address */
	sethi		%hi(PAGE_SIZE), %g3
1:	subcc		%g3, (1 << 5), %g3
	stxa		%g0, [%g1 + %g3] ASI_DCACHE_INVALIDATE
	membar		#Sync
	bne,pt		%icc, 1b
	 nop
	retry
	nop
#endif /* DCACHE_ALIASING_POSSIBLE */

	.globl		xcall_flush_dcache_page_spitfire
xcall_flush_dcache_page_spitfire: /* %g1 == physical page address
				     %g7 == kernel page virtual address
				     %g5 == (page->mapping != NULL)  */
#ifdef DCACHE_ALIASING_POSSIBLE
	srlx		%g1, (13 - 2), %g1	! Form tag comparator
	sethi		%hi(L1DCACHE_SIZE), %g3	! D$ size == 16K
	sub		%g3, (1 << 5), %g3	! D$ linesize == 32
1:	ldxa		[%g3] ASI_DCACHE_TAG, %g2
	andcc		%g2, 0x3, %g0
	be,pn		%xcc, 2f
	 andn		%g2, 0x3, %g2
	cmp		%g2, %g1

	bne,pt		%xcc, 2f
	 nop
	stxa		%g0, [%g3] ASI_DCACHE_TAG
	membar		#Sync
2:	cmp		%g3, 0
	bne,pt		%xcc, 1b
	 sub		%g3, (1 << 5), %g3

	brz,pn		%g5, 2f
#endif /* DCACHE_ALIASING_POSSIBLE */
	 sethi		%hi(PAGE_SIZE), %g3

1:	flush		%g7
	subcc		%g3, (1 << 5), %g3
	bne,pt		%icc, 1b
	 add		%g7, (1 << 5), %g7

2:	retry
	nop
	nop

	/* %g5:	error
	 * %g6:	tlb op
	 */
__hypervisor_tlb_xcall_error:
	mov	%g5, %g4
	mov	%g6, %g5
	ba,pt	%xcc, etrap
	 rd	%pc, %g7
	mov	%l4, %o0
	call	hypervisor_tlbop_error_xcall
	 mov	%l5, %o1
	ba,a,pt	%xcc, rtrap

	.globl		__hypervisor_xcall_flush_tlb_mm
__hypervisor_xcall_flush_tlb_mm: /* 24 insns */
	/* %g5=ctx, g1,g2,g3,g4,g7=scratch, %g6=unusable */
	mov		%o0, %g2
	mov		%o1, %g3
	mov		%o2, %g4
	mov		%o3, %g1
	mov		%o5, %g7
	clr		%o0		/* ARG0: CPU lists unimplemented */
	clr		%o1		/* ARG1: CPU lists unimplemented */
	mov		%g5, %o2	/* ARG2: mmu context */
	mov		HV_MMU_ALL, %o3	/* ARG3: flags */
	mov		HV_FAST_MMU_DEMAP_CTX, %o5
	ta		HV_FAST_TRAP
	mov		HV_FAST_MMU_DEMAP_CTX, %g6
	brnz,pn		%o0, 1f
	 mov		%o0, %g5
	mov		%g2, %o0
	mov		%g3, %o1
	mov		%g4, %o2
	mov		%g1, %o3
	mov		%g7, %o5
	membar		#Sync
	retry
1:	sethi		%hi(__hypervisor_tlb_xcall_error), %g4
	jmpl		%g4 + %lo(__hypervisor_tlb_xcall_error), %g0
	 nop

	.globl		__hypervisor_xcall_flush_tlb_page
__hypervisor_xcall_flush_tlb_page: /* 20 insns */
	/* %g5=ctx, %g1=vaddr */
	mov		%o0, %g2
	mov		%o1, %g3
	mov		%o2, %g4
	mov		%g1, %o0	        /* ARG0: virtual address */
	mov		%g5, %o1		/* ARG1: mmu context */
	mov		HV_MMU_ALL, %o2		/* ARG2: flags */
	srlx		%o0, PAGE_SHIFT, %o0
	sllx		%o0, PAGE_SHIFT, %o0
	ta		HV_MMU_UNMAP_ADDR_TRAP
	mov		HV_MMU_UNMAP_ADDR_TRAP, %g6
	brnz,a,pn	%o0, 1f
	 mov		%o0, %g5
	mov		%g2, %o0
	mov		%g3, %o1
	mov		%g4, %o2
	membar		#Sync
	retry
1:	sethi		%hi(__hypervisor_tlb_xcall_error), %g4
	jmpl		%g4 + %lo(__hypervisor_tlb_xcall_error), %g0
	 nop

	.globl		__hypervisor_xcall_flush_tlb_kernel_range
__hypervisor_xcall_flush_tlb_kernel_range: /* 44 insns */
	/* %g1=start, %g7=end, g2,g3,g4,g5,g6=scratch */
	sethi		%hi(PAGE_SIZE - 1), %g2
	or		%g2, %lo(PAGE_SIZE - 1), %g2
	andn		%g1, %g2, %g1
	andn		%g7, %g2, %g7
	sub		%g7, %g1, %g3
	srlx		%g3, 18, %g7
	add		%g2, 1, %g2
	sub		%g3, %g2, %g3
	mov		%o0, %g2
	mov		%o1, %g4
	brnz,pn		%g7, 2f
	 mov		%o2, %g7
1:	add		%g1, %g3, %o0	/* ARG0: virtual address */
	mov		0, %o1		/* ARG1: mmu context */
	mov		HV_MMU_ALL, %o2	/* ARG2: flags */
	ta		HV_MMU_UNMAP_ADDR_TRAP
	mov		HV_MMU_UNMAP_ADDR_TRAP, %g6
	brnz,pn		%o0, 1f
	 mov		%o0, %g5
	sethi		%hi(PAGE_SIZE), %o2
	brnz,pt		%g3, 1b
	 sub		%g3, %o2, %g3
5:	mov		%g2, %o0
	mov		%g4, %o1
	mov		%g7, %o2
	membar		#Sync
	retry
1:	sethi		%hi(__hypervisor_tlb_xcall_error), %g4
	jmpl		%g4 + %lo(__hypervisor_tlb_xcall_error), %g0
	 nop
2:	mov		%o3, %g1
	mov		%o5, %g3
	mov		0, %o0		/* ARG0: CPU lists unimplemented */
	mov		0, %o1		/* ARG1: CPU lists unimplemented */
	mov		0, %o2		/* ARG2: mmu context == nucleus */
	mov		HV_MMU_ALL, %o3	/* ARG3: flags */
	mov		HV_FAST_MMU_DEMAP_CTX, %o5
	ta		HV_FAST_TRAP
	mov		%g1, %o3
	brz,pt		%o0, 5b
	 mov		%g3, %o5
	mov		HV_FAST_MMU_DEMAP_CTX, %g6
	ba,pt		%xcc, 1b
	 clr		%g5

	/* These just get rescheduled to PIL vectors. */
	.globl		xcall_call_function
xcall_call_function:
	wr		%g0, (1 << PIL_SMP_CALL_FUNC), %set_softint
	retry

	.globl		xcall_call_function_single
xcall_call_function_single:
	wr		%g0, (1 << PIL_SMP_CALL_FUNC_SNGL), %set_softint
	retry

	.globl		xcall_receive_signal
xcall_receive_signal:
	wr		%g0, (1 << PIL_SMP_RECEIVE_SIGNAL), %set_softint
	retry

	.globl		xcall_capture
xcall_capture:
	wr		%g0, (1 << PIL_SMP_CAPTURE), %set_softint
	retry

#ifdef CONFIG_KGDB
	.globl		xcall_kgdb_capture
xcall_kgdb_capture:
	wr		%g0, (1 << PIL_KGDB_CAPTURE), %set_softint
	retry
#endif

#endif /* CONFIG_SMP */

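	/* Patch the generic (Spitfire) flush routines above with the
	 * Cheetah variants.  The instruction counts passed to
	 * tlb_patch_one must match the "nn insns" annotations on the
	 * routines being copied.
	 */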
	.globl		cheetah_patch_cachetlbops
cheetah_patch_cachetlbops:
	save		%sp, -128, %sp

	sethi		%hi(__flush_tlb_mm), %o0
	or		%o0, %lo(__flush_tlb_mm), %o0
	sethi		%hi(__cheetah_flush_tlb_mm), %o1
	or		%o1, %lo(__cheetah_flush_tlb_mm), %o1
	call		tlb_patch_one
	 mov		19, %o2

	sethi		%hi(__flush_tlb_page), %o0
	or		%o0, %lo(__flush_tlb_page), %o0
	sethi		%hi(__cheetah_flush_tlb_page), %o1
	or		%o1, %lo(__cheetah_flush_tlb_page), %o1
	call		tlb_patch_one
	 mov		22, %o2

	sethi		%hi(__flush_tlb_pending), %o0
	or		%o0, %lo(__flush_tlb_pending), %o0
	sethi		%hi(__cheetah_flush_tlb_pending), %o1
	or		%o1, %lo(__cheetah_flush_tlb_pending), %o1
	call		tlb_patch_one
	 mov		27, %o2

	sethi		%hi(__flush_tlb_kernel_range), %o0
	or		%o0, %lo(__flush_tlb_kernel_range), %o0
	sethi		%hi(__cheetah_flush_tlb_kernel_range), %o1
	or		%o1, %lo(__cheetah_flush_tlb_kernel_range), %o1
	call		tlb_patch_one
	 mov		31, %o2

#ifdef DCACHE_ALIASING_POSSIBLE
	sethi		%hi(__flush_dcache_page), %o0
	or		%o0, %lo(__flush_dcache_page), %o0
	sethi		%hi(__cheetah_flush_dcache_page), %o1
	or		%o1, %lo(__cheetah_flush_dcache_page), %o1
	call		tlb_patch_one
	 mov		11, %o2
#endif /* DCACHE_ALIASING_POSSIBLE */

#ifdef CONFIG_SMP
	sethi		%hi(xcall_flush_tlb_kernel_range), %o0
	or		%o0, %lo(xcall_flush_tlb_kernel_range), %o0
	sethi		%hi(__cheetah_xcall_flush_tlb_kernel_range), %o1
	or		%o1, %lo(__cheetah_xcall_flush_tlb_kernel_range), %o1
	call		tlb_patch_one
	 mov		44, %o2
#endif /* CONFIG_SMP */

	ret
	 restore

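	/* Patch the flush routines (and, on SMP, the cross call
	 * variants) with the sun4v hypervisor versions.
	 */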
	.globl		hypervisor_patch_cachetlbops
hypervisor_patch_cachetlbops:
	save		%sp, -128, %sp

	sethi		%hi(__flush_tlb_mm), %o0
	or		%o0, %lo(__flush_tlb_mm), %o0
	sethi		%hi(__hypervisor_flush_tlb_mm), %o1
	or		%o1, %lo(__hypervisor_flush_tlb_mm), %o1
	call		tlb_patch_one
	 mov		19, %o2

	sethi		%hi(__flush_tlb_page), %o0
	or		%o0, %lo(__flush_tlb_page), %o0
	sethi		%hi(__hypervisor_flush_tlb_page), %o1
	or		%o1, %lo(__hypervisor_flush_tlb_page), %o1
	call		tlb_patch_one
	 mov		22, %o2

	sethi		%hi(__flush_tlb_pending), %o0
	or		%o0, %lo(__flush_tlb_pending), %o0
	sethi		%hi(__hypervisor_flush_tlb_pending), %o1
	or		%o1, %lo(__hypervisor_flush_tlb_pending), %o1
	call		tlb_patch_one
	 mov		27, %o2

	sethi		%hi(__flush_tlb_kernel_range), %o0
	or		%o0, %lo(__flush_tlb_kernel_range), %o0
	sethi		%hi(__hypervisor_flush_tlb_kernel_range), %o1
	or		%o1, %lo(__hypervisor_flush_tlb_kernel_range), %o1
	call		tlb_patch_one
	 mov		31, %o2

#ifdef DCACHE_ALIASING_POSSIBLE
	sethi		%hi(__flush_dcache_page), %o0
	or		%o0, %lo(__flush_dcache_page), %o0
	sethi		%hi(__hypervisor_flush_dcache_page), %o1
	or		%o1, %lo(__hypervisor_flush_dcache_page), %o1
	call		tlb_patch_one
	 mov		2, %o2
#endif /* DCACHE_ALIASING_POSSIBLE */

#ifdef CONFIG_SMP
	sethi		%hi(xcall_flush_tlb_mm), %o0
	or		%o0, %lo(xcall_flush_tlb_mm), %o0
	sethi		%hi(__hypervisor_xcall_flush_tlb_mm), %o1
	or		%o1, %lo(__hypervisor_xcall_flush_tlb_mm), %o1
	call		tlb_patch_one
	 mov		24, %o2

	sethi		%hi(xcall_flush_tlb_page), %o0
	or		%o0, %lo(xcall_flush_tlb_page), %o0
	sethi		%hi(__hypervisor_xcall_flush_tlb_page), %o1
	or		%o1, %lo(__hypervisor_xcall_flush_tlb_page), %o1
	call		tlb_patch_one
	 mov		20, %o2

	sethi		%hi(xcall_flush_tlb_kernel_range), %o0
	or		%o0, %lo(xcall_flush_tlb_kernel_range), %o0
	sethi		%hi(__hypervisor_xcall_flush_tlb_kernel_range), %o1
	or		%o1, %lo(__hypervisor_xcall_flush_tlb_kernel_range), %o1
	call		tlb_patch_one
	 mov		44, %o2
#endif /* CONFIG_SMP */

	ret
	 restore