/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * This file contains low-level functions for performing various
 * types of TLB invalidations on various processors with no hash
 * table.
 *
 * This file implements the following functions for all no-hash
 * processors. Some aren't implemented for some variants. Some
 * are inline in tlbflush.h
 *
 *	- tlbil_va
 *	- tlbil_pid
 *	- tlbil_all
 *	- tlbivax_bcast
 *
 * Code mostly moved over from misc_32.S
 *
 *    Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
 *
 * Partially rewritten by Cort Dougan (cort@cs.nmt.edu)
 * Paul Mackerras, Kumar Gala and Benjamin Herrenschmidt.
 */

#include <asm/reg.h>
#include <asm/page.h>
#include <asm/cputable.h>
#include <asm/mmu.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/processor.h>
#include <asm/bug.h>
#include <asm/asm-compat.h>
#include <asm/feature-fixups.h>

#if defined(CONFIG_40x)

/*
 * 40x implementation needs only tlbil_va
 */
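/*
 * Assumed calling convention (our reading of the code below, not
 * documented in the original): roughly
 *	void __tlbil_va(unsigned long address, unsigned int pid)
 * i.e. r3 = effective address to invalidate, r4 = PID it was
 * mapped under (written to SPRN_PID for the search).
 */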
_GLOBAL(__tlbil_va)
	/* We run the search with interrupts disabled because we have to change
	 * the PID and we don't want to be preempted while it holds the
	 * temporary value.
	 */
	mfmsr	r5
	mfspr	r6,SPRN_PID
	wrteei	0
	mtspr	SPRN_PID,r4
	tlbsx.	r3, 0, r3
	mtspr	SPRN_PID,r6
	wrtee	r5
	bne	1f
	sync
	/* There are only 64 TLB entries, so r3 < 64, which means bit 25 is
	 * clear. Since 25 is the V bit in the TLB_TAG, loading this value
	 * will invalidate the TLB entry.
	 */
	tlbwe	r3, r3, TLB_TAG
	isync
1:	blr

#elif defined(CONFIG_PPC_8xx)

/*
 * Nothing to do for 8xx, everything is inline
 */

#elif defined(CONFIG_44x) /* Includes 47x */

/*
 * 440 implementation uses tlbsx/we for tlbil_va and a full sweep
 * of the TLB for everything else.
 */
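/*
 * Same assumed interface as the 40x variant above: r3 = effective
 * address, r4 = PID/STID (the STID insert into MMUCR below suggests
 * r4 carries the context id; treat this as our inference).
 */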
_GLOBAL(__tlbil_va)
	mfspr	r5,SPRN_MMUCR
	mfmsr   r10

	/*
	 * We write 16 bits of STID since the 47x supports that many; we
	 * should never be passed out-of-bounds values on 440 (hopefully)
	 */
	rlwimi  r5,r4,0,16,31

	/* We have to run the search with interrupts disabled, otherwise
	 * an interrupt which causes a TLB miss can clobber the MMUCR
	 * between the mtspr and the tlbsx.
	 *
	 * Critical and Machine Check interrupts take care of saving
	 * and restoring MMUCR, so only normal interrupts have to be
	 * taken care of.
	 */
	wrteei	0
	mtspr	SPRN_MMUCR,r5
	tlbsx.	r6,0,r3
	bne	10f
	sync
BEGIN_MMU_FTR_SECTION
	b	2f
END_MMU_FTR_SECTION_IFSET(MMU_FTR_TYPE_47x)
	/* On the 440 there are only 64 TLB entries, so r6 < 64, which means
	 * bit 22 is clear. Since 22 is the V bit in the TLB_PAGEID, loading
	 * this value will invalidate the TLB entry.
	 */
	tlbwe	r6,r6,PPC44x_TLB_PAGEID
	isync
10:	wrtee	r10
	blr
2:
#ifdef CONFIG_PPC_47x
	oris	r7,r6,0x8000	/* specify way explicitly */
	clrrwi	r4,r3,12	/* get an EPN for the hashing with V = 0 */
	ori	r4,r4,PPC47x_TLBE_SIZE
	tlbwe   r4,r7,0		/* write it */
	isync
	wrtee	r10
	blr
#else /* CONFIG_PPC_47x */
1:	trap
	EMIT_BUG_ENTRY 1b,__FILE__,__LINE__,0;
#endif /* !CONFIG_PPC_47x */

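/*
 * On 440 both entry points below do the same full sweep of the TLB
 * (note that _tlbil_pid appears to ignore the PID in r3 and flushes
 * everything). The 47x path instead walks the TLB by set and way,
 * skipping ways marked bolted in tlb_47x_boltmap.
 */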
_GLOBAL(_tlbil_all)
_GLOBAL(_tlbil_pid)
BEGIN_MMU_FTR_SECTION
	b	2f
END_MMU_FTR_SECTION_IFSET(MMU_FTR_TYPE_47x)
	li	r3,0
	sync

	/* Load high watermark */
	lis	r4,tlb_44x_hwater@ha
	lwz	r5,tlb_44x_hwater@l(r4)

1:	tlbwe	r3,r3,PPC44x_TLB_PAGEID
	addi	r3,r3,1
	cmpw	0,r3,r5
	ble	1b

	isync
	blr
2:
#ifdef CONFIG_PPC_47x
	/* 476 variant. There's no simple way to do this; hopefully we'll
	 * manage to limit the number of such full invalidates
	 */
	mfmsr	r11		/* Interrupts off */
	wrteei	0
	li	r3,-1		/* Current set */
	lis	r10,tlb_47x_boltmap@h
	ori	r10,r10,tlb_47x_boltmap@l
	lis	r7,0x8000	/* Specify way explicitly */

	b	9f		/* For each set */

1:	li	r9,4		/* Number of ways */
	li	r4,0		/* Current way */
	li	r6,0		/* Default entry value 0 */
	andi.	r0,r8,1		/* Check if way 0 is bolted */
	mtctr	r9		/* Load way counter */
	bne-	3f		/* Bolted, skip loading it */

2:	/* For each way */
	or	r5,r3,r4	/* Make way|index for tlbre */
	rlwimi	r5,r5,16,8,15	/* Copy index into position */
	tlbre	r6,r5,0		/* Read entry */
3:	addis	r4,r4,0x2000	/* Next way */
	andi.	r0,r6,PPC47x_TLB0_VALID /* Valid entry ? */
	beq	4f		/* Nope, skip it */
	rlwimi	r7,r5,0,1,2	/* Insert way number */
	rlwinm	r6,r6,0,21,19	/* Clear V */
	tlbwe   r6,r7,0		/* Write it */
4:	bdnz	2b		/* Loop for each way */
	srwi	r8,r8,1		/* Next boltmap bit */
9:	cmpwi	cr1,r3,255	/* Last set done ? */
	addi	r3,r3,1		/* Next set */
	beq	cr1,1f		/* End of loop */
	andi.	r0,r3,0x1f	/* Need to load a new boltmap word ? */
	bne	1b		/* No, loop */
	lwz	r8,0(r10)	/* Load boltmap entry */
	addi	r10,r10,4	/* Next word */
	b	1b		/* Then loop */
1:	isync			/* Sync shadows */
	wrtee	r11
#else /* CONFIG_PPC_47x */
1:	trap
	EMIT_BUG_ENTRY 1b,__FILE__,__LINE__,0;
#endif /* !CONFIG_PPC_47x */
	blr

#ifdef CONFIG_PPC_47x

/*
 * _tlbivax_bcast is only on 47x. We don't bother doing a runtime
 * check though; it will blow up soon enough if we mistakenly try
 * to use it on a 440.
 */
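/*
 * Assumed arguments, by analogy with __tlbil_va above: r3 =
 * effective address to broadcast-invalidate, r4 = STID (inserted
 * into MMUCR before the tlbivax).
 */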
_GLOBAL(_tlbivax_bcast)
	mfspr	r5,SPRN_MMUCR
	mfmsr	r10
	rlwimi	r5,r4,0,16,31
	wrteei	0
	mtspr	SPRN_MMUCR,r5
	isync
	PPC_TLBIVAX(0, R3)
	isync
	eieio
	tlbsync
BEGIN_FTR_SECTION
	b	1f
END_FTR_SECTION_IFSET(CPU_FTR_476_DD2)
	sync
	wrtee	r10
	blr
/*
 * DD2 HW could hang if an instruction fetch happens before the msync
 * completes. Touch enough instruction cache lines to ensure cache hits
 */
1:	mflr	r9
	bl	2f
2:	mflr	r6
	li	r7,32
	PPC_ICBT(0,R6,R7)		/* touch next cache line */
	add	r6,r6,r7
	PPC_ICBT(0,R6,R7)		/* touch next cache line */
	add	r6,r6,r7
	PPC_ICBT(0,R6,R7)		/* touch next cache line */
	sync
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	mtlr	r9
	wrtee	r10
	blr
#endif /* CONFIG_PPC_47x */

#elif defined(CONFIG_FSL_BOOKE)
/*
 * FSL BookE implementations.
 *
 * Since the feature sections use _SECTION_ELSE, we need to have
 * the larger code path before the _SECTION_ELSE
 */

/*
 * Flush MMU TLB on the local processor
 */
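/*
 * A note on the mechanism (our summary, not in the original): the
 * MMUCSR0[TLBFI] write below requests a flash invalidate of the
 * TLB arrays, and the loop spins until the hardware clears the
 * bits to signal completion. The tlbilx path is the ISA 2.06
 * alternative selected by MMU_FTR_USE_TLBILX.
 */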
_GLOBAL(_tlbil_all)
BEGIN_MMU_FTR_SECTION
	li	r3,(MMUCSR0_TLBFI)@l
	mtspr	SPRN_MMUCSR0, r3
1:
	mfspr	r3,SPRN_MMUCSR0
	andi.	r3,r3,MMUCSR0_TLBFI@l
	bne	1b
MMU_FTR_SECTION_ELSE
	PPC_TLBILX_ALL(0,R0)
ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX)
	msync
	isync
	blr

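/*
 * Flush all TLB entries tagged with a given PID (r3, by our reading
 * of the MAS6[SPID] setup below). Without tlbilx this falls back to
 * a full flash invalidate.
 */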
_GLOBAL(_tlbil_pid)
BEGIN_MMU_FTR_SECTION
	slwi	r3,r3,16
	mfmsr	r10
	wrteei	0
	mfspr	r4,SPRN_MAS6	/* save MAS6 */
	mtspr	SPRN_MAS6,r3
	PPC_TLBILX_PID(0,R0)
	mtspr	SPRN_MAS6,r4	/* restore MAS6 */
	wrtee	r10
MMU_FTR_SECTION_ELSE
	li	r3,(MMUCSR0_TLBFI)@l
	mtspr	SPRN_MMUCSR0, r3
1:
	mfspr	r3,SPRN_MMUCSR0
	andi.	r3,r3,MMUCSR0_TLBFI@l
	bne	1b
ALT_MMU_FTR_SECTION_END_IFSET(MMU_FTR_USE_TLBILX)
	msync
	isync
	blr

/*
 * Flush MMU TLB for a particular address, but only on the local processor
 * (no broadcast)
 */
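/*
 * Arguments (assumed from the MAS6 setup below): r3 = effective
 * address, r4 = PID. A 4K page size is assumed for the search.
 */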
_GLOBAL(__tlbil_va)
	mfmsr	r10
	wrteei	0
	slwi	r4,r4,16
	ori	r4,r4,(MAS6_ISIZE(BOOK3E_PAGESZ_4K))@l
	mtspr	SPRN_MAS6,r4		/* assume AS=0 for now */
BEGIN_MMU_FTR_SECTION
	tlbsx	0,r3
	mfspr	r4,SPRN_MAS1		/* check valid */
	andis.	r3,r4,MAS1_VALID@h
	beq	1f
	rlwinm	r4,r4,0,1,31
	mtspr	SPRN_MAS1,r4
	tlbwe
MMU_FTR_SECTION_ELSE
	PPC_TLBILX_VA(0,R3)
ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX)
	msync
	isync
1:	wrtee	r10
	blr
#elif defined(CONFIG_PPC_BOOK3E)
/*
 * New Book3E (>= 2.06) implementation
 *
 * Note: We may be able to get away without the interrupt masking stuff
 * if we save/restore MAS6 on exceptions that might modify it
 */
_GLOBAL(_tlbil_pid)
	slwi	r4,r3,MAS6_SPID_SHIFT
	mfmsr	r10
	wrteei	0
	mtspr	SPRN_MAS6,r4
	PPC_TLBILX_PID(0,R0)
	wrtee	r10
	msync
	isync
	blr

_GLOBAL(_tlbil_pid_noind)
	slwi	r4,r3,MAS6_SPID_SHIFT
	mfmsr	r10
	ori	r4,r4,MAS6_SIND
	wrteei	0
	mtspr	SPRN_MAS6,r4
	PPC_TLBILX_PID(0,R0)
	wrtee	r10
	msync
	isync
	blr

_GLOBAL(_tlbil_all)
	PPC_TLBILX_ALL(0,R0)
	msync
	isync
	blr

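/*
 * Arguments for _tlbil_va and _tlbivax_bcast below (our reading of
 * the MAS6 setup, not documented in the original): r3 = effective
 * address, r4 = PID, r5 = page size (tsize), r6 = indirect entry
 * flag (ind), which selects whether MAS6[SIND] is set.
 */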
_GLOBAL(_tlbil_va)
	mfmsr	r10
	wrteei	0
	cmpwi	cr0,r6,0
	slwi	r4,r4,MAS6_SPID_SHIFT
	rlwimi	r4,r5,MAS6_ISIZE_SHIFT,MAS6_ISIZE_MASK
	beq	1f
	rlwimi	r4,r6,MAS6_SIND_SHIFT,MAS6_SIND
1:	mtspr	SPRN_MAS6,r4		/* assume AS=0 for now */
	PPC_TLBILX_VA(0,R3)
	msync
	isync
	wrtee	r10
	blr

_GLOBAL(_tlbivax_bcast)
	mfmsr	r10
	wrteei	0
	cmpwi	cr0,r6,0
	slwi	r4,r4,MAS6_SPID_SHIFT
	rlwimi	r4,r5,MAS6_ISIZE_SHIFT,MAS6_ISIZE_MASK
	beq	1f
	rlwimi	r4,r6,MAS6_SIND_SHIFT,MAS6_SIND
1:	mtspr	SPRN_MAS6,r4		/* assume AS=0 for now */
	PPC_TLBIVAX(0,R3)
	eieio
	tlbsync
	sync
	wrtee	r10
	blr

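/*
 * Switch the current context: r3 = new context id (written to
 * SPRN_PID), r4 = new PGDIR pointer (inferred from the Abatron
 * comment below; treat the exact signature as an assumption).
 */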
_GLOBAL(set_context)
#ifdef CONFIG_BDI_SWITCH
	/* Context switch the PTE pointer for the Abatron BDI2000.
	 * The PGDIR is the second parameter.
	 */
	lis	r5, abatron_pteptrs@h
	ori	r5, r5, abatron_pteptrs@l
	stw	r4, 0x4(r5)
#endif
	mtspr	SPRN_PID,r3
	isync			/* Force context change */
	blr
#else
#error Unsupported processor type!
#endif

#if defined(CONFIG_PPC_FSL_BOOK3E)
/*
 * extern void loadcam_entry(unsigned int index)
 *
 * Load TLBCAM[index] entry into the L2 CAM MMU
 * Must preserve r7, r8, r9, r10 and r11
 */
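/*
 * (The preservation requirement exists because loadcam_multi below
 * calls this in a loop with live state in those registers: r7 =
 * temporary entry slot, r8 = saved LR, r9 = current index, r10 =
 * end index, r11 = saved MSR[IS] state.)
 */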
_GLOBAL(loadcam_entry)
	mflr	r5
	LOAD_REG_ADDR_PIC(r4, TLBCAM)
	mtlr	r5
	mulli	r5,r3,TLBCAM_SIZE
	add	r3,r5,r4
	lwz	r4,TLBCAM_MAS0(r3)
	mtspr	SPRN_MAS0,r4
	lwz	r4,TLBCAM_MAS1(r3)
	mtspr	SPRN_MAS1,r4
	PPC_LL	r4,TLBCAM_MAS2(r3)
	mtspr	SPRN_MAS2,r4
	lwz	r4,TLBCAM_MAS3(r3)
	mtspr	SPRN_MAS3,r4
BEGIN_MMU_FTR_SECTION
	lwz	r4,TLBCAM_MAS7(r3)
	mtspr	SPRN_MAS7,r4
END_MMU_FTR_SECTION_IFSET(MMU_FTR_BIG_PHYS)
	isync
	tlbwe
	isync
	blr

/*
 * Load multiple TLB entries at once, using an alternate-space
 * trampoline so that we don't have to care about whether the same
 * TLB entry maps us before and after.
 *
 * r3 = first entry to write
 * r4 = number of entries to write
 * r5 = temporary tlb entry
 */
_GLOBAL(loadcam_multi)
	mflr	r8
	/* Don't switch to AS=1 if already there */
	mfmsr	r11
	andi.	r11,r11,MSR_IS
	bne	10f

	/*
	 * Set up temporary TLB entry that is the same as what we're
	 * running from, but in AS=1.
	 */
	bl	1f
1:	mflr	r6
	tlbsx	0,r8
	mfspr	r6,SPRN_MAS1
	ori	r6,r6,MAS1_TS
	mtspr	SPRN_MAS1,r6
	mfspr	r6,SPRN_MAS0
	rlwimi	r6,r5,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK
	mr	r7,r5
	mtspr	SPRN_MAS0,r6
	isync
	tlbwe
	isync

	/* Switch to AS=1 */
	mfmsr	r6
	ori	r6,r6,MSR_IS|MSR_DS
	mtmsr	r6
	isync

10:
	mr	r9,r3
	add	r10,r3,r4
2:	bl	loadcam_entry
	addi	r9,r9,1
	cmpw	r9,r10
	mr	r3,r9
	blt	2b

	/* Don't return to AS=0 if we were in AS=1 at function start */
	andi.	r11,r11,MSR_IS
	bne	3f

	/* Return to AS=0 and clear the temporary entry */
	mfmsr	r6
	rlwinm.	r6,r6,0,~(MSR_IS|MSR_DS)
	mtmsr	r6
	isync

	li	r6,0
	mtspr	SPRN_MAS1,r6
	rlwinm	r6,r7,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK
	oris	r6,r6,MAS0_TLBSEL(1)@h
	mtspr	SPRN_MAS0,r6
	isync
	tlbwe
	isync

3:
	mtlr	r8
	blr
#endif