xref: /OK3568_Linux_fs/kernel/arch/xtensa/include/asm/initialize_mmu.h (revision 4882a59341e53eb6f0b4789bf948001014eff981)
/*
 * arch/xtensa/include/asm/initialize_mmu.h
 *
 * Initializes MMU:
 *
 *      For the new V3 MMU we remap the TLB from virtual == physical
 *      to the standard Linux mapping used in earlier MMUs.
 *
 *      For the MMU we also support a new configuration register that
 *      specifies how the S32C1I instruction operates with the cache
 *      controller.
 *
 * This file is subject to the terms and conditions of the GNU General
 * Public License.  See the file "COPYING" in the main directory of
 * this archive for more details.
 *
 * Copyright (C) 2008 - 2012 Tensilica, Inc.
 *
 *   Marc Gauthier <marc@tensilica.com>
 *   Pete Delaney <piet@tensilica.com>
 */

#ifndef _XTENSA_INITIALIZE_MMU_H
#define _XTENSA_INITIALIZE_MMU_H

#include <linux/init.h>
#include <linux/pgtable.h>
#include <asm/vectors.h>

#if XCHAL_HAVE_PTP_MMU
#define CA_BYPASS	(_PAGE_CA_BYPASS | _PAGE_HW_WRITE | _PAGE_HW_EXEC)
#define CA_WRITEBACK	(_PAGE_CA_WB     | _PAGE_HW_WRITE | _PAGE_HW_EXEC)
#else
#define CA_WRITEBACK	(0x4)
#endif

#ifdef __ASSEMBLY__

#define XTENSA_HWVERSION_RC_2009_0 230000

	.macro	initialize_mmu

#if XCHAL_HAVE_S32C1I && (XCHAL_HW_MIN_VERSION >= XTENSA_HWVERSION_RC_2009_0)
/*
 * We have an Atomic Operation Control (ATOMCTL) register; initialize it.
 * For details see Documentation/xtensa/atomctl.rst
 */
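/*
 * ATOMCTL holds three 2-bit fields, one per access type (writeback,
 * writethrough, bypass); in each field a value of 1 requests an RCW bus
 * transaction and 2 performs the atomic inside the cache controller.
 * Decoded that way, 0x25 below is WB=internal, WT=RCW, BY=RCW, which is
 * exactly the "internal for writeback, RCW otherwise" case; see
 * atomctl.rst for the full tables.
 */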
#if XCHAL_DCACHE_IS_COHERENT
	movi	a3, 0x25	/* For SMP/MX -- internal for writeback,
				 * RCW otherwise
				 */
#else
	movi	a3, 0x29	/* non-MX -- most cores use standard memory
				 * controllers, which usually can't use RCW
				 */
#endif
	wsr	a3, atomctl
#endif  /* XCHAL_HAVE_S32C1I &&
	 * (XCHAL_HW_MIN_VERSION >= XTENSA_HWVERSION_RC_2009_0)
	 */

#if defined(CONFIG_MMU) && XCHAL_HAVE_PTP_MMU && XCHAL_HAVE_SPANNING_WAY
/*
 * Have MMU v3
 */

#if !XCHAL_HAVE_VECBASE
# error "MMU v3 requires reloc vectors"
#endif

	movi	a1, 0
	_call0	1f
	_j	2f

	.align	4
1:	movi	a2, 0x10000000

#if CONFIG_KERNEL_LOAD_ADDRESS < 0x40000000ul
#define TEMP_MAPPING_VADDR 0x40000000
#else
#define TEMP_MAPPING_VADDR 0x00000000
#endif
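
/*
 * The temporary mapping lives in a 512MB region away from the one the
 * kernel is loaded into: 0x40000000 normally, or 0x00000000 when the
 * kernel itself is loaded at or above 0x40000000.
 */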

	/* Step 1: invalidate the spanning-way mapping covering TEMP_MAPPING_VADDR. */

	movi	a2, TEMP_MAPPING_VADDR | XCHAL_SPANNING_WAY
	idtlb	a2
	iitlb	a2
	isync

	/* Step 2: map a 128MB page at TEMP_MAPPING_VADDR to the paddr
	 * containing this code and jump to the new mapping.
	 */
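
	/*
	 * a0 still holds the return address from the _call0 above and, with
	 * the reset identity mapping in place, virtual == physical here.
	 * a3 becomes that address rounded down to a 128MB boundary plus the
	 * bypass cache attributes; a7 is TEMP_MAPPING_VADDR in TLB way 5,
	 * which holds 128MB pages here.
	 */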

	srli	a3, a0, 27
	slli	a3, a3, 27
	addi	a3, a3, CA_BYPASS
	addi	a7, a2, 5 - XCHAL_SPANNING_WAY
	wdtlb	a3, a7
	witlb	a3, a7
	isync

	slli	a4, a0, 5
	srli	a4, a4, 5
	addi	a5, a2, -XCHAL_SPANNING_WAY
	add	a4, a4, a5
	jx	a4
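
	/*
	 * a4 is the return address's offset within its 128MB page rebased
	 * onto TEMP_MAPPING_VADDR (a5), so this jump re-enters the code
	 * through the temporary mapping at the _j instruction above, whose
	 * relative jump then reaches label 2.
	 */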

	/* Step 3: unmap everything other than the current area.
	 *	   Start one 512MB step above the temporary region, wrap
	 *	   around the address space, and stop on reaching it again.
	 */
2:	movi	a4, 0x20000000
	add	a5, a2, a4
3:	idtlb	a5
	iitlb	a5
	add	a5, a5, a4
	bne	a5, a2, 3b
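
	/*
	 * The spanning-way entry for the temporary region itself is not
	 * touched here: it was already invalidated in step 1, and the code
	 * is currently running through the way-5 mapping instead.
	 */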

	/* Step 4: Set up the MMU with the requested static mappings. */
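
	/*
	 * ITLBCFG/DTLBCFG select the page size used by the variable-size
	 * TLB ways; 0x01000000 appears to switch way 6 to 256MB pages
	 * while way 5 keeps its 128MB default, matching the KSEG and KIO
	 * entries written below.
	 */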

	movi	a6, 0x01000000
	wsr	a6, ITLBCFG
	wsr	a6, DTLBCFG
	isync
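
	/*
	 * Each static entry below is written to both TLBs with
	 * "vaddr | way" as the entry address and "paddr | cache attribute"
	 * as the entry data.
	 */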

	movi	a5, XCHAL_KSEG_CACHED_VADDR + XCHAL_KSEG_TLB_WAY
	movi	a4, XCHAL_KSEG_PADDR + CA_WRITEBACK
	wdtlb	a4, a5
	witlb	a4, a5

	movi	a5, XCHAL_KSEG_BYPASS_VADDR + XCHAL_KSEG_TLB_WAY
	movi	a4, XCHAL_KSEG_PADDR + CA_BYPASS
	wdtlb	a4, a5
	witlb	a4, a5

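	/*
	 * A 512MB KSEG does not fit in one 256MB way-6 page, so the second
	 * pair of entries below covers its upper half.
	 */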
#ifdef CONFIG_XTENSA_KSEG_512M
	movi	a5, XCHAL_KSEG_CACHED_VADDR + 0x10000000 + XCHAL_KSEG_TLB_WAY
	movi	a4, XCHAL_KSEG_PADDR + 0x10000000 + CA_WRITEBACK
	wdtlb	a4, a5
	witlb	a4, a5

	movi	a5, XCHAL_KSEG_BYPASS_VADDR + 0x10000000 + XCHAL_KSEG_TLB_WAY
	movi	a4, XCHAL_KSEG_PADDR + 0x10000000 + CA_BYPASS
	wdtlb	a4, a5
	witlb	a4, a5
#endif

	movi	a5, XCHAL_KIO_CACHED_VADDR + XCHAL_KIO_TLB_WAY
	movi	a4, XCHAL_KIO_DEFAULT_PADDR + CA_WRITEBACK
	wdtlb	a4, a5
	witlb	a4, a5

	movi	a5, XCHAL_KIO_BYPASS_VADDR + XCHAL_KIO_TLB_WAY
	movi	a4, XCHAL_KIO_DEFAULT_PADDR + CA_BYPASS
	wdtlb	a4, a5
	witlb	a4, a5

	isync

	/* Jump to self, using final mappings. */
	movi	a4, 1f
	jx	a4

1:
	/* Step 5: remove temporary mapping. */
	idtlb	a7
	iitlb	a7
	isync

	movi	a0, 0
	wsr	a0, ptevaddr
	rsync
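
	/*
	 * PTEVADDR, the virtual base of the page-table window consulted by
	 * the TLB refill handlers, is cleared above; its real value is
	 * programmed later, once the kernel page tables exist.
	 */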

#endif /* defined(CONFIG_MMU) && XCHAL_HAVE_PTP_MMU &&
	  XCHAL_HAVE_SPANNING_WAY */

	.endm

	.macro	initialize_cacheattr

#if !defined(CONFIG_MMU) && (XCHAL_HAVE_TLBS || XCHAL_HAVE_MPU)
#if CONFIG_MEMMAP_CACHEATTR == 0x22222222 && XCHAL_HAVE_PTP_MMU
#error Default MEMMAP_CACHEATTR of 0x22222222 does not work with full MMU.
#endif
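
/*
 * CONFIG_MEMMAP_CACHEATTR packs one 4-bit cache attribute per 512MB
 * region of the address space: the least significant nibble describes
 * 0x00000000..0x1fffffff and the most significant nibble
 * 0xe0000000..0xffffffff.  Both paths below walk the regions and apply
 * the matching nibble.
 */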

#if XCHAL_HAVE_MPU
	__REFCONST
	.align	4
.Lattribute_table:
	.long 0x000000, 0x1fff00, 0x1ddf00, 0x1eef00
	.long 0x006600, 0x000000, 0x000000, 0x000000
	.long 0x000000, 0x000000, 0x000000, 0x000000
	.long 0x000000, 0x000000, 0x000000, 0x000000
	.previous

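	/*
	 * The table above translates each cacheattr nibble into MPU
	 * access/memory-type bits.  The loop below walks from the top
	 * region (0xe0000000) downwards: a5 is the region start with what
	 * appears to be the entry enable bit set, and a6 is the foreground
	 * entry index counting down from the top of the XCHAL_MPU_ENTRIES
	 * entries.  Consecutive regions with the same nibble reuse one
	 * entry; rewriting it with a lower start address extends it, since
	 * an entry covers the range up to the next entry's start.
	 */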
	movi	a3, .Lattribute_table
	movi	a4, CONFIG_MEMMAP_CACHEATTR
	movi	a5, 1
	movi	a6, XCHAL_MPU_ENTRIES
	movi	a10, 0x20000000
	movi	a11, -1
1:
	sub	a5, a5, a10
	extui	a8, a4, 28, 4
	beq	a8, a11, 2f
	addi	a6, a6, -1
	mov	a11, a8
2:
	addx4	a9, a8, a3
	l32i	a9, a9, 0
	or	a9, a9, a6
	wptlb	a9, a5
	slli	a4, a4, 4
	bgeu	a5, a10, 1b

#else
	movi	a5, XCHAL_SPANNING_WAY
	movi	a6, ~_PAGE_ATTRIB_MASK
	movi	a4, CONFIG_MEMMAP_CACHEATTR
	movi	a8, 0x20000000
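	/*
	 * For each 512MB region, read the spanning-way entry, splice the
	 * matching cacheattr nibble into its attribute bits and write it
	 * back to both TLBs.  The xor/and/xor sequence computes
	 * (entry & ~_PAGE_ATTRIB_MASK) | (cacheattr & _PAGE_ATTRIB_MASK).
	 */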
1:
	rdtlb1	a3, a5
	xor	a3, a3, a4
	and	a3, a3, a6
	xor	a3, a3, a4
	wdtlb	a3, a5
	ritlb1	a3, a5
	xor	a3, a3, a4
	and	a3, a3, a6
	xor	a3, a3, a4
	witlb	a3, a5

	add	a5, a5, a8
	srli	a4, a4, 4
	bgeu	a5, a8, 1b

	isync
#endif
#endif

	.endm

#endif /* __ASSEMBLY__ */

#endif /* _XTENSA_INITIALIZE_MMU_H */