xref: /OK3568_Linux_fs/kernel/arch/xtensa/boot/boot-redboot/bootstrap.S (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun/* SPDX-License-Identifier: GPL-2.0 */
2*4882a593Smuzhiyun#include <asm/core.h>
3*4882a593Smuzhiyun#include <asm/regs.h>
4*4882a593Smuzhiyun#include <asm/asmmacro.h>
5*4882a593Smuzhiyun#include <asm/cacheasm.h>
6*4882a593Smuzhiyun	/*
7*4882a593Smuzhiyun	 * RB-Data: RedBoot data/bss
8*4882a593Smuzhiyun	 * P:	    Boot-Parameters
9*4882a593Smuzhiyun	 * L:	    Kernel-Loader
10*4882a593Smuzhiyun	 *
11*4882a593Smuzhiyun	 * The Linux-Kernel image including the loader must be loaded
12*4882a593Smuzhiyun	 * to a position so that the kernel and the boot parameters
13*4882a593Smuzhiyun	 * can fit in the space before the load address.
14*4882a593Smuzhiyun	 *  ______________________________________________________
15*4882a593Smuzhiyun	 * |_RB-Data_|_P_|__________|_L_|___Linux-Kernel___|______|
16*4882a593Smuzhiyun	 *                          ^
17*4882a593Smuzhiyun	 *                          ^ Load address
18*4882a593Smuzhiyun	 *  ______________________________________________________
19*4882a593Smuzhiyun	 * |___Linux-Kernel___|_P_|_L_|___________________________|
20*4882a593Smuzhiyun	 *
21*4882a593Smuzhiyun	 * The loader copies the parameter list to the position that
22*4882a593Smuzhiyun	 * will be at the end of the kernel, and copies itself to the
23*4882a593Smuzhiyun	 * end of the parameter list.
24*4882a593Smuzhiyun	 */
25*4882a593Smuzhiyun
26*4882a593Smuzhiyun/* Make sure we have enough space for the 'uncompressor' */
27*4882a593Smuzhiyun
28*4882a593Smuzhiyun#define STACK_SIZE 32768
29*4882a593Smuzhiyun#define HEAP_SIZE (131072*4)
30*4882a593Smuzhiyun
31*4882a593Smuzhiyun	# a2: Parameter list
32*4882a593Smuzhiyun	# a3: Size of parameter list
33*4882a593Smuzhiyun
/*
 * Loader entry section. __start must be the very first byte of the
 * image: RedBoot jumps here with a2 = pointer to the boot parameter
 * list and a3 = its size (see the register notes above).
 *
 * `entry sp, 32` performs the windowed-ABI function entry; `_call0`
 * then transfers to _start while depositing the return address
 * (the runtime address of __start_a0) in a0, which _start uses to
 * compute the actual load address of this image.
 */
34*4882a593Smuzhiyun	.section .start, "ax"
35*4882a593Smuzhiyun
36*4882a593Smuzhiyun	.globl __start
37*4882a593Smuzhiyun	/* this must be the first byte of the loader! */
38*4882a593Smuzhiyun__start:
39*4882a593Smuzhiyun	entry	sp, 32		# we do not intend to return
40*4882a593Smuzhiyun	_call0	_start
41*4882a593Smuzhiyun__start_a0:	# a0 points here at runtime; anchor for load-address math
42*4882a593Smuzhiyun	.align 4
43*4882a593Smuzhiyun
44*4882a593Smuzhiyun	.section .text, "ax"
45*4882a593Smuzhiyun	.literal_position
46*4882a593Smuzhiyun	.begin literal_prefix .text
47*4882a593Smuzhiyun
48*4882a593Smuzhiyun	/* put literals in here! */
49*4882a593Smuzhiyun
	/*
	 * _start: reset the register-window state, compute where this
	 * image was actually loaded, copy the whole loader to its linked
	 * address and continue executing there (at _reloc).
	 *
	 * Entered from __start via _call0, so a0 = runtime address of
	 * __start_a0.
	 */
50*4882a593Smuzhiyun	.globl _start
51*4882a593Smuzhiyun_start:
52*4882a593Smuzhiyun
53*4882a593Smuzhiyun	/* 'reset' window registers */
54*4882a593Smuzhiyun
	/* PS := 1 — NOTE(review): presumably INTLEVEL=1 with window
	 * overflow exceptions disabled while WINDOWSTART is rewritten;
	 * confirm against the PS layout in <asm/regs.h>. */
55*4882a593Smuzhiyun	movi	a4, 1
56*4882a593Smuzhiyun	wsr	a4, ps
57*4882a593Smuzhiyun	rsync
58*4882a593Smuzhiyun
	/* WINDOWSTART := 1 << WINDOWBASE: mark only the currently
	 * active register window as live (a4 is still 1 here). */
59*4882a593Smuzhiyun	rsr	a5, windowbase
60*4882a593Smuzhiyun	ssl	a5
61*4882a593Smuzhiyun	sll	a4, a4
62*4882a593Smuzhiyun	wsr	a4, windowstart
63*4882a593Smuzhiyun	rsync
64*4882a593Smuzhiyun
	/* PS := 0x00040000 — NOTE(review): looks like PS.WOE set
	 * (window overflow re-enabled), interrupts at level 0; confirm. */
65*4882a593Smuzhiyun	movi	a4, 0x00040000
66*4882a593Smuzhiyun	wsr	a4, ps
67*4882a593Smuzhiyun	rsync
68*4882a593Smuzhiyun
69*4882a593Smuzhiyun	/* copy the loader to its address
70*4882a593Smuzhiyun	 * Note: The loader itself is a very small piece, so we assume we
71*4882a593Smuzhiyun	 *       don't partially overlap. We also assume (even more important)
72*4882a593Smuzhiyun	 *	 that the kernel image is out of the way. Usually, when the
73*4882a593Smuzhiyun	 *	 load address of this image is not at an arbitrary address,
74*4882a593Smuzhiyun	 *	 but aligned to some 10K's we shouldn't overlap.
75*4882a593Smuzhiyun	 */
76*4882a593Smuzhiyun
77*4882a593Smuzhiyun	/* Note: The assembler cannot relax "addi a0, a0, ..." to an
78*4882a593Smuzhiyun	   l32r, so we load to a4 first. */
79*4882a593Smuzhiyun
80*4882a593Smuzhiyun	# addi	a4, a0, __start - __start_a0
81*4882a593Smuzhiyun	# mov	a0, a4
82*4882a593Smuzhiyun
	/* a0 := runtime load address of __start:
	 *   a0(runtime __start_a0) + linked(__start) - linked(__start_a0) */
83*4882a593Smuzhiyun	movi	a4, __start
84*4882a593Smuzhiyun	movi	a5, __start_a0
85*4882a593Smuzhiyun	add	a4, a0, a4
86*4882a593Smuzhiyun	sub	a0, a4, a5
87*4882a593Smuzhiyun
88*4882a593Smuzhiyun	movi	a4, __start
89*4882a593Smuzhiyun	movi	a5, __reloc_end
90*4882a593Smuzhiyun
91*4882a593Smuzhiyun	# a0: address where this code has been loaded
92*4882a593Smuzhiyun	# a4: compiled address of __start
93*4882a593Smuzhiyun	# a5: compiled end address
94*4882a593Smuzhiyun
95*4882a593Smuzhiyun	mov.n	a7, a0
96*4882a593Smuzhiyun	mov.n	a8, a4
97*4882a593Smuzhiyun
	/* Copy loop: move the loader from its load address (a7) to its
	 * linked range [__start, __reloc_end) (a8), 16 bytes per pass.
	 * NOTE(review): the body executes before the test, so at least
	 * one 16-byte chunk is copied and the tail may overrun
	 * __reloc_end by up to 12 bytes — harmless given the layout
	 * assumptions above, but worth confirming. */
98*4882a593Smuzhiyun1:
99*4882a593Smuzhiyun	l32i	a10, a7, 0
100*4882a593Smuzhiyun	l32i	a11, a7, 4
101*4882a593Smuzhiyun	s32i	a10, a8, 0
102*4882a593Smuzhiyun	s32i	a11, a8, 4
103*4882a593Smuzhiyun	l32i	a10, a7, 8
104*4882a593Smuzhiyun	l32i	a11, a7, 12
105*4882a593Smuzhiyun	s32i	a10, a8, 8
106*4882a593Smuzhiyun	s32i	a11, a8, 12
107*4882a593Smuzhiyun	addi	a8, a8, 16
108*4882a593Smuzhiyun	addi	a7, a7, 16
109*4882a593Smuzhiyun	blt	a8, a5, 1b
110*4882a593Smuzhiyun
111*4882a593Smuzhiyun
112*4882a593Smuzhiyun	/* We have to flush and invalidate the caches here before we jump. */
113*4882a593Smuzhiyun
114*4882a593Smuzhiyun#if XCHAL_DCACHE_IS_WRITEBACK
115*4882a593Smuzhiyun
	/* Write the copied code back to memory so the icache refill
	 * below sees it (macro from <asm/cacheasm.h>; a5/a6 scratch). */
116*4882a593Smuzhiyun	___flush_dcache_all a5 a6
117*4882a593Smuzhiyun
118*4882a593Smuzhiyun#endif
119*4882a593Smuzhiyun
120*4882a593Smuzhiyun	___invalidate_icache_all a5 a6
121*4882a593Smuzhiyun	isync
122*4882a593Smuzhiyun
	/* Jump to the relocated copy: _reloc resolves to its linked
	 * address, which the loop above has just populated. */
123*4882a593Smuzhiyun	movi	a11, _reloc
124*4882a593Smuzhiyun	jx	a11
125*4882a593Smuzhiyun
	/*
	 * _reloc: runs at the linked address. Clears BSS, sets up a
	 * stack, locates the embedded compressed kernel, decompresses
	 * (or plainly copies) it to _image_start, then jumps to it with
	 * a2 still holding the boot parameter list.
	 */
126*4882a593Smuzhiyun	.globl _reloc
127*4882a593Smuzhiyun_reloc:
128*4882a593Smuzhiyun
129*4882a593Smuzhiyun	/* RedBoot is now at the end of the memory, so we don't have
130*4882a593Smuzhiyun	 * to copy the parameter list. Keep the code around; in case
131*4882a593Smuzhiyun	 * we need it again. */
132*4882a593Smuzhiyun#if 0
133*4882a593Smuzhiyun	# a0: load address
134*4882a593Smuzhiyun	# a2: start address of parameter list
135*4882a593Smuzhiyun	# a3: length of parameter list
136*4882a593Smuzhiyun	# a4: __start
137*4882a593Smuzhiyun
138*4882a593Smuzhiyun	/* copy the parameter list out of the way */
139*4882a593Smuzhiyun
140*4882a593Smuzhiyun	movi	a6, _param_start
141*4882a593Smuzhiyun	add	a3, a2, a3
142*4882a593Smuzhiyun2:
143*4882a593Smuzhiyun	l32i	a8, a2, 0
144*4882a593Smuzhiyun	s32i	a8, a6, 0
145*4882a593Smuzhiyun	addi	a2, a2, 4
146*4882a593Smuzhiyun	addi	a6, a6, 4
147*4882a593Smuzhiyun	blt	a2, a3, 2b
148*4882a593Smuzhiyun#endif
149*4882a593Smuzhiyun
	/* Zero [__bss_start, __bss_end), one word per pass.
	 * NOTE(review): body runs before the test, so one word is
	 * written even if the BSS were empty — confirm it never is. */
150*4882a593Smuzhiyun	/* clear BSS section */
151*4882a593Smuzhiyun	movi	a6, __bss_start
152*4882a593Smuzhiyun	movi	a7, __bss_end
153*4882a593Smuzhiyun	movi.n	a5, 0
154*4882a593Smuzhiyun3:
155*4882a593Smuzhiyun	s32i	a5, a6, 0
156*4882a593Smuzhiyun	addi	a6, a6, 4
157*4882a593Smuzhiyun	blt	a6, a7, 3b
158*4882a593Smuzhiyun
	/* sp := (_stack + STACK_SIZE) rounded down to 16 bytes. */
159*4882a593Smuzhiyun	movi	a5, -16
160*4882a593Smuzhiyun	movi	a1, _stack + STACK_SIZE
161*4882a593Smuzhiyun	and	a1, a1, a5
162*4882a593Smuzhiyun
163*4882a593Smuzhiyun	/* Uncompress the kernel */
164*4882a593Smuzhiyun
165*4882a593Smuzhiyun	# a0: load address
166*4882a593Smuzhiyun	# a2: boot parameter
167*4882a593Smuzhiyun	# a4: __start
168*4882a593Smuzhiyun
	/* a8 := a0 + (__image_load - __start): runtime address of the
	 * compressed image embedded in this loader. */
169*4882a593Smuzhiyun	movi	a3, __image_load
170*4882a593Smuzhiyun	sub	a4, a3, a4
171*4882a593Smuzhiyun	add	a8, a0, a4
172*4882a593Smuzhiyun
173*4882a593Smuzhiyun	# a1  Stack
174*4882a593Smuzhiyun	# a8(a4)  Load address of the image
175*4882a593Smuzhiyun
	/* complen := _image_end - _image_start; a9 points at it so the
	 * decompressor can read/update the length in place. */
176*4882a593Smuzhiyun	movi	a6, _image_start
177*4882a593Smuzhiyun	movi	a10, _image_end
178*4882a593Smuzhiyun	movi	a7, 0x1000000	# 16 MiB cap on decompressed size
179*4882a593Smuzhiyun	sub	a11, a10, a6
180*4882a593Smuzhiyun	movi	a9, complen
181*4882a593Smuzhiyun	s32i	a11, a9, 0
182*4882a593Smuzhiyun
	# a0 := 0 terminates the windowed-ABI call-frame chain
183*4882a593Smuzhiyun	movi	a0, 0
184*4882a593Smuzhiyun
185*4882a593Smuzhiyun	# a6 destination
186*4882a593Smuzhiyun	# a7 maximum size of destination
187*4882a593Smuzhiyun	# a8 source
188*4882a593Smuzhiyun	# a9 ptr to length
189*4882a593Smuzhiyun
	/* If gunzip resolves to address 0 (presumably a weak/absent
	 * symbol — confirm at link time), fall back to a plain copy at
	 * 1: below. callx4 rotates the window so the callee sees our
	 * a6..a9 as its a2..a5. */
190*4882a593Smuzhiyun	.extern gunzip
191*4882a593Smuzhiyun	movi	a4, gunzip
192*4882a593Smuzhiyun	beqz	a4, 1f
193*4882a593Smuzhiyun
194*4882a593Smuzhiyun	callx4	a4
195*4882a593Smuzhiyun
196*4882a593Smuzhiyun	j	2f
197*4882a593Smuzhiyun
198*4882a593Smuzhiyun
199*4882a593Smuzhiyun	# a6 destination start
200*4882a593Smuzhiyun	# a7 maximum size of destination
201*4882a593Smuzhiyun	# a8 source start
202*4882a593Smuzhiyun	# a9 ptr to length
203*4882a593Smuzhiyun	# a10 destination end
204*4882a593Smuzhiyun
	/* Uncompressed image: copy it verbatim to _image_start,
	 * 16 bytes per pass (same overshoot caveat as the relocation
	 * loop in _start). */
205*4882a593Smuzhiyun1:
206*4882a593Smuzhiyun        l32i    a9, a8, 0
207*4882a593Smuzhiyun        l32i    a11, a8, 4
208*4882a593Smuzhiyun        s32i    a9, a6, 0
209*4882a593Smuzhiyun        s32i    a11, a6, 4
210*4882a593Smuzhiyun        l32i    a9, a8, 8
211*4882a593Smuzhiyun        l32i    a11, a8, 12
212*4882a593Smuzhiyun        s32i    a9, a6, 8
213*4882a593Smuzhiyun        s32i    a11, a6, 12
214*4882a593Smuzhiyun        addi    a6, a6, 16
215*4882a593Smuzhiyun        addi    a8, a8, 16
216*4882a593Smuzhiyun        blt     a6, a10, 1b
217*4882a593Smuzhiyun
218*4882a593Smuzhiyun
	/* Flush written kernel text, invalidate stale icache lines,
	 * then enter the kernel image. */
219*4882a593Smuzhiyun	/* jump to the kernel */
220*4882a593Smuzhiyun2:
221*4882a593Smuzhiyun#if XCHAL_DCACHE_IS_WRITEBACK
222*4882a593Smuzhiyun
223*4882a593Smuzhiyun	___flush_dcache_all a5 a6
224*4882a593Smuzhiyun
225*4882a593Smuzhiyun#endif
226*4882a593Smuzhiyun
227*4882a593Smuzhiyun	___invalidate_icache_all a5 a6
228*4882a593Smuzhiyun
229*4882a593Smuzhiyun	isync
230*4882a593Smuzhiyun
231*4882a593Smuzhiyun	# a2  Boot parameter list
232*4882a593Smuzhiyun
233*4882a593Smuzhiyun	movi	a0, _image_start
234*4882a593Smuzhiyun	jx	a0
235*4882a593Smuzhiyun
236*4882a593Smuzhiyun	.align 16
237*4882a593Smuzhiyun	.data
238*4882a593Smuzhiyun	.globl avail_ram
239*4882a593Smuzhiyunavail_ram:
240*4882a593Smuzhiyun	.long	_heap
241*4882a593Smuzhiyun	.globl end_avail
242*4882a593Smuzhiyunend_avail:
243*4882a593Smuzhiyun	.long	_heap + HEAP_SIZE
244*4882a593Smuzhiyun
245*4882a593Smuzhiyun	.comm _stack, STACK_SIZE
246*4882a593Smuzhiyun	.comm _heap, HEAP_SIZE
247*4882a593Smuzhiyun
248*4882a593Smuzhiyun	.globl end_avail
249*4882a593Smuzhiyun	.comm complen, 4
250*4882a593Smuzhiyun
251*4882a593Smuzhiyun	.end	literal_prefix
252