xref: /OK3568_Linux_fs/u-boot/arch/arm/cpu/u-boot.lds (revision 4882a59341e53eb6f0b4789bf948001014eff981)
/*
 * Copyright (c) 2004-2008 Texas Instruments
 *
 * (C) Copyright 2002
 * Gary Jennejohn, DENX Software Engineering, <garyj@denx.de>
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#include <config.h>
#include <asm/psci.h>

OUTPUT_FORMAT("elf32-littlearm", "elf32-littlearm", "elf32-littlearm")
OUTPUT_ARCH(arm)
ENTRY(_start)
SECTIONS
{
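	/*
	 * Note: when the command line is disabled and commands are not
	 * forced on, the command entries collected in the linker-generated
	 * .u_boot_list_2_cmd_* input sections are discarded below so they
	 * do not take up space in the image.
	 */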
#if !defined(CONFIG_CMDLINE) && !defined(CONFIG_U_BOOT_CMD_ALWAYS)
	/DISCARD/ : { *(.u_boot_list_2_cmd_*) }
#endif
#if defined(CONFIG_ARMV7_SECURE_BASE) && defined(CONFIG_ARMV7_NONSEC)
	/*
	 * If CONFIG_ARMV7_SECURE_BASE is defined, the secure code is not
	 * bundled with U-Boot and its code offsets are fixed. The secure
	 * zone only needs to be copied from the load address to
	 * CONFIG_ARMV7_SECURE_BASE, which is the link and run address
	 * of the secure code.
	 *
	 * If CONFIG_ARMV7_SECURE_BASE is undefined, the secure zone is
	 * included in the U-Boot address space, and absolute addresses
	 * are used in the secure code. Those absolute addresses must be
	 * relocated along with the rest of the U-Boot code.
	 *
	 * So the DISCARD applies only when CONFIG_ARMV7_SECURE_BASE is
	 * defined.
	 */
	/DISCARD/ : { *(.rel._secure*) }
#endif
	. = 0x00000000;

	. = ALIGN(4);
	.text :
	{
		*(.__image_copy_start)
		*(.vectors)
		CPUDIR/start.o (.text*)
		*(.text*)
	}

#ifdef CONFIG_ARMV7_NONSEC

	/* Align the secure section only if we're going to use it in situ */
	.__secure_start :
#ifndef CONFIG_ARMV7_SECURE_BASE
		ALIGN(CONSTANT(COMMONPAGESIZE))
#endif
	{
		KEEP(*(.__secure_start))
	}

#ifndef CONFIG_ARMV7_SECURE_BASE
#define CONFIG_ARMV7_SECURE_BASE
#define __ARMV7_PSCI_STACK_IN_RAM
#endif
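	/*
	 * Note: when no dedicated secure base is configured, the define
	 * above expands CONFIG_ARMV7_SECURE_BASE to nothing, so the
	 * .secure_text section below simply takes the current location
	 * counter as its VMA, and __ARMV7_PSCI_STACK_IN_RAM records that
	 * the PSCI stack lives in normal RAM rather than in secure SRAM.
	 */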

	.secure_text CONFIG_ARMV7_SECURE_BASE :
		AT(ADDR(.__secure_start) + SIZEOF(.__secure_start))
	{
		*(._secure.text)
	}

	.secure_data : AT(LOADADDR(.secure_text) + SIZEOF(.secure_text))
	{
		*(._secure.data)
	}

#ifdef CONFIG_ARMV7_PSCI
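	/*
	 * The PSCI stack is NOLOAD: only address space is reserved, one
	 * ARM_PSCI_STACK_SIZE slot per CPU. Its load address (AT) differs:
	 * with the stack in RAM the LMA follows the VMA, while with a
	 * dedicated secure SRAM the LMA continues right after .secure_data
	 * and the VMA is wound back again below.
	 */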
	.secure_stack ALIGN(ADDR(.secure_data) + SIZEOF(.secure_data),
			    CONSTANT(COMMONPAGESIZE)) (NOLOAD) :
#ifdef __ARMV7_PSCI_STACK_IN_RAM
		AT(ADDR(.secure_stack))
#else
		AT(LOADADDR(.secure_data) + SIZEOF(.secure_data))
#endif
	{
		KEEP(*(.__secure_stack_start))

		/* Skip the addresses reserved for the per-CPU stacks */
		. = . + CONFIG_ARMV7_PSCI_NR_CPUS * ARM_PSCI_STACK_SIZE;

		/* Align end of stack section to page boundary */
		. = ALIGN(CONSTANT(COMMONPAGESIZE));

		KEEP(*(.__secure_stack_end))

#ifdef CONFIG_ARMV7_SECURE_MAX_SIZE
		/*
		 * We are not checking (__secure_end - __secure_start) here,
		 * as these are the load addresses, and do not include the
		 * stack section. Instead, use the end of the stack section
		 * and the start of the text section.
		 */
		ASSERT((. - ADDR(.secure_text)) <= CONFIG_ARMV7_SECURE_MAX_SIZE,
		       "Error: secure section exceeds secure memory size");
#endif
	}

#ifndef __ARMV7_PSCI_STACK_IN_RAM
	/* Reset VMA but don't allocate space if we have secure SRAM */
	. = LOADADDR(.secure_stack);
#endif

#endif

	.__secure_end : AT(ADDR(.__secure_end)) {
		*(.__secure_end)
		LONG(0x1d1071c);	/* Must output something to reset LMA */
	}
#endif

	. = ALIGN(4);
	.rodata : { *(SORT_BY_ALIGNMENT(SORT_BY_NAME(.rodata*))) }

	. = ALIGN(4);
	.data : {
		*(.data*)
	}

	. = ALIGN(4);

	. = .;

	. = ALIGN(4);
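	/*
	 * Linker-generated lists (commands, drivers, etc.) collected by
	 * the linker_lists mechanism; SORT keeps the entries of each list
	 * grouped and ordered by section name.
	 */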
	.u_boot_list : {
		KEEP(*(SORT(.u_boot_list*)));
	}

	/*
	 * Stack unwinding tables
	 */
	. = ALIGN(8);
	/* .ARM.exidx is sorted, so has to go in its own output section. */
	.ARM.exidx : {
		__exidx_start = .;
		*(.ARM.exidx*)
		__exidx_end = .;
	}

	.ARM.extab : {
		__extab_start = .;
		*(.ARM.extab*)
		__extab_end = .;
	}

	. = ALIGN(4);

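	/*
	 * The __efi_runtime_start/stop and __efi_runtime_rel_start/stop
	 * markers bracket code, data and relocation records that must
	 * remain usable by UEFI runtime services after U-Boot itself is
	 * gone, so the EFI loader can track and relocate them separately.
	 */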
	.__efi_runtime_start : {
		*(.__efi_runtime_start)
	}

	.efi_runtime : {
		*(efi_runtime_text)
		*(efi_runtime_data)
	}

	.__efi_runtime_stop : {
		*(.__efi_runtime_stop)
	}

	.efi_runtime_rel_start :
	{
		*(.__efi_runtime_rel_start)
	}

	.efi_runtime_rel : {
		*(.relefi_runtime_text)
		*(.relefi_runtime_data)
	}

	.efi_runtime_rel_stop :
	{
		*(.__efi_runtime_rel_stop)
	}

	. = ALIGN(8);

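	/*
	 * __image_copy_end marks the end of the region copied when U-Boot
	 * relocates itself; the __rel_dyn_start/__rel_dyn_end markers that
	 * follow bracket the R_ARM_RELATIVE entries the relocation code
	 * walks to fix up absolute addresses at the new location.
	 */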
	.image_copy_end :
	{
		*(.__image_copy_end)
	}

	.rel_dyn_start :
	{
		*(.__rel_dyn_start)
	}

	.rel.dyn : {
		*(.rel*)
	}

	.rel_dyn_end :
	{
		*(.__rel_dyn_end)
	}

	.end :
	{
		. = ALIGN(8);
		*(.__end)
	}

	_image_binary_end = .;

	/*
	 * Deprecated: this MMU section is used by pxa at present but
	 * should not be used by new boards/CPUs.
	 */
	. = ALIGN(4096);
	.mmutable : {
		*(.mmutable)
	}

/*
 * Compiler-generated __bss_start and __bss_end, see arch/arm/lib/bss.c
 * __bss_base and __bss_limit are for linker only (overlay ordering)
 */

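	/*
	 * The BSS output sections are overlaid on top of .rel.dyn: the
	 * relocation entries are not needed at the relocated address, so
	 * the same address range is reused for the (zero-initialised) BSS
	 * after U-Boot has relocated itself.
	 */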
	.bss_start __rel_dyn_start (OVERLAY) : {
		KEEP(*(.__bss_start));
		__bss_base = .;
	}

	.bss __bss_base (OVERLAY) : {
		*(.bss*)
		. = ALIGN(4);
		__bss_limit = .;
	}

	.bss_end __bss_limit (OVERLAY) : {
		KEEP(*(.__bss_end));
	}

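	/*
	 * Dynamic-linking sections emitted by the toolchain. They are
	 * collected here, past _image_binary_end, so they are accepted by
	 * the linker but are not part of the region copied at relocation
	 * and are not used at run time.
	 */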
	.dynsym _image_binary_end : { *(.dynsym) }
	.dynbss : { *(.dynbss) }
	.dynstr : { *(.dynstr*) }
	.dynamic : { *(.dynamic*) }
	.plt : { *(.plt*) }
	.interp : { *(.interp*) }
	.gnu.hash : { *(.gnu.hash) }
	.gnu : { *(.gnu*) }
	.ARM.exidx : { *(.ARM.exidx*) }
	.gnu.linkonce.armexidx : { *(.gnu.linkonce.armexidx.*) }
}