/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <platform_def.h>

#include <common/bl_common.ld.h>
#include <lib/xlat_tables/xlat_tables_defs.h>

OUTPUT_FORMAT(PLATFORM_LINKER_FORMAT)
OUTPUT_ARCH(PLATFORM_LINKER_ARCH)
ENTRY(bl2_entrypoint)

MEMORY {
    RAM (rwx): ORIGIN = BL2_BASE, LENGTH = BL2_LIMIT - BL2_BASE
}
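
/*
 * Note: BL2_BASE and BL2_LIMIT are expected to be provided by the platform
 * through platform_def.h (included above). The whole BL2 image must fit in
 * this single RAM region, which the final ASSERT at the end of SECTIONS
 * also checks.
 */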


SECTIONS
{
    . = BL2_BASE;
    ASSERT(. == ALIGN(PAGE_SIZE),
           "BL2_BASE address is not aligned on a page boundary.")

#if SEPARATE_CODE_AND_RODATA
    .text . : {
        __TEXT_START__ = .;
        *bl2_entrypoint.o(.text*)
        *(SORT_BY_ALIGNMENT(.text*))
        *(.vectors)
        . = ALIGN(PAGE_SIZE);
        __TEXT_END__ = .;
     } >RAM

     /* .ARM.extab and .ARM.exidx are only added because Clang needs them */
     .ARM.extab . : {
        *(.ARM.extab* .gnu.linkonce.armextab.*)
     } >RAM

     .ARM.exidx . : {
        *(.ARM.exidx* .gnu.linkonce.armexidx.*)
     } >RAM

    .rodata . : {
        __RODATA_START__ = .;
        *(SORT_BY_ALIGNMENT(.rodata*))

        . = ALIGN(8);
        __FCONF_POPULATOR_START__ = .;
        KEEP(*(.fconf_populator))
        __FCONF_POPULATOR_END__ = .;
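
        /*
         * Note: populator descriptors are expected to land in the
         * .fconf_populator input section via the firmware configuration
         * framework's registration macro (FCONF_REGISTER_POPULATOR in
         * lib/fconf). Nothing references them directly; the framework walks
         * the region bounded by the symbols above, so KEEP() is needed to
         * stop --gc-sections from discarding them.
         */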

        /* Ensure 8-byte alignment for descriptors and ensure inclusion */
        . = ALIGN(8);
        __PARSER_LIB_DESCS_START__ = .;
        KEEP(*(.img_parser_lib_descs))
        __PARSER_LIB_DESCS_END__ = .;
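
        /*
         * Note: image parser library descriptors are expected to be emitted
         * into .img_parser_lib_descs by the REGISTER_IMG_PARSER_LIB() macro
         * of the Trusted Board Boot image parser module, which iterates the
         * region bounded by __PARSER_LIB_DESCS_START__ and
         * __PARSER_LIB_DESCS_END__ at runtime. As above, KEEP() prevents the
         * linker from garbage-collecting these otherwise unreferenced entries.
         */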

        . = ALIGN(PAGE_SIZE);
        __RODATA_END__ = .;
    } >RAM
#else
    ro . : {
        __RO_START__ = .;
        *bl2_entrypoint.o(.text*)
        *(SORT_BY_ALIGNMENT(.text*))
        *(SORT_BY_ALIGNMENT(.rodata*))

        . = ALIGN(8);
        __FCONF_POPULATOR_START__ = .;
        KEEP(*(.fconf_populator))
        __FCONF_POPULATOR_END__ = .;

        /* Ensure 8-byte alignment for descriptors and ensure inclusion */
        . = ALIGN(8);
        __PARSER_LIB_DESCS_START__ = .;
        KEEP(*(.img_parser_lib_descs))
        __PARSER_LIB_DESCS_END__ = .;

        *(.vectors)
        __RO_END_UNALIGNED__ = .;
        /*
         * Memory page(s) mapped to this section will be marked as
         * read-only, executable.  No RW data from the next section must
         * creep in.  Ensure the rest of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);
        __RO_END__ = .;
    } >RAM
#endif

    /*
     * Define a linker symbol to mark start of the RW memory area for this
     * image.
     */
    __RW_START__ = . ;
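
    /*
     * Note: everything between __RW_START__ and __RW_END__ (defined further
     * down) is the read-write part of the image, which the platform's MMU
     * setup is expected to map as non-executable data. Illustrative sketch
     * of how such linker symbols are typically consumed on the C side,
     * usually via the IMPORT_SYM() helper; the BL2_RW_BASE name here is
     * only an example:
     *
     *     IMPORT_SYM(uintptr_t, __RW_START__, BL2_RW_BASE);
     */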

    /*
     * .data must be placed at a lower address than the stacks if the stack
     * protector is enabled. Alternatively, the .data.stack_protector_canary
     * section can be placed independently of the main .data section.
     */
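    /*
     * A minimal sketch of that alternative placement, assuming the canary
     * is emitted into .data.stack_protector_canary (illustrative only, not
     * part of this script):
     *
     *     .data.stack_protector_canary . : {
     *         *(.data.stack_protector_canary)
     *     } >RAM
     */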
    .data . : {
        __DATA_START__ = .;
        *(SORT_BY_ALIGNMENT(.data*))
        __DATA_END__ = .;
    } >RAM

    stacks (NOLOAD) : {
        __STACKS_START__ = .;
        *(tzfw_normal_stacks)
        __STACKS_END__ = .;
    } >RAM

    /*
     * The .bss section gets initialised to 0 at runtime.
     * Its base address should be 16-byte aligned for better performance of the
     * zero-initialization code.
     */
    .bss : ALIGN(16) {
        __BSS_START__ = .;
        *(SORT_BY_ALIGNMENT(.bss*))
        *(COMMON)
        __BSS_END__ = .;
    } >RAM
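
    /*
     * Note: the region between __BSS_START__ and __BSS_END__ (its size is
     * also exported as __BSS_SIZE__ below) is expected to be cleared by the
     * early boot code before any C code runs, typically with the zeromem
     * helper, i.e. roughly equivalent to:
     *
     *     memset(bss_start, 0, bss_end - bss_start);
     */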

    XLAT_TABLE_SECTION >RAM
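
    /*
     * Note: XLAT_TABLE_SECTION is provided by common/bl_common.ld.h
     * (included above). It is expected to expand to a NOLOAD output section
     * collecting the translation tables that the xlat_tables library places
     * in the "xlat_table" input section, along the lines of:
     *
     *     xlat_table (NOLOAD) : { *(xlat_table) }
     */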

#if USE_COHERENT_MEM
    /*
     * The base address of the coherent memory section must be page-aligned (4K)
     * to guarantee that the coherent data are stored on their own pages and
     * are not mixed with normal data.  This is required to set up the correct
     * memory attributes for the coherent data page tables.
     */
    coherent_ram (NOLOAD) : ALIGN(PAGE_SIZE) {
        __COHERENT_RAM_START__ = .;
        *(tzfw_coherent_mem)
        __COHERENT_RAM_END_UNALIGNED__ = .;
        /*
         * Memory page(s) mapped to this section will be marked
         * as device memory.  No other unexpected data must creep in.
         * Ensure the rest of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);
        __COHERENT_RAM_END__ = .;
    } >RAM
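
    /*
     * Note: objects are placed in the tzfw_coherent_mem input section by
     * declaring them with a section attribute, along the lines of this
     * illustrative sketch (the variable name is made up):
     *
     *     static int shared_flag __section("tzfw_coherent_mem");
     *
     * This memory is typically used for data that must be accessed while
     * data caches are disabled, which is why the pages covering it are
     * mapped as device memory.
     */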
#endif

    /*
     * Define a linker symbol to mark end of the RW memory area for this
     * image.
     */
    __RW_END__ = .;
    __BL2_END__ = .;

    __BSS_SIZE__ = SIZEOF(.bss);

#if USE_COHERENT_MEM
    __COHERENT_RAM_UNALIGNED_SIZE__ =
        __COHERENT_RAM_END_UNALIGNED__ - __COHERENT_RAM_START__;
#endif

    ASSERT(. <= BL2_LIMIT, "BL2 image has exceeded its limit.")
}