/* xref: /rk3399_ARM-atf/bl2/bl2.ld.S (revision ab1981db9ea793accf1279446b9f7666a3be04ca) */
/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <platform_def.h>

#include <lib/xlat_tables/xlat_tables_defs.h>

/* Output format/arch are platform macros; BL2 execution starts at bl2_entrypoint. */
OUTPUT_FORMAT(PLATFORM_LINKER_FORMAT)
OUTPUT_ARCH(PLATFORM_LINKER_ARCH)
ENTRY(bl2_entrypoint)
144f6ad66aSAchin Gupta
/* The whole BL2 image (code, data, stacks, page tables) lives in one RAM region. */
MEMORY {
    RAM (rwx): ORIGIN = BL2_BASE, LENGTH = BL2_LIMIT - BL2_BASE
}
184f6ad66aSAchin Gupta
194f6ad66aSAchin Gupta
SECTIONS
{
    . = BL2_BASE;
    ASSERT(. == ALIGN(PAGE_SIZE),
           "BL2_BASE address is not aligned on a page boundary.")

#if SEPARATE_CODE_AND_RODATA
    .text . : {
        __TEXT_START__ = .;
        *bl2_entrypoint.o(.text*)
        *(SORT_BY_ALIGNMENT(.text*))
        *(.vectors)
        . = ALIGN(PAGE_SIZE);
        __TEXT_END__ = .;
     } >RAM

     /* .ARM.extab and .ARM.exidx are only added because Clang needs them */
     .ARM.extab . : {
        *(.ARM.extab* .gnu.linkonce.armextab.*)
     } >RAM

     .ARM.exidx . : {
        *(.ARM.exidx* .gnu.linkonce.armexidx.*)
     } >RAM

    .rodata . : {
        __RODATA_START__ = .;
        *(SORT_BY_ALIGNMENT(.rodata*))

        /* Ensure 8-byte alignment for fconf populator entries and ensure inclusion */
        . = ALIGN(8);
        __FCONF_POPULATOR_START__ = .;
        KEEP(*(.fconf_populator))
        __FCONF_POPULATOR_END__ = .;

        /* Ensure 8-byte alignment for descriptors and ensure inclusion */
        . = ALIGN(8);
        __PARSER_LIB_DESCS_START__ = .;
        KEEP(*(.img_parser_lib_descs))
        __PARSER_LIB_DESCS_END__ = .;

        . = ALIGN(PAGE_SIZE);
        __RODATA_END__ = .;
    } >RAM
#else
    ro . : {
        __RO_START__ = .;
        *bl2_entrypoint.o(.text*)
        *(SORT_BY_ALIGNMENT(.text*))
        *(SORT_BY_ALIGNMENT(.rodata*))

        /* Ensure 8-byte alignment for fconf populator entries and ensure inclusion */
        . = ALIGN(8);
        __FCONF_POPULATOR_START__ = .;
        KEEP(*(.fconf_populator))
        __FCONF_POPULATOR_END__ = .;

        /* Ensure 8-byte alignment for descriptors and ensure inclusion */
        . = ALIGN(8);
        __PARSER_LIB_DESCS_START__ = .;
        KEEP(*(.img_parser_lib_descs))
        __PARSER_LIB_DESCS_END__ = .;

        *(.vectors)
        __RO_END_UNALIGNED__ = .;
        /*
         * Memory page(s) mapped to this section will be marked as
         * read-only, executable.  No RW data from the next section must
         * creep in.  Ensure the rest of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);
        __RO_END__ = .;
    } >RAM
#endif

    /*
     * Define a linker symbol to mark start of the RW memory area for this
     * image.
     */
    __RW_START__ = . ;

    /*
     * .data must be placed at a lower address than the stacks if the stack
     * protector is enabled. Alternatively, the .data.stack_protector_canary
     * section can be placed independently of the main .data section.
     */
    .data . : {
        __DATA_START__ = .;
        *(SORT_BY_ALIGNMENT(.data*))
        __DATA_END__ = .;
    } >RAM

    stacks (NOLOAD) : {
        __STACKS_START__ = .;
        *(tzfw_normal_stacks)
        __STACKS_END__ = .;
    } >RAM

    /*
     * The .bss section gets initialised to 0 at runtime.
     * Its base address should be 16-byte aligned for better performance of the
     * zero-initialization code.
     */
    .bss : ALIGN(16) {
        __BSS_START__ = .;
        *(SORT_BY_ALIGNMENT(.bss*))
        *(COMMON)
        __BSS_END__ = .;
    } >RAM

    /*
     * The xlat_table section is for full, aligned page tables (4K).
     * Removing them from .bss avoids forcing 4K alignment on
     * the .bss section. The tables are initialized to zero by the translation
     * tables library.
     */
    xlat_table (NOLOAD) : {
        *(xlat_table)
    } >RAM

#if USE_COHERENT_MEM
    /*
     * The base address of the coherent memory section must be page-aligned (4K)
     * to guarantee that the coherent data are stored on their own pages and
     * are not mixed with normal data.  This is required to set up the correct
     * memory attributes for the coherent data page tables.
     */
    coherent_ram (NOLOAD) : ALIGN(PAGE_SIZE) {
        __COHERENT_RAM_START__ = .;
        *(tzfw_coherent_mem)
        __COHERENT_RAM_END_UNALIGNED__ = .;
        /*
         * Memory page(s) mapped to this section will be marked
         * as device memory.  No other unexpected data must creep in.
         * Ensure the rest of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);
        __COHERENT_RAM_END__ = .;
    } >RAM
#endif

    /*
     * Define a linker symbol to mark end of the RW memory area for this
     * image.
     */
    __RW_END__ = .;
    __BL2_END__ = .;

    __BSS_SIZE__ = SIZEOF(.bss);

#if USE_COHERENT_MEM
    __COHERENT_RAM_UNALIGNED_SIZE__ =
        __COHERENT_RAM_END_UNALIGNED__ - __COHERENT_RAM_START__;
#endif

    ASSERT(. <= BL2_LIMIT, "BL2 image has exceeded its limit.")
}