xref: /rk3399_ARM-atf/bl2u/bl2u.ld.S (revision f7d445fcbbd3d5146d95698ace3381fcf522b9af)
/*
 * Copyright (c) 2015-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
69003fa0bSYatharth Kochar
79003fa0bSYatharth Kochar#include <platform_def.h>
809d40e0eSAntonio Nino Diaz
9665e71b8SMasahiro Yamada#include <common/bl_common.ld.h>
1009d40e0eSAntonio Nino Diaz#include <lib/xlat_tables/xlat_tables_defs.h>
119003fa0bSYatharth Kochar
/*
 * Target object format and architecture are supplied per-platform via
 * platform_def.h; the image entry point is the BL2U assembly entrypoint.
 */
OUTPUT_FORMAT(PLATFORM_LINKER_FORMAT)
OUTPUT_ARCH(PLATFORM_LINKER_ARCH)
ENTRY(bl2u_entrypoint)
159003fa0bSYatharth Kochar
/*
 * Single RWX memory region for the whole BL2U image, bounded by the
 * platform-defined BL2U_BASE/BL2U_LIMIT window.
 */
MEMORY {
    RAM (rwx): ORIGIN = BL2U_BASE, LENGTH = BL2U_LIMIT - BL2U_BASE
}
199003fa0bSYatharth Kochar
SECTIONS {
    /* Expose the RAM region bounds for platform/memory-map consumers. */
    RAM_REGION_START = ORIGIN(RAM);
    RAM_REGION_LENGTH = LENGTH(RAM);
    . = BL2U_BASE;

    /*
     * The MMU maps the image with page granularity, so the load address
     * must itself be page-aligned.
     */
    ASSERT(. == ALIGN(PAGE_SIZE),
        "BL2U_BASE address is not aligned on a page boundary.")

#if SEPARATE_CODE_AND_RODATA
    /*
     * Code and read-only data live in separate, individually page-aligned
     * sections so they can be mapped with distinct memory attributes
     * (executable vs. non-executable read-only).
     */
    .text . : {
        __TEXT_START__ = .;

        /* The entrypoint object must come first so execution starts there. */
        *bl2u_entrypoint.o(.text*)
        *(SORT_BY_ALIGNMENT(.text*))
        *(.vectors)
        __TEXT_END_UNALIGNED__ = .;

        . = ALIGN(PAGE_SIZE);

        __TEXT_END__ = .;
    } >RAM

    /* .ARM.extab and .ARM.exidx are only added because Clang needs them */
    .ARM.extab . : {
        *(.ARM.extab* .gnu.linkonce.armextab.*)
    } >RAM

    .ARM.exidx . : {
        *(.ARM.exidx* .gnu.linkonce.armexidx.*)
    } >RAM

    .rodata . : {
        __RODATA_START__ = .;
        *(SORT_BY_ALIGNMENT(.rodata*))

        /* Common read-only structures shared by all BL images. */
        RODATA_COMMON

        __RODATA_END_UNALIGNED__ = .;
        . = ALIGN(PAGE_SIZE);
        __RODATA_END__ = .;
    } >RAM
#else /* SEPARATE_CODE_AND_RODATA */
    /* Combined read-only section: code, rodata and vectors share pages. */
    .ro . : {
        __RO_START__ = .;

        /* The entrypoint object must come first so execution starts there. */
        *bl2u_entrypoint.o(.text*)
        *(SORT_BY_ALIGNMENT(.text*))
        *(SORT_BY_ALIGNMENT(.rodata*))

        /* Common read-only structures shared by all BL images. */
        RODATA_COMMON

        *(.vectors)

        __RO_END_UNALIGNED__ = .;

        /*
         * Memory page(s) mapped to this section will be marked as read-only,
         * executable. No RW data from the next section must creep in. Ensure
         * that the rest of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);

        __RO_END__ = .;
    } >RAM
#endif /* SEPARATE_CODE_AND_RODATA */

    /* Everything from here to __RW_END__ is mapped read-write. */
    __RW_START__ = .;

    /* Common section definitions provided by common/bl_common.ld.h. */
    DATA_SECTION >RAM
    STACK_SECTION >RAM
    BSS_SECTION >RAM
    XLAT_TABLE_SECTION >RAM

#if USE_COHERENT_MEM
    /*
     * The base address of the coherent memory section must be page-aligned to
     * guarantee that the coherent data are stored on their own pages and are
     * not mixed with normal data.  This is required to set up the correct
     * memory attributes for the coherent data page tables.
     */
    .coherent_ram (NOLOAD) : ALIGN(PAGE_SIZE) {
        __COHERENT_RAM_START__ = .;
        *(.tzfw_coherent_mem)
        __COHERENT_RAM_END_UNALIGNED__ = .;

        /*
         * Memory page(s) mapped to this section will be marked as device
         * memory. No other unexpected data must creep in. Ensure the rest of
         * the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);

        __COHERENT_RAM_END__ = .;
    } >RAM
#endif /* USE_COHERENT_MEM */

    __RW_END__ = .;
    __BL2U_END__ = .;

    /* .bss itself is defined inside BSS_SECTION (bl_common.ld.h). */
    __BSS_SIZE__ = SIZEOF(.bss);

    /* Fail the link rather than silently overflow the platform window. */
    ASSERT(. <= BL2U_LIMIT, "BL2U image has exceeded its limit.")
    RAM_REGION_END = .;
}
124