xref: /rk3399_ARM-atf/bl1/bl1.ld.S (revision 8d69a03f6a7db3c437b7cfdd15402627277d8cb4)
14f6ad66aSAchin Gupta/*
24f6ad66aSAchin Gupta * Copyright (c) 2013, ARM Limited. All rights reserved.
34f6ad66aSAchin Gupta *
44f6ad66aSAchin Gupta * Redistribution and use in source and binary forms, with or without
54f6ad66aSAchin Gupta * modification, are permitted provided that the following conditions are met:
64f6ad66aSAchin Gupta *
74f6ad66aSAchin Gupta * Redistributions of source code must retain the above copyright notice, this
84f6ad66aSAchin Gupta * list of conditions and the following disclaimer.
94f6ad66aSAchin Gupta *
104f6ad66aSAchin Gupta * Redistributions in binary form must reproduce the above copyright notice,
114f6ad66aSAchin Gupta * this list of conditions and the following disclaimer in the documentation
124f6ad66aSAchin Gupta * and/or other materials provided with the distribution.
134f6ad66aSAchin Gupta *
144f6ad66aSAchin Gupta * Neither the name of ARM nor the names of its contributors may be used
154f6ad66aSAchin Gupta * to endorse or promote products derived from this software without specific
164f6ad66aSAchin Gupta * prior written permission.
174f6ad66aSAchin Gupta *
184f6ad66aSAchin Gupta * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
194f6ad66aSAchin Gupta * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
204f6ad66aSAchin Gupta * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
214f6ad66aSAchin Gupta * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
224f6ad66aSAchin Gupta * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
234f6ad66aSAchin Gupta * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
244f6ad66aSAchin Gupta * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
254f6ad66aSAchin Gupta * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
264f6ad66aSAchin Gupta * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
274f6ad66aSAchin Gupta * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
284f6ad66aSAchin Gupta * POSSIBILITY OF SUCH DAMAGE.
294f6ad66aSAchin Gupta */
304f6ad66aSAchin Gupta
314f6ad66aSAchin Gupta#include <platform.h>
324f6ad66aSAchin Gupta
334f6ad66aSAchin GuptaOUTPUT_FORMAT(PLATFORM_LINKER_FORMAT)
344f6ad66aSAchin GuptaOUTPUT_ARCH(PLATFORM_LINKER_ARCH)
354f6ad66aSAchin Gupta
364f6ad66aSAchin GuptaMEMORY {
374f6ad66aSAchin Gupta    ROM (rx): ORIGIN = TZROM_BASE, LENGTH = TZROM_SIZE
384f6ad66aSAchin Gupta    RAM (rwx): ORIGIN = TZRAM_BASE, LENGTH = TZRAM_SIZE
394f6ad66aSAchin Gupta}
404f6ad66aSAchin Gupta
414f6ad66aSAchin GuptaSECTIONS
424f6ad66aSAchin Gupta{
43*8d69a03fSSandrine Bailleux    ro : {
44*8d69a03fSSandrine Bailleux        __RO_START__ = .;
45*8d69a03fSSandrine Bailleux        *bl1_entrypoint.o(.text)
464f6ad66aSAchin Gupta        *(.text)
47*8d69a03fSSandrine Bailleux        *(.rodata*)
48*8d69a03fSSandrine Bailleux        __RO_END__ = .;
494f6ad66aSAchin Gupta    } >ROM
504f6ad66aSAchin Gupta
51*8d69a03fSSandrine Bailleux    /*
52*8d69a03fSSandrine Bailleux     * The .data section gets copied from ROM to RAM at runtime.
53*8d69a03fSSandrine Bailleux     * Its LMA and VMA must be 16-byte aligned.
54*8d69a03fSSandrine Bailleux     */
55*8d69a03fSSandrine Bailleux    . = NEXT(16);        /* Align LMA */
56*8d69a03fSSandrine Bailleux    .data : ALIGN(16) {  /* Align VMA */
574f6ad66aSAchin Gupta        __DATA_RAM_START__ = .;
584f6ad66aSAchin Gupta        *(.data)
59*8d69a03fSSandrine Bailleux        __DATA_RAM_END__ = .;
604f6ad66aSAchin Gupta    } >RAM AT>ROM
614f6ad66aSAchin Gupta
62*8d69a03fSSandrine Bailleux    stacks (NOLOAD) : {
63*8d69a03fSSandrine Bailleux        __STACKS_START__ = .;
644f6ad66aSAchin Gupta        *(tzfw_normal_stacks)
65*8d69a03fSSandrine Bailleux        __STACKS_END__ = .;
664f6ad66aSAchin Gupta    } >RAM
674f6ad66aSAchin Gupta
68*8d69a03fSSandrine Bailleux    /*
69*8d69a03fSSandrine Bailleux     * The .bss section gets initialised to 0 at runtime.
70*8d69a03fSSandrine Bailleux     * Its base address must be 16-byte aligned.
71*8d69a03fSSandrine Bailleux     */
72*8d69a03fSSandrine Bailleux    .bss : ALIGN(16) {
73*8d69a03fSSandrine Bailleux        __BSS_START__ = .;
74*8d69a03fSSandrine Bailleux        *(.bss)
75*8d69a03fSSandrine Bailleux        *(COMMON)
76*8d69a03fSSandrine Bailleux        __BSS_END__ = .;
77*8d69a03fSSandrine Bailleux    } >RAM
784f6ad66aSAchin Gupta
79*8d69a03fSSandrine Bailleux    /*
80*8d69a03fSSandrine Bailleux     * The base address of the coherent memory section must be page-aligned (4K)
81*8d69a03fSSandrine Bailleux     * to guarantee that the coherent data are stored on their own pages and
82*8d69a03fSSandrine Bailleux     * are not mixed with normal data.  This is required to set up the correct
83*8d69a03fSSandrine Bailleux     * memory attributes for the coherent data page tables.
84*8d69a03fSSandrine Bailleux     */
85*8d69a03fSSandrine Bailleux    coherent_ram (NOLOAD) : ALIGN(4096) {
86*8d69a03fSSandrine Bailleux        __COHERENT_RAM_START__ = .;
87*8d69a03fSSandrine Bailleux        *(tzfw_coherent_mem)
88*8d69a03fSSandrine Bailleux        __COHERENT_RAM_END_UNALIGNED__ = .;
89*8d69a03fSSandrine Bailleux        /*
90*8d69a03fSSandrine Bailleux         * Memory page(s) mapped to this section will be marked
91*8d69a03fSSandrine Bailleux         * as device memory.  No other unexpected data must creep in.
92*8d69a03fSSandrine Bailleux         * Ensure the rest of the current memory page is unused.
93*8d69a03fSSandrine Bailleux         */
94*8d69a03fSSandrine Bailleux        . = NEXT(4096);
95*8d69a03fSSandrine Bailleux        __COHERENT_RAM_END__ = .;
96*8d69a03fSSandrine Bailleux    } >RAM
974f6ad66aSAchin Gupta
98*8d69a03fSSandrine Bailleux    __BL1_RAM_START__ = ADDR(.data);
99*8d69a03fSSandrine Bailleux    __BL1_RAM_END__ = .;
1004f6ad66aSAchin Gupta
101*8d69a03fSSandrine Bailleux    __DATA_ROM_START__ = LOADADDR(.data);
102*8d69a03fSSandrine Bailleux    __DATA_SIZE__ = SIZEOF(.data);
103*8d69a03fSSandrine Bailleux
104*8d69a03fSSandrine Bailleux    __BSS_SIZE__ = SIZEOF(.bss);
105*8d69a03fSSandrine Bailleux
106*8d69a03fSSandrine Bailleux    __COHERENT_RAM_UNALIGNED_SIZE__ =
107*8d69a03fSSandrine Bailleux        __COHERENT_RAM_END_UNALIGNED__ - __COHERENT_RAM_START__;
108*8d69a03fSSandrine Bailleux
109*8d69a03fSSandrine Bailleux    ASSERT(. <= BL31_BASE, "BL31 image overlaps BL1 image.")
1104f6ad66aSAchin Gupta}
111