/*
 * Copyright (c) 2015-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <platform_def.h>

#include <common/bl_common.ld.h>
#include <lib/xlat_tables/xlat_tables_defs.h>

OUTPUT_FORMAT(PLATFORM_LINKER_FORMAT)
OUTPUT_ARCH(PLATFORM_LINKER_ARCH)
ENTRY(bl2u_entrypoint)

MEMORY {
    RAM (rwx): ORIGIN = BL2U_BASE, LENGTH = BL2U_LIMIT - BL2U_BASE
}

SECTIONS {
    . = BL2U_BASE;

    ASSERT(. == ALIGN(PAGE_SIZE),
           "BL2U_BASE address is not aligned on a page boundary.")

#if SEPARATE_CODE_AND_RODATA
    .text . : {
        __TEXT_START__ = .;

        *bl2u_entrypoint.o(.text*)
        *(SORT_BY_ALIGNMENT(.text*))
        *(.vectors)

        . = ALIGN(PAGE_SIZE);

        __TEXT_END__ = .;
    } >RAM

    /* .ARM.extab and .ARM.exidx are only added because Clang needs them */
    .ARM.extab . : {
        *(.ARM.extab* .gnu.linkonce.armextab.*)
    } >RAM

    .ARM.exidx . : {
        *(.ARM.exidx* .gnu.linkonce.armexidx.*)
    } >RAM

    .rodata . : {
        __RODATA_START__ = .;

        *(SORT_BY_ALIGNMENT(.rodata*))

        RODATA_COMMON

        . = ALIGN(PAGE_SIZE);

        __RODATA_END__ = .;
    } >RAM
#else /* SEPARATE_CODE_AND_RODATA */
    .ro . : {
        __RO_START__ = .;

        *bl2u_entrypoint.o(.text*)
        *(SORT_BY_ALIGNMENT(.text*))
        *(SORT_BY_ALIGNMENT(.rodata*))

        RODATA_COMMON

        *(.vectors)

        __RO_END_UNALIGNED__ = .;

        /*
         * Memory page(s) mapped to this section will be marked as read-only,
         * executable. No RW data from the next section must creep in. Ensure
         * that the rest of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);

        __RO_END__ = .;
    } >RAM
#endif /* SEPARATE_CODE_AND_RODATA */

    __RW_START__ = .;

    DATA_SECTION >RAM
    STACK_SECTION >RAM
    BSS_SECTION >RAM
    XLAT_TABLE_SECTION >RAM

#if USE_COHERENT_MEM
    /*
     * The base address of the coherent memory section must be page-aligned to
     * guarantee that the coherent data are stored on their own pages and are
     * not mixed with normal data. This is required to set up the correct
     * memory attributes for the coherent data page tables.
     */
    .coherent_ram (NOLOAD) : ALIGN(PAGE_SIZE) {
        __COHERENT_RAM_START__ = .;
        *(.tzfw_coherent_mem)
        __COHERENT_RAM_END_UNALIGNED__ = .;

        /*
         * Memory page(s) mapped to this section will be marked as device
         * memory. No other unexpected data must creep in. Ensure the rest of
         * the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);

        __COHERENT_RAM_END__ = .;
    } >RAM
#endif /* USE_COHERENT_MEM */

    __RW_END__ = .;
    __BL2U_END__ = .;

    __BSS_SIZE__ = SIZEOF(.bss);

    ASSERT(. <= BL2U_LIMIT, "BL2U image has exceeded its limit.")
}