/*
 * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

/*
 * Linker script for the BL2 boot-stage image. It is run through the C
 * preprocessor before linking, so CPP conditionals (SEPARATE_CODE_AND_RODATA,
 * ENABLE_RME, USE_COHERENT_MEM) select the section layout at build time.
 * BL2_BASE / BL2_LIMIT / PAGE_SIZE are provided by the platform build.
 */
#include <common/bl_common.ld.h>
#include <lib/xlat_tables/xlat_tables_defs.h>

OUTPUT_FORMAT(PLATFORM_LINKER_FORMAT)
OUTPUT_ARCH(PLATFORM_LINKER_ARCH)
ENTRY(bl2_entrypoint)

MEMORY {
    /* Single RAM region covering the whole BL2 image footprint. */
    RAM (rwx): ORIGIN = BL2_BASE, LENGTH = BL2_LIMIT - BL2_BASE
}

SECTIONS {
    . = BL2_BASE;

    /*
     * The MMU maps the image with page granularity, so the image base must
     * itself sit on a page boundary.
     */
    ASSERT(. == ALIGN(PAGE_SIZE),
           "BL2_BASE address is not aligned on a page boundary.")

#if SEPARATE_CODE_AND_RODATA
    /*
     * Code and read-only data live in separate pages so they can be mapped
     * with different attributes (exec vs. no-exec).
     */
    .text . : {
        __TEXT_START__ = .;

        /* Keep the entry-point object first so execution starts at BL2_BASE. */
#if ENABLE_RME
        *bl2_rme_entrypoint.o(.text*)
#else /* ENABLE_RME */
        *bl2_entrypoint.o(.text*)
#endif /* ENABLE_RME */

        /* Sorting by alignment minimises padding between input sections. */
        *(SORT_BY_ALIGNMENT(.text*))
        *(.vectors)

        . = ALIGN(PAGE_SIZE);

        __TEXT_END__ = .;
    } >RAM

    /* .ARM.extab and .ARM.exidx are only added because Clang needs them */
    .ARM.extab . : {
        *(.ARM.extab* .gnu.linkonce.armextab.*)
    } >RAM

    .ARM.exidx . : {
        *(.ARM.exidx* .gnu.linkonce.armexidx.*)
    } >RAM

    .rodata . : {
        __RODATA_START__ = .;

        *(SORT_BY_ALIGNMENT(.rodata*))

        /* Common read-only content shared by all BL images (see bl_common.ld.h). */
        RODATA_COMMON

        . = ALIGN(PAGE_SIZE);

        __RODATA_END__ = .;
    } >RAM
#else /* SEPARATE_CODE_AND_RODATA */
    /* Combined read-only section: code, rodata and vectors share pages. */
    .ro . : {
        __RO_START__ = .;

        *bl2_entrypoint.o(.text*)
        *(SORT_BY_ALIGNMENT(.text*))
        *(SORT_BY_ALIGNMENT(.rodata*))

        RODATA_COMMON

        *(.vectors)

        __RO_END_UNALIGNED__ = .;

        /*
         * Memory page(s) mapped to this section will be marked as read-only,
         * executable. No RW data from the next section must creep in. Ensure
         * that the rest of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);

        __RO_END__ = .;
    } >RAM
#endif /* SEPARATE_CODE_AND_RODATA */

    /* Everything from here to __RW_END__ is mapped read-write. */
    __RW_START__ = .;

    DATA_SECTION >RAM
    STACK_SECTION >RAM
    BSS_SECTION >RAM
    XLAT_TABLE_SECTION >RAM

#if USE_COHERENT_MEM
    /*
     * The base address of the coherent memory section must be page-aligned to
     * guarantee that the coherent data are stored on their own pages and are
     * not mixed with normal data. This is required to set up the correct
     * memory attributes for the coherent data page tables.
     */
    .coherent_ram (NOLOAD) : ALIGN(PAGE_SIZE) {
        __COHERENT_RAM_START__ = .;
        *(.tzfw_coherent_mem)
        __COHERENT_RAM_END_UNALIGNED__ = .;

        /*
         * Memory page(s) mapped to this section will be marked as device
         * memory. No other unexpected data must creep in. Ensure the rest of
         * the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);

        __COHERENT_RAM_END__ = .;
    } >RAM
#endif /* USE_COHERENT_MEM */

    __RW_END__ = .;
    __BL2_END__ = .;

    /* Consumed by the entry-point code to zero-initialise BSS at runtime. */
    __BSS_SIZE__ = SIZEOF(.bss);

#if USE_COHERENT_MEM
    __COHERENT_RAM_UNALIGNED_SIZE__ =
        __COHERENT_RAM_END_UNALIGNED__ - __COHERENT_RAM_START__;
#endif /* USE_COHERENT_MEM */

    ASSERT(. <= BL2_LIMIT, "BL2 image has exceeded its limit.")
}