/*
 * Copyright (c) 2016-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

/*
 * Linker script for the AArch32 SP_MIN (BL32) image. It is preprocessed
 * before being handed to the linker, so C preprocessor directives and the
 * section-list macros from bl_common.ld.h are used throughout.
 */
#include <common/bl_common.ld.h>
#include <lib/xlat_tables/xlat_tables_defs.h>

OUTPUT_FORMAT(elf32-littlearm)
OUTPUT_ARCH(arm)
ENTRY(sp_min_vector_table)

/* The whole image lives in a single RAM region bounded by the platform. */
MEMORY {
    RAM (rwx): ORIGIN = BL32_BASE, LENGTH = BL32_LIMIT - BL32_BASE
}

#ifdef PLAT_SP_MIN_EXTRA_LD_SCRIPT
#   include <plat_sp_min.ld.S>
#endif /* PLAT_SP_MIN_EXTRA_LD_SCRIPT */

SECTIONS {
    RAM_REGION_START = ORIGIN(RAM);
    RAM_REGION_LENGTH = LENGTH(RAM);
    . = BL32_BASE;

    ASSERT(. == ALIGN(PAGE_SIZE),
           "BL32_BASE address is not aligned on a page boundary.")

#if SEPARATE_CODE_AND_RODATA
    .text . : {
        ASSERT(. == ALIGN(PAGE_SIZE),
               ".text address is not aligned on a page boundary.");

        __TEXT_START__ = .;

        /* The entry point object must come first in the image. */
        *entrypoint.o(.text*)
        *(SORT_BY_ALIGNMENT(.text*))
        *(.vectors)
        __TEXT_END_UNALIGNED__ = .;

        /* Pad to a page boundary so code gets its own RX mapping. */
        . = ALIGN(PAGE_SIZE);

        __TEXT_END__ = .;
    } >RAM

    /* .ARM.extab and .ARM.exidx are only added because Clang needs them */
    .ARM.extab . : {
        *(.ARM.extab* .gnu.linkonce.armextab.*)
    } >RAM

    .ARM.exidx . : {
        *(.ARM.exidx* .gnu.linkonce.armexidx.*)
    } >RAM

    .rodata . : {
        __RODATA_START__ = .;
        *(SORT_BY_ALIGNMENT(.rodata*))

        RODATA_COMMON

        . = ALIGN(8);

#   include <lib/el3_runtime/pubsub_events.h>
        __RODATA_END_UNALIGNED__ = .;

        /* Pad to a page boundary so rodata gets its own RO mapping. */
        . = ALIGN(PAGE_SIZE);

        __RODATA_END__ = .;
    } >RAM
#else /* SEPARATE_CODE_AND_RODATA */
    .ro . : {
        ASSERT(. == ALIGN(PAGE_SIZE),
               ".ro address is not aligned on a page boundary.");

        __RO_START__ = .;

        /* The entry point object must come first in the image. */
        *entrypoint.o(.text*)
        *(SORT_BY_ALIGNMENT(.text*))
        *(SORT_BY_ALIGNMENT(.rodata*))

        RODATA_COMMON

        . = ALIGN(8);

#   include <lib/el3_runtime/pubsub_events.h>

        *(.vectors)

        __RO_END_UNALIGNED__ = .;

        /*
         * Memory page(s) mapped to this section will be marked as device
         * memory. No other unexpected data must creep in. Ensure that the rest
         * of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);

        __RO_END__ = .;
    } >RAM
#endif /* SEPARATE_CODE_AND_RODATA */

    /* cpu_ops entries are collected by RODATA_COMMON above. */
    ASSERT(__CPU_OPS_END__ > __CPU_OPS_START__,
           "cpu_ops not defined for this platform.")

    __RW_START__ = .;

    DATA_SECTION >RAM
    RELA_SECTION >RAM

#ifdef BL32_PROGBITS_LIMIT
    ASSERT(. <= BL32_PROGBITS_LIMIT, "BL32 progbits has exceeded its limit.")
#endif /* BL32_PROGBITS_LIMIT */

    STACK_SECTION >RAM
    BSS_SECTION >RAM
    XLAT_TABLE_SECTION >RAM

    __BSS_SIZE__ = SIZEOF(.bss);

#if USE_COHERENT_MEM
    /*
     * The base address of the coherent memory section must be page-aligned to
     * guarantee that the coherent data are stored on their own pages and are
     * not mixed with normal data. This is required to set up the correct
     * memory attributes for the coherent data page tables.
     */
    .coherent_ram (NOLOAD) : ALIGN(PAGE_SIZE) {
        __COHERENT_RAM_START__ = .;

        /*
         * Bakery locks are stored in coherent memory. Each lock's data is
         * contiguous and fully allocated by the compiler.
         */
        *(.bakery_lock)
        *(.tzfw_coherent_mem)

        __COHERENT_RAM_END_UNALIGNED__ = .;

        /*
         * Memory page(s) mapped to this section will be marked as device
         * memory. No other unexpected data must creep in. Ensure that the rest
         * of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);

        __COHERENT_RAM_END__ = .;
    } >RAM

    __COHERENT_RAM_UNALIGNED_SIZE__ =
        __COHERENT_RAM_END_UNALIGNED__ - __COHERENT_RAM_START__;
#endif /* USE_COHERENT_MEM */

    __RW_END__ = .;
    __BL32_END__ = .;

    /* Dynamic-linking metadata is never needed in the firmware image. */
    /DISCARD/ : {
        *(.dynsym .dynstr .hash .gnu.hash)
    }

    ASSERT(. <= BL32_LIMIT, "BL32 image has exceeded its limit.")
    RAM_REGION_END = .;
}