/*
 * Copyright (c) 2013-2016, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <arch.h>
#include <asm_macros.S>
#include <bl_common.h>


	.globl	bl2_entrypoint



func bl2_entrypoint
	/* ---------------------------------------------
	 * Save from x1 the extents of the tzram
	 * available to BL2 for future use.
	 * x0 is not currently used.
	 * ---------------------------------------------
	 */
	mov	x20, x1

	/* ---------------------------------------------
	 * Set the exception vector to something sane.
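	 * early_exceptions is a minimal vector table: any exception taken
	 * this early is simply reported and the core then hangs, since
	 * BL2 is not expected to handle exceptions itself.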
	 * ---------------------------------------------
	 */
	adr	x0, early_exceptions
	msr	vbar_el1, x0
	isb

	/* ---------------------------------------------
	 * Enable the SError interrupt now that the
	 * exception vectors have been set up.
	 * ---------------------------------------------
	 */
	msr	daifclr, #DAIF_ABT_BIT

	/* ---------------------------------------------
	 * Enable the instruction cache, stack pointer
	 * and data access alignment checks.
	 * ---------------------------------------------
	 */
	mov	x1, #(SCTLR_I_BIT | SCTLR_A_BIT | SCTLR_SA_BIT)
	mrs	x0, sctlr_el1
	orr	x0, x0, x1
	msr	sctlr_el1, x0
	isb

	/* ---------------------------------------------
	 * Invalidate the RW memory used by the BL2
	 * image. This includes the data and NOBITS
	 * sections. This is done to safeguard against
	 * possible corruption of this memory by dirty
	 * cache lines in a system cache as a result of
	 * use by an earlier boot loader stage.
	 * ---------------------------------------------
	 */
	adr	x0, __RW_START__
	adr	x1, __RW_END__
	sub	x1, x1, x0
	bl	inv_dcache_range

	/* ---------------------------------------------
	 * Zero out NOBITS sections. There are 2 of them:
	 *   - the .bss section;
	 *   - the coherent memory section.
	 * ---------------------------------------------
	 */
	ldr	x0, =__BSS_START__
	ldr	x1, =__BSS_SIZE__
	bl	zeromem16

#if USE_COHERENT_MEM
	ldr	x0, =__COHERENT_RAM_START__
	ldr	x1, =__COHERENT_RAM_UNALIGNED_SIZE__
	bl	zeromem16
#endif

	/* --------------------------------------------
	 * Allocate a stack whose memory will be marked
	 * as Normal-IS-WBWA when the MMU is enabled.
	 * There is no risk of reading stale stack
	 * memory after enabling the MMU as only the
	 * primary cpu is running at the moment.
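	 * plat_set_my_stack is a platform-provided helper that sets SP
	 * to the top of the stack allocated for the calling CPU.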
	 * --------------------------------------------
	 */
	bl	plat_set_my_stack

	/* ---------------------------------------------
	 * Perform early platform setup and platform-specific
	 * early architectural setup, e.g. MMU setup.
	 * ---------------------------------------------
	 */
	mov	x0, x20
	bl	bl2_early_platform_setup
	bl	bl2_plat_arch_setup

	/* ---------------------------------------------
	 * Jump to main function.
	 * ---------------------------------------------
	 */
	bl	bl2_main

	/* ---------------------------------------------
	 * Should never reach this point.
	 * ---------------------------------------------
	 */
	no_ret	plat_panic_handler

endfunc bl2_entrypoint