/*
 * Copyright (c) 2013-2014, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE 224f6ad66aSAchin Gupta * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 234f6ad66aSAchin Gupta * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 244f6ad66aSAchin Gupta * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 254f6ad66aSAchin Gupta * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 264f6ad66aSAchin Gupta * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 274f6ad66aSAchin Gupta * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 284f6ad66aSAchin Gupta * POSSIBILITY OF SUCH DAMAGE. 294f6ad66aSAchin Gupta */ 304f6ad66aSAchin Gupta 31c10bd2ceSSandrine Bailleux#include <arch.h> 320a30cf54SAndrew Thoelke#include <asm_macros.S> 3397043ac9SDan Handley#include <bl_common.h> 344f6ad66aSAchin Gupta 354f6ad66aSAchin Gupta .globl bl31_entrypoint 364f6ad66aSAchin Gupta 374f6ad66aSAchin Gupta 384f6ad66aSAchin Gupta /* ----------------------------------------------------- 394f6ad66aSAchin Gupta * bl31_entrypoint() is the cold boot entrypoint, 404f6ad66aSAchin Gupta * executed only by the primary cpu. 
414f6ad66aSAchin Gupta * ----------------------------------------------------- 424f6ad66aSAchin Gupta */ 434f6ad66aSAchin Gupta 440a30cf54SAndrew Thoelkefunc bl31_entrypoint 454112bfa0SVikram Kanigiri /* --------------------------------------------------------------- 464112bfa0SVikram Kanigiri * Preceding bootloader has populated x0 with a pointer to a 474112bfa0SVikram Kanigiri * 'bl31_params' structure & x1 with a pointer to platform 484112bfa0SVikram Kanigiri * specific structure 494112bfa0SVikram Kanigiri * --------------------------------------------------------------- 50c10bd2ceSSandrine Bailleux */ 51dbad1bacSVikram Kanigiri#if !RESET_TO_BL31 5229fb905dSVikram Kanigiri mov x20, x0 5329fb905dSVikram Kanigiri mov x21, x1 54dbad1bacSVikram Kanigiri#else 55ec3c1003SAchin Gupta /* --------------------------------------------- 56ec3c1003SAchin Gupta * Set the CPU endianness before doing anything 57ec3c1003SAchin Gupta * that might involve memory reads or writes. 58ec3c1003SAchin Gupta * --------------------------------------------- 59ec3c1003SAchin Gupta */ 60ec3c1003SAchin Gupta mrs x0, sctlr_el3 61ec3c1003SAchin Gupta bic x0, x0, #SCTLR_EE_BIT 62ec3c1003SAchin Gupta msr sctlr_el3, x0 63ec3c1003SAchin Gupta isb 64dbad1bacSVikram Kanigiri 65dbad1bacSVikram Kanigiri /* ----------------------------------------------------- 66dbad1bacSVikram Kanigiri * Perform any processor specific actions upon reset 67dbad1bacSVikram Kanigiri * e.g. cache, tlb invalidations etc. 
Override the 68dbad1bacSVikram Kanigiri * Boot ROM(BL0) programming sequence 69dbad1bacSVikram Kanigiri * ----------------------------------------------------- 70dbad1bacSVikram Kanigiri */ 719b476841SSoby Mathew bl reset_handler 72dbad1bacSVikram Kanigiri#endif 73dbad1bacSVikram Kanigiri /* --------------------------------------------- 74ec3c1003SAchin Gupta * Enable the instruction cache, stack pointer 75ec3c1003SAchin Gupta * and data access alignment checks 76dbad1bacSVikram Kanigiri * --------------------------------------------- 77dbad1bacSVikram Kanigiri */ 78ec3c1003SAchin Gupta mov x1, #(SCTLR_I_BIT | SCTLR_A_BIT | SCTLR_SA_BIT) 79ec3c1003SAchin Gupta mrs x0, sctlr_el3 80ec3c1003SAchin Gupta orr x0, x0, x1 81ec3c1003SAchin Gupta msr sctlr_el3, x0 82dbad1bacSVikram Kanigiri isb 83c10bd2ceSSandrine Bailleux 84c10bd2ceSSandrine Bailleux /* --------------------------------------------- 85626ed510SSoby Mathew * Initialise cpu_data early to enable crash 86626ed510SSoby Mathew * reporting to have access to crash stack. 87626ed510SSoby Mathew * Since crash reporting depends on cpu_data to 88626ed510SSoby Mathew * report the unhandled exception, not 89626ed510SSoby Mathew * doing so can lead to recursive exceptions due 90626ed510SSoby Mathew * to a NULL TPIDR_EL3 91626ed510SSoby Mathew * --------------------------------------------- 92626ed510SSoby Mathew */ 93626ed510SSoby Mathew bl init_cpu_data_ptr 94626ed510SSoby Mathew 95626ed510SSoby Mathew /* --------------------------------------------- 96626ed510SSoby Mathew * Set the exception vector. 
97c10bd2ceSSandrine Bailleux * --------------------------------------------- 98c10bd2ceSSandrine Bailleux */ 99ee94cc6fSAndrew Thoelke adr x1, runtime_exceptions 100c10bd2ceSSandrine Bailleux msr vbar_el3, x1 1010c8d4fefSAchin Gupta isb 1020c8d4fefSAchin Gupta 1030c8d4fefSAchin Gupta /* --------------------------------------------- 1040c8d4fefSAchin Gupta * Enable the SError interrupt now that the 1050c8d4fefSAchin Gupta * exception vectors have been setup. 1060c8d4fefSAchin Gupta * --------------------------------------------- 1070c8d4fefSAchin Gupta */ 1080c8d4fefSAchin Gupta msr daifclr, #DAIF_ABT_BIT 109c10bd2ceSSandrine Bailleux 1104f603683SHarry Liebel /* --------------------------------------------------------------------- 1114f603683SHarry Liebel * The initial state of the Architectural feature trap register 1124f603683SHarry Liebel * (CPTR_EL3) is unknown and it must be set to a known state. All 1134f603683SHarry Liebel * feature traps are disabled. Some bits in this register are marked as 1144f603683SHarry Liebel * Reserved and should not be modified. 1154f603683SHarry Liebel * 1164f603683SHarry Liebel * CPTR_EL3.TCPAC: This causes a direct access to the CPACR_EL1 from EL1 1174f603683SHarry Liebel * or the CPTR_EL2 from EL2 to trap to EL3 unless it is trapped at EL2. 1184f603683SHarry Liebel * CPTR_EL3.TTA: This causes access to the Trace functionality to trap 1194f603683SHarry Liebel * to EL3 when executed from EL0, EL1, EL2, or EL3. If system register 1204f603683SHarry Liebel * access to trace functionality is not supported, this bit is RES0. 1214f603683SHarry Liebel * CPTR_EL3.TFP: This causes instructions that access the registers 1224f603683SHarry Liebel * associated with Floating Point and Advanced SIMD execution to trap 1234f603683SHarry Liebel * to EL3 when executed from any exception level, unless trapped to EL1 1244f603683SHarry Liebel * or EL2. 
1254f603683SHarry Liebel * --------------------------------------------------------------------- 1264f603683SHarry Liebel */ 1274f603683SHarry Liebel mrs x1, cptr_el3 1284f603683SHarry Liebel bic w1, w1, #TCPAC_BIT 1294f603683SHarry Liebel bic w1, w1, #TTA_BIT 1304f603683SHarry Liebel bic w1, w1, #TFP_BIT 1314f603683SHarry Liebel msr cptr_el3, x1 1324f603683SHarry Liebel 133dbad1bacSVikram Kanigiri#if RESET_TO_BL31 13403396c43SVikram Kanigiri /* ------------------------------------------------------- 13503396c43SVikram Kanigiri * Will not return from this macro if it is a warm boot. 13603396c43SVikram Kanigiri * ------------------------------------------------------- 13703396c43SVikram Kanigiri */ 138dbad1bacSVikram Kanigiri wait_for_entrypoint 139dbad1bacSVikram Kanigiri bl platform_mem_init 140dbad1bacSVikram Kanigiri#endif 1414f6ad66aSAchin Gupta 14265f546a1SSandrine Bailleux /* --------------------------------------------- 14365f546a1SSandrine Bailleux * Zero out NOBITS sections. There are 2 of them: 14465f546a1SSandrine Bailleux * - the .bss section; 14565f546a1SSandrine Bailleux * - the coherent memory section. 14665f546a1SSandrine Bailleux * --------------------------------------------- 14765f546a1SSandrine Bailleux */ 14865f546a1SSandrine Bailleux ldr x0, =__BSS_START__ 14965f546a1SSandrine Bailleux ldr x1, =__BSS_SIZE__ 15065f546a1SSandrine Bailleux bl zeromem16 15165f546a1SSandrine Bailleux 152*ab8707e6SSoby Mathew#if USE_COHERENT_MEM 15365f546a1SSandrine Bailleux ldr x0, =__COHERENT_RAM_START__ 15465f546a1SSandrine Bailleux ldr x1, =__COHERENT_RAM_UNALIGNED_SIZE__ 15565f546a1SSandrine Bailleux bl zeromem16 156*ab8707e6SSoby Mathew#endif 15765f546a1SSandrine Bailleux 158caa84939SJeenu Viswambharan /* --------------------------------------------- 159add40351SSoby Mathew * Initialize the cpu_ops pointer. 
160add40351SSoby Mathew * --------------------------------------------- 161add40351SSoby Mathew */ 162add40351SSoby Mathew bl init_cpu_ops 163add40351SSoby Mathew 164add40351SSoby Mathew /* --------------------------------------------- 165caa84939SJeenu Viswambharan * Use SP_EL0 for the C runtime stack. 166caa84939SJeenu Viswambharan * --------------------------------------------- 167caa84939SJeenu Viswambharan */ 168caa84939SJeenu Viswambharan msr spsel, #0 169caa84939SJeenu Viswambharan 1704f6ad66aSAchin Gupta /* -------------------------------------------- 171754a2b7aSAchin Gupta * Allocate a stack whose memory will be marked 172754a2b7aSAchin Gupta * as Normal-IS-WBWA when the MMU is enabled. 173754a2b7aSAchin Gupta * There is no risk of reading stale stack 174754a2b7aSAchin Gupta * memory after enabling the MMU as only the 175754a2b7aSAchin Gupta * primary cpu is running at the moment. 1764f6ad66aSAchin Gupta * -------------------------------------------- 1774f6ad66aSAchin Gupta */ 1787935d0a5SAndrew Thoelke mrs x0, mpidr_el1 179754a2b7aSAchin Gupta bl platform_set_stack 1804f6ad66aSAchin Gupta 1814f6ad66aSAchin Gupta /* --------------------------------------------- 1824f6ad66aSAchin Gupta * Perform platform specific early arch. setup 1834f6ad66aSAchin Gupta * --------------------------------------------- 1844f6ad66aSAchin Gupta */ 185dbad1bacSVikram Kanigiri#if RESET_TO_BL31 186dbad1bacSVikram Kanigiri mov x0, 0 187dbad1bacSVikram Kanigiri mov x1, 0 188dbad1bacSVikram Kanigiri#else 1894f6ad66aSAchin Gupta mov x0, x20 1904f6ad66aSAchin Gupta mov x1, x21 191dbad1bacSVikram Kanigiri#endif 192dbad1bacSVikram Kanigiri 1934f6ad66aSAchin Gupta bl bl31_early_platform_setup 1944f6ad66aSAchin Gupta bl bl31_plat_arch_setup 1954f6ad66aSAchin Gupta 1964f6ad66aSAchin Gupta /* --------------------------------------------- 1974f6ad66aSAchin Gupta * Jump to main function. 
1984f6ad66aSAchin Gupta * --------------------------------------------- 1994f6ad66aSAchin Gupta */ 2004f6ad66aSAchin Gupta bl bl31_main 2014f6ad66aSAchin Gupta 202caa84939SJeenu Viswambharan b el3_exit 203