xref: /rk3399_ARM-atf/bl31/aarch64/bl31_entrypoint.S (revision 8b779620d3bad024b83650ecfeaafd7b3ae26ccf)
14f6ad66aSAchin Gupta/*
2e83b0cadSDan Handley * Copyright (c) 2013-2014, ARM Limited and Contributors. All rights reserved.
34f6ad66aSAchin Gupta *
44f6ad66aSAchin Gupta * Redistribution and use in source and binary forms, with or without
54f6ad66aSAchin Gupta * modification, are permitted provided that the following conditions are met:
64f6ad66aSAchin Gupta *
74f6ad66aSAchin Gupta * Redistributions of source code must retain the above copyright notice, this
84f6ad66aSAchin Gupta * list of conditions and the following disclaimer.
94f6ad66aSAchin Gupta *
104f6ad66aSAchin Gupta * Redistributions in binary form must reproduce the above copyright notice,
114f6ad66aSAchin Gupta * this list of conditions and the following disclaimer in the documentation
124f6ad66aSAchin Gupta * and/or other materials provided with the distribution.
134f6ad66aSAchin Gupta *
144f6ad66aSAchin Gupta * Neither the name of ARM nor the names of its contributors may be used
154f6ad66aSAchin Gupta * to endorse or promote products derived from this software without specific
164f6ad66aSAchin Gupta * prior written permission.
174f6ad66aSAchin Gupta *
184f6ad66aSAchin Gupta * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
194f6ad66aSAchin Gupta * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
204f6ad66aSAchin Gupta * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
214f6ad66aSAchin Gupta * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
224f6ad66aSAchin Gupta * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
234f6ad66aSAchin Gupta * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
244f6ad66aSAchin Gupta * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
254f6ad66aSAchin Gupta * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
264f6ad66aSAchin Gupta * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
274f6ad66aSAchin Gupta * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
284f6ad66aSAchin Gupta * POSSIBILITY OF SUCH DAMAGE.
294f6ad66aSAchin Gupta */
304f6ad66aSAchin Gupta
31c10bd2ceSSandrine Bailleux#include <arch.h>
320a30cf54SAndrew Thoelke#include <asm_macros.S>
3397043ac9SDan Handley#include <bl_common.h>
344f6ad66aSAchin Gupta
354f6ad66aSAchin Gupta	.globl	bl31_entrypoint
364f6ad66aSAchin Gupta
374f6ad66aSAchin Gupta
384f6ad66aSAchin Gupta	/* -----------------------------------------------------
394f6ad66aSAchin Gupta	 * bl31_entrypoint() is the cold boot entrypoint,
404f6ad66aSAchin Gupta	 * executed only by the primary cpu.
414f6ad66aSAchin Gupta	 * -----------------------------------------------------
424f6ad66aSAchin Gupta	 */
434f6ad66aSAchin Gupta
	/* ---------------------------------------------------------------
	 * Contract (taken from the code and comments in this function):
	 *
	 * When RESET_TO_BL31 == 0 (entered from a previous boot stage):
	 *   x0 = pointer to a 'bl31_params' structure
	 *   x1 = pointer to a platform-specific parameter structure
	 *   Both are stashed in x20/x21 and later handed unchanged to
	 *   bl31_early_platform_setup().
	 *
	 * When RESET_TO_BL31 == 1 (this is also the reset vector):
	 *   SCTLR_EL3.EE is cleared before any memory access, the
	 *   wait_for_entrypoint macro diverts warm boots (it does not
	 *   return in that case), and bl31_early_platform_setup()
	 *   receives x0 = x1 = 0 instead.
	 *
	 * Control never returns to the caller: after bl31_main() the
	 * function branches to el3_exit.
	 *
	 * NOTE(review): reset_handler, init_cpu_data_ptr,
	 * runtime_exceptions, zeromem16, platform_mem_init,
	 * platform_set_stack, bl31_early_platform_setup,
	 * bl31_plat_arch_setup, bl31_main and el3_exit are defined
	 * elsewhere; their behaviour is described here only as far as
	 * this block's call sites and comments establish it.
	 * ---------------------------------------------------------------
	 */
440a30cf54SAndrew Thoelkefunc bl31_entrypoint
454112bfa0SVikram Kanigiri	/* ---------------------------------------------------------------
464112bfa0SVikram Kanigiri	 * Preceding bootloader has populated x0 with a pointer to a
474112bfa0SVikram Kanigiri	 * 'bl31_params' structure & x1 with a pointer to platform
484112bfa0SVikram Kanigiri	 * specific structure
494112bfa0SVikram Kanigiri	 * ---------------------------------------------------------------
50c10bd2ceSSandrine Bailleux	 */
51dbad1bacSVikram Kanigiri#if !RESET_TO_BL31
5229fb905dSVikram Kanigiri	mov	x20, x0
5329fb905dSVikram Kanigiri	mov	x21, x1
	/* x20/x21 are callee-saved under AAPCS64, so the two arguments
	 * survive every 'bl' below until they are consumed at the
	 * bl31_early_platform_setup() call near the end. */
54dbad1bacSVikram Kanigiri#else
55ec3c1003SAchin Gupta	/* ---------------------------------------------
56ec3c1003SAchin Gupta	 * Set the CPU endianness before doing anything
57ec3c1003SAchin Gupta	 * that might involve memory reads or writes.
58ec3c1003SAchin Gupta	 * ---------------------------------------------
59ec3c1003SAchin Gupta	 */
60ec3c1003SAchin Gupta	mrs	x0, sctlr_el3
61ec3c1003SAchin Gupta	bic	x0, x0, #SCTLR_EE_BIT
62ec3c1003SAchin Gupta	msr	sctlr_el3, x0
63ec3c1003SAchin Gupta	isb
6479a97b2eSYatharth Kochar#endif
65dbad1bacSVikram Kanigiri
6679a97b2eSYatharth Kochar	/* ---------------------------------------------
6779a97b2eSYatharth Kochar	 * When RESET_TO_BL31 is true, perform any
6879a97b2eSYatharth Kochar	 * processor specific actions upon reset e.g.
6979a97b2eSYatharth Kochar	 * cache, tlb invalidations, errata workarounds
7079a97b2eSYatharth Kochar	 * etc.
7179a97b2eSYatharth Kochar	 * When RESET_TO_BL31 is false, perform any
7279a97b2eSYatharth Kochar	 * processor specific actions which undo or are
7379a97b2eSYatharth Kochar	 * in addition to the actions performed by the
7479a97b2eSYatharth Kochar	 * reset handler in the Boot ROM (BL1).
7579a97b2eSYatharth Kochar	 * ---------------------------------------------
76dbad1bacSVikram Kanigiri	 */
779b476841SSoby Mathew	bl	reset_handler
7879a97b2eSYatharth Kochar
79dbad1bacSVikram Kanigiri	/* ---------------------------------------------
80ec3c1003SAchin Gupta	 * Enable the instruction cache, stack pointer
81ec3c1003SAchin Gupta	 * and data access alignment checks
82dbad1bacSVikram Kanigiri	 * ---------------------------------------------
83dbad1bacSVikram Kanigiri	 */
84ec3c1003SAchin Gupta	mov	x1, #(SCTLR_I_BIT | SCTLR_A_BIT | SCTLR_SA_BIT)
85ec3c1003SAchin Gupta	mrs	x0, sctlr_el3
86ec3c1003SAchin Gupta	orr	x0, x0, x1
87ec3c1003SAchin Gupta	msr	sctlr_el3, x0
88dbad1bacSVikram Kanigiri	isb
89c10bd2ceSSandrine Bailleux
90c10bd2ceSSandrine Bailleux	/* ---------------------------------------------
91626ed510SSoby Mathew	 * Initialise cpu_data early to enable crash
92626ed510SSoby Mathew	 * reporting to have access to crash stack.
93626ed510SSoby Mathew	 * Since crash reporting depends on cpu_data to
94626ed510SSoby Mathew	 * report the unhandled exception, not
95626ed510SSoby Mathew	 * doing so can lead to recursive exceptions due
96626ed510SSoby Mathew	 * to a NULL TPIDR_EL3
97626ed510SSoby Mathew	 * ---------------------------------------------
98626ed510SSoby Mathew	 */
99626ed510SSoby Mathew	bl	init_cpu_data_ptr
100626ed510SSoby Mathew
101626ed510SSoby Mathew	/* ---------------------------------------------
102626ed510SSoby Mathew	 * Set the exception vector.
103c10bd2ceSSandrine Bailleux	 * ---------------------------------------------
104c10bd2ceSSandrine Bailleux	 */
105ee94cc6fSAndrew Thoelke	adr	x1, runtime_exceptions
106c10bd2ceSSandrine Bailleux	msr	vbar_el3, x1
1070c8d4fefSAchin Gupta	isb
1080c8d4fefSAchin Gupta
1090c8d4fefSAchin Gupta	/* ---------------------------------------------
1100c8d4fefSAchin Gupta	 * Enable the SError interrupt now that the
1110c8d4fefSAchin Gupta	 * exception vectors have been setup.
1120c8d4fefSAchin Gupta	 * ---------------------------------------------
1130c8d4fefSAchin Gupta	 */
1140c8d4fefSAchin Gupta	msr	daifclr, #DAIF_ABT_BIT
115c10bd2ceSSandrine Bailleux
1164f603683SHarry Liebel	/* ---------------------------------------------------------------------
1174f603683SHarry Liebel	 * The initial state of the Architectural feature trap register
1184f603683SHarry Liebel	 * (CPTR_EL3) is unknown and it must be set to a known state. All
1194f603683SHarry Liebel	 * feature traps are disabled. Some bits in this register are marked as
1204f603683SHarry Liebel	 * Reserved and should not be modified.
1214f603683SHarry Liebel	 *
1224f603683SHarry Liebel	 * CPTR_EL3.TCPAC: This causes a direct access to the CPACR_EL1 from EL1
1234f603683SHarry Liebel	 *  or the CPTR_EL2 from EL2 to trap to EL3 unless it is trapped at EL2.
1244f603683SHarry Liebel	 * CPTR_EL3.TTA: This causes access to the Trace functionality to trap
1254f603683SHarry Liebel	 *  to EL3 when executed from EL0, EL1, EL2, or EL3. If system register
1264f603683SHarry Liebel	 *  access to trace functionality is not supported, this bit is RES0.
1274f603683SHarry Liebel	 * CPTR_EL3.TFP: This causes instructions that access the registers
1284f603683SHarry Liebel	 *  associated with Floating Point and Advanced SIMD execution to trap
1294f603683SHarry Liebel	 *  to EL3 when executed from any exception level, unless trapped to EL1
1304f603683SHarry Liebel	 *  or EL2.
1314f603683SHarry Liebel	 * ---------------------------------------------------------------------
1324f603683SHarry Liebel	 */
1334f603683SHarry Liebel	mrs	x1, cptr_el3
1344f603683SHarry Liebel	bic	w1, w1, #TCPAC_BIT
1354f603683SHarry Liebel	bic	w1, w1, #TTA_BIT
1364f603683SHarry Liebel	bic	w1, w1, #TFP_BIT
1374f603683SHarry Liebel	msr	cptr_el3, x1
1384f603683SHarry Liebel
139dbad1bacSVikram Kanigiri#if RESET_TO_BL31
14003396c43SVikram Kanigiri	/* -------------------------------------------------------
14103396c43SVikram Kanigiri	 * Will not return from this macro if it is a warm boot.
14203396c43SVikram Kanigiri	 * -------------------------------------------------------
14303396c43SVikram Kanigiri	 */
144dbad1bacSVikram Kanigiri	wait_for_entrypoint
145dbad1bacSVikram Kanigiri	bl	platform_mem_init
146dbad1bacSVikram Kanigiri#endif
1474f6ad66aSAchin Gupta
14865f546a1SSandrine Bailleux	/* ---------------------------------------------
14965f546a1SSandrine Bailleux	 * Zero out NOBITS sections. There are 2 of them:
15065f546a1SSandrine Bailleux	 *   - the .bss section;
15165f546a1SSandrine Bailleux	 *   - the coherent memory section.
15265f546a1SSandrine Bailleux	 * ---------------------------------------------
15365f546a1SSandrine Bailleux	 */
	/* zeromem16 call convention here: x0 = base, x1 = size in bytes
	 * (linker-defined symbols). */
15465f546a1SSandrine Bailleux	ldr	x0, =__BSS_START__
15565f546a1SSandrine Bailleux	ldr	x1, =__BSS_SIZE__
15665f546a1SSandrine Bailleux	bl	zeromem16
15765f546a1SSandrine Bailleux
158ab8707e6SSoby Mathew#if USE_COHERENT_MEM
15965f546a1SSandrine Bailleux	ldr	x0, =__COHERENT_RAM_START__
16065f546a1SSandrine Bailleux	ldr	x1, =__COHERENT_RAM_UNALIGNED_SIZE__
16165f546a1SSandrine Bailleux	bl	zeromem16
162ab8707e6SSoby Mathew#endif
16365f546a1SSandrine Bailleux
164caa84939SJeenu Viswambharan	/* ---------------------------------------------
165caa84939SJeenu Viswambharan	 * Use SP_EL0 for the C runtime stack.
166caa84939SJeenu Viswambharan	 * ---------------------------------------------
167caa84939SJeenu Viswambharan	 */
168caa84939SJeenu Viswambharan	msr	spsel, #0
169caa84939SJeenu Viswambharan
1704f6ad66aSAchin Gupta	/* --------------------------------------------
171754a2b7aSAchin Gupta	 * Allocate a stack whose memory will be marked
172754a2b7aSAchin Gupta	 * as Normal-IS-WBWA when the MMU is enabled.
173754a2b7aSAchin Gupta	 * There is no risk of reading stale stack
174754a2b7aSAchin Gupta	 * memory after enabling the MMU as only the
175754a2b7aSAchin Gupta	 * primary cpu is running at the moment.
1764f6ad66aSAchin Gupta	 * --------------------------------------------
1774f6ad66aSAchin Gupta	 */
	/* platform_set_stack takes x0 = MPIDR of the calling cpu. */
1787935d0a5SAndrew Thoelke	mrs	x0, mpidr_el1
179754a2b7aSAchin Gupta	bl	platform_set_stack
1804f6ad66aSAchin Gupta
1814f6ad66aSAchin Gupta	/* ---------------------------------------------
1824f6ad66aSAchin Gupta	 * Perform platform specific early arch. setup
1834f6ad66aSAchin Gupta	 * ---------------------------------------------
1844f6ad66aSAchin Gupta	 */
	/* Recover the saved bl31_params / platform-struct pointers, or
	 * pass zeros when BL31 itself was the reset entrypoint and no
	 * previous boot stage supplied them. */
185dbad1bacSVikram Kanigiri#if RESET_TO_BL31
186dbad1bacSVikram Kanigiri	mov	x0, 0
187dbad1bacSVikram Kanigiri	mov	x1, 0
188dbad1bacSVikram Kanigiri#else
1894f6ad66aSAchin Gupta	mov	x0, x20
1904f6ad66aSAchin Gupta	mov	x1, x21
191dbad1bacSVikram Kanigiri#endif
192dbad1bacSVikram Kanigiri
1934f6ad66aSAchin Gupta	bl	bl31_early_platform_setup
1944f6ad66aSAchin Gupta	bl	bl31_plat_arch_setup
1954f6ad66aSAchin Gupta
1964f6ad66aSAchin Gupta	/* ---------------------------------------------
1974f6ad66aSAchin Gupta	 * Jump to main function.
1984f6ad66aSAchin Gupta	 * ---------------------------------------------
1994f6ad66aSAchin Gupta	 */
2004f6ad66aSAchin Gupta	bl	bl31_main
2014f6ad66aSAchin Gupta
	/* Tail-branch (not 'bl'): el3_exit performs the ERET out of
	 * EL3; this function never falls through past this point. */
202caa84939SJeenu Viswambharan	b	el3_exit
203*8b779620SKévin Petitendfunc bl31_entrypoint
204