/*
 * Copyright (c) 2013-2015, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <arch.h>
#include <asm_macros.S>
#include <bl_common.h>


	.globl	bl2_entrypoint



func bl2_entrypoint
	/*---------------------------------------------
	 * Store the extents of the tzram available to
	 * BL2 for future use. The opcode parameter
	 * allows other functions to be implemented
	 * if needed.
	 * ---------------------------------------------
	 */
	mov	x20, x0
	mov	x21, x1
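
	/* ---------------------------------------------
	 * Note (a sketch, assuming the standard
	 * AAPCS64 calling convention): BL1 enters this
	 * code as if it had a C prototype along the
	 * lines of
	 *   void bl2_entrypoint(unsigned int opcode,
	 *                       meminfo_t *tzram_layout);
	 * with the opcode in x0 and the layout pointer
	 * in x1 (the meminfo_t type is an assumption
	 * based on how x1 is consumed later). They are
	 * parked in x20/x21 because those registers
	 * are callee-saved and so survive the helper
	 * calls made below.
	 * ---------------------------------------------
	 */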

	/* ---------------------------------------------
	 * Set the exception vector to something sane.
	 * ---------------------------------------------
	 */
	adr	x0, early_exceptions
	msr	vbar_el1, x0
	isb
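
	/* ---------------------------------------------
	 * Note: a write to VBAR_EL1 is only guaranteed
	 * to be visible to later instructions after a
	 * context synchronization event, which is what
	 * the isb above provides. From this point on,
	 * any exception taken at EL1 is routed through
	 * the early_exceptions vectors.
	 * ---------------------------------------------
	 */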

	/* ---------------------------------------------
	 * Enable the SError interrupt now that the
	 * exception vectors have been set up.
	 * ---------------------------------------------
	 */
	msr	daifclr, #DAIF_ABT_BIT
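
	/* ---------------------------------------------
	 * Note: DAIF_ABT_BIT selects the 'A' bit in
	 * PSTATE, which masks asynchronous aborts
	 * (SError). Writing it to daifclr is roughly
	 * the equivalent of
	 *   PSTATE.A = 0;   unmask SError
	 * The IRQ and FIQ masks are left untouched.
	 * ---------------------------------------------
	 */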

	/* ---------------------------------------------
	 * Enable the instruction cache, stack pointer
	 * and data access alignment checks.
	 * ---------------------------------------------
	 */
	mov	x1, #(SCTLR_I_BIT | SCTLR_A_BIT | SCTLR_SA_BIT)
	mrs	x0, sctlr_el1
	orr	x0, x0, x1
	msr	sctlr_el1, x0
	isb
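
	/* ---------------------------------------------
	 * For reference, the SCTLR_EL1 bits ORed in
	 * above are:
	 *   SCTLR_I_BIT  - instruction cache enable
	 *   SCTLR_A_BIT  - alignment fault checking
	 *                  for data accesses
	 *   SCTLR_SA_BIT - SP alignment fault checking
	 * The MMU and data cache are deliberately left
	 * off at this point.
	 * ---------------------------------------------
	 */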

	/* ---------------------------------------------
	 * Check the opcode out of paranoia.
	 * ---------------------------------------------
	 */
	mov	x0, #RUN_IMAGE
	cmp	x0, x20
	b.ne	_panic
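
	/* ---------------------------------------------
	 * Note: RUN_IMAGE is the opcode BL1 is
	 * expected to pass in x0 (saved in x20 above)
	 * when handing control to BL2; any other value
	 * indicates an unexpected entry path and sends
	 * the code into the _panic loop.
	 * ---------------------------------------------
	 */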

	/* ---------------------------------------------
	 * Invalidate the RW memory used by the BL2
	 * image. This includes the data and NOBITS
	 * sections. This is done to safeguard against
	 * possible corruption of this memory by dirty
	 * cache lines in a system cache as a result of
	 * use by an earlier boot loader stage.
	 * ---------------------------------------------
	 */
	adr	x0, __RW_START__
	adr	x1, __RW_END__
	sub	x1, x1, x0
	bl	inv_dcache_range
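
	/* ---------------------------------------------
	 * Note: inv_dcache_range appears to follow the
	 * usual helper convention, roughly
	 *   void inv_dcache_range(unsigned long addr,
	 *                         unsigned long size);
	 * i.e. x0 = base, x1 = size in bytes. It
	 * invalidates (without cleaning) the data
	 * cache lines covering that region.
	 * ---------------------------------------------
	 */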

	/* ---------------------------------------------
	 * Zero out NOBITS sections. There are 2 of them:
	 *   - the .bss section;
	 *   - the coherent memory section.
	 * ---------------------------------------------
	 */
	ldr	x0, =__BSS_START__
	ldr	x1, =__BSS_SIZE__
	bl	zeromem16
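
	/* ---------------------------------------------
	 * Note: zeromem16 takes x0 = base and
	 * x1 = size and, as the name suggests, assumes
	 * a 16-byte aligned base so that paired stores
	 * of xzr can be used. The linker script is
	 * assumed to align __BSS_START__ accordingly.
	 * ---------------------------------------------
	 */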

#if USE_COHERENT_MEM
	ldr	x0, =__COHERENT_RAM_START__
	ldr	x1, =__COHERENT_RAM_UNALIGNED_SIZE__
	bl	zeromem16
#endif
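
	/* ---------------------------------------------
	 * Note: the coherent memory section only
	 * exists when the build is configured with
	 * USE_COHERENT_MEM=1. It is cleared here while
	 * the MMU is still off, since it is expected
	 * to be mapped with non-cacheable attributes
	 * later on.
	 * ---------------------------------------------
	 */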

	/* --------------------------------------------
	 * Allocate a stack whose memory will be marked
	 * as Normal-IS-WBWA when the MMU is enabled.
	 * There is no risk of reading stale stack
	 * memory after enabling the MMU as only the
	 * primary cpu is running at the moment.
	 * --------------------------------------------
	 */
	bl	plat_set_my_stack
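
	/* ---------------------------------------------
	 * Note: plat_set_my_stack is the platform hook
	 * that points SP at the stack carved out for
	 * the calling CPU, conceptually
	 *   sp = stack_top_of(this_cpu);
	 * Up to this point the code runs without a
	 * stack, which is why everything above is
	 * assembly calling only leaf helpers.
	 * ---------------------------------------------
	 */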

	/* ---------------------------------------------
	 * Perform early platform setup and platform-
	 * specific early architecture setup, e.g. MMU
	 * setup.
	 * ---------------------------------------------
	 */
	mov	x0, x21
	bl	bl2_early_platform_setup
	bl	bl2_plat_arch_setup
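
	/* ---------------------------------------------
	 * Note: x21 still holds the tzram layout
	 * pointer saved on entry, so
	 * bl2_early_platform_setup receives it as its
	 * first argument. bl2_plat_arch_setup is the
	 * stage at which the MMU (and with it the data
	 * cache) is presumably turned on.
	 * ---------------------------------------------
	 */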

	/* ---------------------------------------------
	 * Jump to main function.
	 * ---------------------------------------------
	 */
	bl	bl2_main
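
	/* ---------------------------------------------
	 * bl2_main is not expected to return; if it
	 * ever does, fall through into the _panic loop
	 * below and spin forever.
	 * ---------------------------------------------
	 */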
_panic:
	b	_panic
endfunc bl2_entrypoint