/*
 * Copyright (c) 2013-2014, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <arch.h>
#include <arch_helpers.h>
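
/*
 * Note: <arch.h> provides the SCTLR_* and SCR_* bit definitions used below,
 * while <arch_helpers.h> provides the system register accessors
 * (read_sctlr_el3(), write_scr(), ...) and the barrier/exception helpers.
 */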

/*******************************************************************************
 * Function that does the first bit of architectural setup that affects
 * execution in the non-secure address space.
 ******************************************************************************/
void bl1_arch_setup(void)
{
	unsigned long tmp_reg = 0;

	/* Enable alignment checks and set the exception endianness to LE */
	tmp_reg = read_sctlr_el3();
	tmp_reg |= (SCTLR_A_BIT | SCTLR_SA_BIT);
	tmp_reg &= ~SCTLR_EE_BIT;
	write_sctlr_el3(tmp_reg);
	isb();
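	/*
	 * SCTLR_A_BIT enables alignment fault checking and SCTLR_SA_BIT
	 * enables SP alignment checking at EL3, while clearing SCTLR_EE_BIT
	 * selects little-endian data accesses. The isb() above is the context
	 * synchronisation barrier required before relying on the new
	 * SCTLR_EL3 settings.
	 */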

	/*
	 * Enable HVCs, route FIQs to EL3, set the next EL to be AArch64, route
	 * external abort and SError interrupts to EL3
	 */
	tmp_reg = SCR_RES1_BITS | SCR_RW_BIT | SCR_HCE_BIT | SCR_EA_BIT |
		  SCR_FIQ_BIT;
	write_scr(tmp_reg);
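	/*
	 * SCR_RES1_BITS covers the bits that are reserved as one in SCR_EL3.
	 * SCR_NS_BIT is left clear, so the lower exception levels continue to
	 * execute in the secure state.
	 */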

	/*
	 * Enable SError and Debug exceptions
	 */
	enable_serror();
	enable_debug_exceptions();
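	/*
	 * These arch_helpers.h helpers clear the PSTATE.A and PSTATE.D mask
	 * bits respectively, unmasking SError interrupts and debug exceptions
	 * at the current exception level.
	 */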
}

/*******************************************************************************
 * Set the Secure EL1 required architectural state
 ******************************************************************************/
void bl1_arch_next_el_setup(void)
{
	unsigned long next_sctlr;

	/* Use the same endianness as the current BL */
	next_sctlr = (read_sctlr_el3() & SCTLR_EE_BIT);

	/* Set SCTLR Secure EL1 */
	next_sctlr |= SCTLR_EL1_RES1;

	write_sctlr_el1(next_sctlr);
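	/*
	 * Only the SCTLR_EL1 RES1 bits and the inherited EE bit are set, so
	 * the MMU and caches remain disabled for Secure EL1; the next image
	 * is expected to enable them itself.
	 */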
}