/*
 * Copyright (c) 2013-2015, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <arch.h>
#include <asm_macros.S>
#include <bl_common.h>
#include <bl1.h>
#include <context.h>

	.globl	bl1_exceptions

	.section	.vectors, "ax"; .align 11

	/* -----------------------------------------------------
	 * Very simple stackless exception handlers used by BL1.
	 * -----------------------------------------------------
	 */
	.align	7
bl1_exceptions:
	/* -----------------------------------------------------
	 * Current EL with SP0 : 0x0 - 0x200
	 * -----------------------------------------------------
	 */
SynchronousExceptionSP0:
	mov	x0, #SYNC_EXCEPTION_SP_EL0
	bl	plat_report_exception
	b	SynchronousExceptionSP0
	check_vector_size SynchronousExceptionSP0

	.align	7
IrqSP0:
	mov	x0, #IRQ_SP_EL0
	bl	plat_report_exception
	b	IrqSP0
	check_vector_size IrqSP0

	.align	7
FiqSP0:
	mov	x0, #FIQ_SP_EL0
	bl	plat_report_exception
	b	FiqSP0
	check_vector_size FiqSP0

	.align	7
SErrorSP0:
	mov	x0, #SERROR_SP_EL0
	bl	plat_report_exception
	b	SErrorSP0
	check_vector_size SErrorSP0

	/* -----------------------------------------------------
	 * Current EL with SPx: 0x200 - 0x400
	 * -----------------------------------------------------
	 */
	.align	7
SynchronousExceptionSPx:
	mov	x0, #SYNC_EXCEPTION_SP_ELX
	bl	plat_report_exception
	b	SynchronousExceptionSPx
	check_vector_size SynchronousExceptionSPx

	.align	7
IrqSPx:
	mov	x0, #IRQ_SP_ELX
	bl	plat_report_exception
	b	IrqSPx
	check_vector_size IrqSPx

	.align	7
FiqSPx:
	mov	x0, #FIQ_SP_ELX
	bl	plat_report_exception
	b	FiqSPx
	check_vector_size FiqSPx

	.align	7
SErrorSPx:
	mov	x0, #SERROR_SP_ELX
	bl	plat_report_exception
	b	SErrorSPx
	check_vector_size SErrorSPx

	/* -----------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * -----------------------------------------------------
	 */
	.align	7
SynchronousExceptionA64:
	/* Enable the SError interrupt */
	msr	daifclr, #DAIF_ABT_BIT

	/* Save LR (x30) into the context before using it as scratch below */
	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

	/* Expect only SMC exceptions */
	mrs	x30, esr_el3
	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x30, #EC_AARCH64_SMC
	b.ne	unexpected_sync_exception

	b	smc_handler64
	check_vector_size SynchronousExceptionA64

	.align	7
IrqA64:
	mov	x0, #IRQ_AARCH64
	bl	plat_report_exception
	b	IrqA64
	check_vector_size IrqA64

	.align	7
FiqA64:
	mov	x0, #FIQ_AARCH64
	bl	plat_report_exception
	b	FiqA64
	check_vector_size FiqA64

	.align	7
SErrorA64:
	mov	x0, #SERROR_AARCH64
	bl	plat_report_exception
	b	SErrorA64
	check_vector_size SErrorA64

	/* -----------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * -----------------------------------------------------
	 */
	.align	7
SynchronousExceptionA32:
	mov	x0, #SYNC_EXCEPTION_AARCH32
	bl	plat_report_exception
	b	SynchronousExceptionA32
	check_vector_size SynchronousExceptionA32

	.align	7
IrqA32:
	mov	x0, #IRQ_AARCH32
	bl	plat_report_exception
	b	IrqA32
	check_vector_size IrqA32

	.align	7
FiqA32:
	mov	x0, #FIQ_AARCH32
	bl	plat_report_exception
	b	FiqA32
	check_vector_size FiqA32

	.align	7
SErrorA32:
	mov	x0, #SERROR_AARCH32
	bl	plat_report_exception
	b	SErrorA32
	check_vector_size SErrorA32


func smc_handler64

	/* ----------------------------------------------
	 * Detect if this is a RUN_IMAGE or other SMC.
	 * ----------------------------------------------
	 */
	mov	x30, #BL1_SMC_RUN_IMAGE
	cmp	x30, x0
	b.ne	smc_handler

	/* ------------------------------------------------
	 * Make sure only the Secure world reaches here.
	 * ------------------------------------------------
	 */
	mrs	x30, scr_el3
	tst	x30, #SCR_NS_BIT
	b.ne	unexpected_sync_exception

	/* ----------------------------------------------
	 * Handle the RUN_IMAGE SMC. First switch back to
	 * SP_EL0 for the C runtime stack.
	 * ----------------------------------------------
	 */
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]
	msr	spsel, #0
	mov	sp, x30

	/* ---------------------------------------------------------------------
	 * Pass EL3 control to BL31.
	 * X1 is expected to hold the address of an entry_point_info_t
	 * structure describing the BL31 entrypoint.
	 * ---------------------------------------------------------------------
	 */
	mov	x20, x1

	mov	x0, x20
	bl	bl1_print_bl31_ep_info

	ldp	x0, x1, [x20, #ENTRY_POINT_INFO_PC_OFFSET]
	msr	elr_el3, x0
	msr	spsr_el3, x1
	/* BL31 must be entered in EL3, otherwise treat the SMC as unexpected */
	ubfx	x0, x1, #MODE_EL_SHIFT, #2
	cmp	x0, #MODE_EL3
	b.ne	unexpected_sync_exception

	/* Turn off the MMU and I-cache at EL3 and invalidate the EL3 TLBs */
	bl	disable_mmu_icache_el3
	tlbi	alle3

#if SPIN_ON_BL1_EXIT
	bl	print_debug_loop_message
debug_loop:
	b	debug_loop
#endif

	mov	x0, x20
	bl	bl1_plat_prepare_exit

	/* Load the BL31 arguments from the entry_point_info_t and jump to it */
	ldp	x6, x7, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x30)]
	ldp	x4, x5, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x20)]
	ldp	x2, x3, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x10)]
	ldp	x0, x1, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x0)]
	eret
endfunc smc_handler64

unexpected_sync_exception:
	mov	x0, #SYNC_EXCEPTION_AARCH64
	bl	plat_report_exception
	wfi
	b	unexpected_sync_exception

	/* -----------------------------------------------------
	 * Save the Secure/Normal world context and jump to the
	 * BL1 SMC handler.
	 * -----------------------------------------------------
	 */
smc_handler:
	/* -----------------------------------------------------
	 * Save the GP registers x0-x29.
	 * TODO: Revisit to store only SMCC specified registers.
	 * -----------------------------------------------------
	 */
	bl	save_gp_registers

	/* -----------------------------------------------------
	 * Populate the parameters for the SMC handler. We
	 * already have x0-x4 in place. x5 will point to a
	 * cookie (not used now). x6 will point to the context
	 * structure (SP_EL3) and x7 will contain flags we need
	 * to pass to the handler.
	 * -----------------------------------------------------
	 */
	mov	x5, xzr
	mov	x6, sp

	/* -----------------------------------------------------
	 * Restore the saved C runtime stack value which will
	 * become the new SP_EL0 i.e. EL3 runtime stack. It was
	 * saved in the 'cpu_context' structure prior to the last
	 * ERET from EL3.
	 * -----------------------------------------------------
	 */
	ldr	x12, [x6, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ---------------------------------------------
	 * Switch back to SP_EL0 for the C runtime stack.
	 * ---------------------------------------------
	 */
	msr	spsel, #0
	mov	sp, x12

	/* -----------------------------------------------------
	 * Save SPSR_EL3, ELR_EL3 and SCR_EL3 in case there
	 * is a world switch during SMC handling.
	 * -----------------------------------------------------
	 */
	mrs	x16, spsr_el3
	mrs	x17, elr_el3
	mrs	x18, scr_el3
	stp	x16, x17, [x6, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	str	x18, [x6, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]

	/* Copy the SCR_EL3.NS bit into the flags to indicate the caller's security state */
	bfi	x7, x18, #0, #1

	/* -----------------------------------------------------
	 * Go to the BL1 SMC handler.
	 * -----------------------------------------------------
	 */
	bl	bl1_smc_handler

	/* -----------------------------------------------------
	 * Do the transition to the next BL image.
	 * -----------------------------------------------------
	 */
	b	el3_exit