/*
 * Copyright (c) 2013-2015, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <arch.h>
#include <asm_macros.S>
#include <bl_common.h>
#include <bl1.h>
#include <context.h>

	.globl	bl1_exceptions

	/*
	 * The ARMv8-A vector table must be 2 KB (2^11) aligned; VBAR_EL3 is
	 * presumably programmed with the address of bl1_exceptions elsewhere
	 * in BL1 — confirm against the BL1 entrypoint code.
	 */
	.section	.vectors, "ax"; .align 11

	/* -----------------------------------------------------
	 * Very simple stackless exception handlers used by BL1.
	 *
	 * Each vector entry below is 128 (2^7) bytes as mandated
	 * by the architecture (.align 7). check_vector_size
	 * (from asm_macros.S) enforces that no handler overflows
	 * its 32-instruction slot. All handlers except the lower
	 * EL AArch64 synchronous one are unexpected in BL1: they
	 * report the exception type to the platform and then spin
	 * forever.
	 * -----------------------------------------------------
	 */
	.align	7
bl1_exceptions:
	/* -----------------------------------------------------
	 * Current EL with SP0 : 0x0 - 0x200
	 * BL1 runs on SP_ELx, so none of these should fire.
	 * -----------------------------------------------------
	 */
SynchronousExceptionSP0:
	mov	x0, #SYNC_EXCEPTION_SP_EL0
	bl	plat_report_exception
	b	SynchronousExceptionSP0		/* fatal: spin forever */
	check_vector_size SynchronousExceptionSP0

	.align	7
IrqSP0:
	mov	x0, #IRQ_SP_EL0
	bl	plat_report_exception
	b	IrqSP0
	check_vector_size IrqSP0

	.align	7
FiqSP0:
	mov	x0, #FIQ_SP_EL0
	bl	plat_report_exception
	b	FiqSP0
	check_vector_size FiqSP0

	.align	7
SErrorSP0:
	mov	x0, #SERROR_SP_EL0
	bl	plat_report_exception
	b	SErrorSP0
	check_vector_size SErrorSP0

	/* -----------------------------------------------------
	 * Current EL with SPx: 0x200 - 0x400
	 * Exceptions taken from BL1 itself; all unexpected.
	 * -----------------------------------------------------
	 */
	.align	7
SynchronousExceptionSPx:
	mov	x0, #SYNC_EXCEPTION_SP_ELX
	bl	plat_report_exception
	b	SynchronousExceptionSPx
	check_vector_size SynchronousExceptionSPx

	.align	7
IrqSPx:
	mov	x0, #IRQ_SP_ELX
	bl	plat_report_exception
	b	IrqSPx
	check_vector_size IrqSPx

	.align	7
FiqSPx:
	mov	x0, #FIQ_SP_ELX
	bl	plat_report_exception
	b	FiqSPx
	check_vector_size FiqSPx

	.align	7
SErrorSPx:
	mov	x0, #SERROR_SP_ELX
	bl	plat_report_exception
	b	SErrorSPx
	check_vector_size SErrorSPx

	/* -----------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * The only expected entry: an SMC from the image BL1
	 * handed control to (e.g. BL2).
	 * -----------------------------------------------------
	 */
	.align	7
SynchronousExceptionA64:
	/* Enable the SError interrupt */
	msr	daifclr, #DAIF_ABT_BIT

	/*
	 * SP is SP_EL3 here and points at a context save area
	 * (offsets from context.h). Stash the caller's x30 so it
	 * can be used as the only scratch register until the full
	 * GP register save in smc_handler.
	 */
	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

	/* Expect only SMC exceptions: decode ESR_EL3.EC */
	mrs	x30, esr_el3
	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x30, #EC_AARCH64_SMC
	b.ne	unexpected_sync_exception

	b	smc_handler64
	check_vector_size SynchronousExceptionA64

	.align	7
IrqA64:
	mov	x0, #IRQ_AARCH64
	bl	plat_report_exception
	b	IrqA64
	check_vector_size IrqA64

	.align	7
FiqA64:
	mov	x0, #FIQ_AARCH64
	bl	plat_report_exception
	b	FiqA64
	check_vector_size FiqA64

	.align	7
SErrorA64:
	mov	x0, #SERROR_AARCH64
	bl	plat_report_exception
	b	SErrorA64
	check_vector_size SErrorA64

	/* -----------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * BL1 does not expect AArch32 callers; all unexpected.
	 * -----------------------------------------------------
	 */
	.align	7
SynchronousExceptionA32:
	mov	x0, #SYNC_EXCEPTION_AARCH32
	bl	plat_report_exception
	b	SynchronousExceptionA32
	check_vector_size SynchronousExceptionA32

	.align	7
IrqA32:
	mov	x0, #IRQ_AARCH32
	bl	plat_report_exception
	b	IrqA32
	check_vector_size IrqA32

	.align	7
FiqA32:
	mov	x0, #FIQ_AARCH32
	bl	plat_report_exception
	b	FiqA32
	check_vector_size FiqA32

	.align	7
SErrorA32:
	mov	x0, #SERROR_AARCH32
	bl	plat_report_exception
	b	SErrorA32
	check_vector_size SErrorA32


/* ---------------------------------------------------------------------------
 * smc_handler64
 *
 * Entry from SynchronousExceptionA64 once the exception is known to be an
 * AArch64 SMC.
 * In:   x0     = SMC function id (per SMC calling convention)
 *       x1-x7  = SMC arguments (x1 = entry_point_info_t * for RUN_IMAGE)
 *       SP     = SP_EL3, pointing at the context save area; the caller's
 *                x30 is already saved there
 * Out:  does not return through a normal call path: either ERETs into the
 *       next image (RUN_IMAGE), tail-branches to smc_handler for all other
 *       SMCs, or diverts to unexpected_sync_exception on a bad request.
 * ---------------------------------------------------------------------------
 */
func smc_handler64

	/* ----------------------------------------------
	 * Detect if this is a RUN_IMAGE or other SMC.
	 * x30 is the only register free before the GP
	 * save, so use it for the comparison.
	 * ----------------------------------------------
	 */
	mov	x30, #BL1_SMC_RUN_IMAGE
	cmp	x30, x0
	b.ne	smc_handler

	/* ------------------------------------------------
	 * Make sure only Secure world reaches here.
	 * tst sets Z when SCR_EL3.NS == 0 (secure caller);
	 * a non-secure caller (NS set, b.ne taken) must not
	 * be allowed to hand EL3 over to a new image.
	 * ------------------------------------------------
	 */
	mrs	x30, scr_el3
	tst	x30, #SCR_NS_BIT
	b.ne	unexpected_sync_exception

	/* ----------------------------------------------
	 * Handling RUN_IMAGE SMC. First switch back to
	 * SP_EL0 for the C runtime stack, restored from
	 * the slot saved in the context area before the
	 * last ERET from EL3.
	 * ----------------------------------------------
	 */
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]
	msr	spsel, #0
	mov	sp, x30

	/* ---------------------------------------------------------------------
	 * Pass EL3 control to BL31.
	 * Here it expects X1 with the address of a entry_point_info_t
	 * structure describing the BL31 entrypoint.
	 * x20 is callee-saved (AAPCS64), so the pointer survives the C calls
	 * below.
	 * ---------------------------------------------------------------------
	 */
	mov	x20, x1

	mov	x0, x20
	bl	bl1_print_bl31_ep_info

	/* Program the return state: PC -> ELR_EL3, SPSR -> SPSR_EL3 */
	ldp	x0, x1, [x20, #ENTRY_POINT_INFO_PC_OFFSET]
	msr	elr_el3, x0
	msr	spsr_el3, x1
	/*
	 * The next image must run at EL3: extract SPSR.M[3:2] (the EL field)
	 * and refuse anything else.
	 */
	ubfx	x0, x1, #MODE_EL_SHIFT, #2
	cmp	x0, #MODE_EL3
	b.ne	unexpected_sync_exception

	/* Hand over a clean MMU/TLB state to the next EL3 image */
	bl	disable_mmu_icache_el3
	tlbi	alle3

#if SPIN_ON_BL1_EXIT
	/* Debug aid: park here so a debugger can attach before BL31 runs */
	bl	print_debug_loop_message
debug_loop:
	b	debug_loop
#endif

	/* Last platform hook before leaving BL1 */
	mov	x0, x20
	bl	bl1_plat_prepare_exit

	/*
	 * Load the next image's arguments x0-x7 from the args array in the
	 * entry_point_info_t. Loaded high-to-low so x20 is consumed before
	 * x0/x1 are overwritten... actually x20 is untouched throughout;
	 * the order is simply descending by offset.
	 */
	ldp	x6, x7, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x30)]
	ldp	x4, x5, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x20)]
	ldp	x2, x3, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x10)]
	ldp	x0, x1, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x0)]
	eret
endfunc smc_handler64

	/*
	 * Terminal handler for any synchronous exception BL1 cannot service
	 * (non-SMC exception class, NS caller requesting RUN_IMAGE, or a
	 * RUN_IMAGE target not aiming at EL3). Reports once per loop
	 * iteration and idles in wfi.
	 */
unexpected_sync_exception:
	mov	x0, #SYNC_EXCEPTION_AARCH64
	bl	plat_report_exception
	wfi
	b	unexpected_sync_exception

	/* -----------------------------------------------------
	 * Save Secure/Normal world context and jump to
	 * BL1 SMC handler.
	 * -----------------------------------------------------
	 */
smc_handler:
	/* -----------------------------------------------------
	 * Save the GP registers x0-x29.
	 * TODO: Revisit to store only SMCC specified registers.
	 * -----------------------------------------------------
	 */
	bl	save_gp_registers

	/* -----------------------------------------------------
	 * Populate the parameters for the SMC handler. We
	 * already have x0-x4 in place. x5 will point to a
	 * cookie (not used now). x6 will point to the context
	 * structure (SP_EL3) and x7 will contain flags we need
	 * to pass to the handler.
	 * -----------------------------------------------------
	 */
	mov	x5, xzr
	mov	x6, sp

	/* -----------------------------------------------------
	 * Restore the saved C runtime stack value which will
	 * become the new SP_EL0 i.e. EL3 runtime stack. It was
	 * saved in the 'cpu_context' structure prior to the last
	 * ERET from EL3.
	 * -----------------------------------------------------
	 */
	ldr	x12, [x6, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ---------------------------------------------
	 * Switch back to SP_EL0 for the C runtime stack.
	 * ---------------------------------------------
	 */
	msr	spsel, #0
	mov	sp, x12

	/* -----------------------------------------------------
	 * Save the SPSR_EL3, ELR_EL3, & SCR_EL3 in case there
	 * is a world switch during SMC handling.
	 * -----------------------------------------------------
	 */
	mrs	x16, spsr_el3
	mrs	x17, elr_el3
	mrs	x18, scr_el3
	stp	x16, x17, [x6, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	str	x18, [x6, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]

	/*
	 * Copy SCR_EL3.NS bit to bit 0 of the flags argument (x7) to
	 * indicate the caller's security state; the remaining flag bits
	 * keep whatever the caller passed in x7.
	 */
	bfi	x7, x18, #0, #1

	/* -----------------------------------------------------
	 * Go to BL1 SMC handler.
	 * -----------------------------------------------------
	 */
	bl	bl1_smc_handler

	/* -----------------------------------------------------
	 * Do the transition to next BL image: el3_exit restores
	 * the saved context and ERETs.
	 * -----------------------------------------------------
	 */
	b	el3_exit