/*
 * Copyright (c) 2014, STMicroelectronics International N.V.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 * this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <asm.S>
#include <arm.h>
#include <arm32_macros.S>
#include <sm/teesmc.h>

	.section .text.sm_asm

/* Saves the mode specific registers to the context pointed to by r0 */
LOCAL_FUNC sm_save_modes_regs , :
	/* User mode registers have to be saved from system mode */
	cps	#CPSR_MODE_SYS
	stm	r0!, {sp, lr}

	cps	#CPSR_MODE_IRQ
	mrs	r2, spsr
	stm	r0!, {r2, sp, lr}

	cps	#CPSR_MODE_SVC
	mrs	r2, spsr
	stm	r0!, {r2, sp, lr}

	cps	#CPSR_MODE_ABT
	mrs	r2, spsr
	stm	r0!, {r2, sp, lr}

	cps	#CPSR_MODE_UND
	mrs	r2, spsr
	stm	r0!, {r2, sp, lr}

	cps	#CPSR_MODE_MON
	ldm	r1, {r2-r3}	/* Load SPSR and LR from the stack */
	stm	r0!, {r2-r3}	/* Store SPSR and LR in context */
	bx	lr
END_FUNC sm_save_modes_regs

/* Restores the mode specific registers */
LOCAL_FUNC sm_restore_modes_regs , :
	/* User mode registers have to be restored from system mode */
	cps	#CPSR_MODE_SYS
	ldm	r0!, {sp, lr}

	cps	#CPSR_MODE_IRQ
	ldm	r0!, {r2, sp, lr}
	msr	spsr_fsxc, r2

	cps	#CPSR_MODE_SVC
	ldm	r0!, {r2, sp, lr}
	msr	spsr_fsxc, r2

	cps	#CPSR_MODE_ABT
	ldm	r0!, {r2, sp, lr}
	msr	spsr_fsxc, r2

	cps	#CPSR_MODE_UND
	ldm	r0!, {r2, sp, lr}
	msr	spsr_fsxc, r2

	cps	#CPSR_MODE_MON
	ldm	r0!, {r2-r3}	/* Load SPSR and LR from context */
	stm	r1, {r2-r3}	/* Store SPSR and LR on the stack */
	bx	lr
END_FUNC sm_restore_modes_regs

LOCAL_FUNC sm_smc_entry , :
	srsdb	sp!, #CPSR_MODE_MON
	push	{r0-r3}
/* Positions relative to stack pointer */
#define SMC_ENTRY_R0R3_OFFS	0
#define SMC_ENTRY_SRS_OFFS	(4 * 4 + SMC_ENTRY_R0R3_OFFS)

	/* Clear the exclusive monitor */
	clrex

	/* Find out if we're doing a secure or non-secure entry */
	read_scr r1
	tst	r1, #SCR_NS
	bne	.smc_ret_to_sec

.smc_ret_to_nsec:
	/* Save secure context */
	bl	sm_get_sec_ctx
	add	r1, sp, #SMC_ENTRY_SRS_OFFS	/* Where srsdb wrote */
	bl	sm_save_modes_regs

	mov	r0, sp
	mov	r1, r4
	bl	sm_set_nsec_ret_vals

	/* Restore non-secure context */
	bl	sm_get_nsec_ctx
	add	r1, sp, #SMC_ENTRY_SRS_OFFS	/* Where srsdb wrote */
	bl	sm_restore_modes_regs
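	/*
	 * sm_restore_modes_regs returns with r0 advanced past the banked
	 * mode registers of the non-secure context, so r0 now points at
	 * the saved {r4-r12} restored below.
	 */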
	ldm	r0!, {r4-r12}

	/* Update SCR */
	read_scr r0
	orr	r0, r0, #(SCR_NS | SCR_FIQ)	/* Set NS and FIQ bits in SCR */
	write_scr r0

	b	.smc_exit

.smc_ret_to_sec:
	bic	r1, r1, #(SCR_NS | SCR_FIQ)	/* Clear NS and FIQ bits in SCR */
	write_scr r1

	/* Save non-secure context */
	push	{r12, lr}
	bl	sm_get_nsec_ctx
	pop	{r12, lr}
	add	r1, sp, #SMC_ENTRY_SRS_OFFS	/* Where srsdb wrote */
	bl	sm_save_modes_regs
	stm	r0!, {r4-r12}

	/*
	 * Update secure context with the vector depending on the SMC
	 * function, this also updates the entry reason.
	 */
	mov	r0, sp
	bl	sm_set_sec_smc_entry

	/* Restore secure context */
	bl	sm_get_sec_ctx
	add	r1, sp, #SMC_ENTRY_SRS_OFFS	/* Where srsdb wrote */
	bl	sm_restore_modes_regs

.smc_exit:
	pop	{r0-r3}
	rfefd	sp!
END_FUNC sm_smc_entry

/*
 * FIQ handling
 *
 * Saves the non-secure CPU context (retrieved with sm_get_nsec_ctx),
 * which is later restored by the SMC handler when handling a return
 * from FIQ.
 */
LOCAL_FUNC sm_fiq_entry , :
	/* FIQ has a +4 offset for lr compared to preferred return address */
	sub	lr, lr, #4
	srsdb	sp!, #CPSR_MODE_MON
	push	{r0-r3}
/* Positions relative to stack pointer */
#define FIQ_ENTRY_R0R3_OFFS	0
#define FIQ_ENTRY_SRS_OFFS	(4 * 4 + FIQ_ENTRY_R0R3_OFFS)

	/* Update SCR */
	read_scr r1
	bic	r1, r1, #(SCR_NS | SCR_FIQ)	/* Clear NS and FIQ bits in SCR */
	write_scr r1

	/* Save non-secure context */
	push	{r12, lr}
	bl	sm_get_nsec_ctx
	pop	{r12, lr}
	add	r1, sp, #FIQ_ENTRY_SRS_OFFS	/* Where srsdb wrote */
	bl	sm_save_modes_regs
	stm	r0!, {r4-r12}
	pop	{r1-r4}		/* r0-r3 pushed at entry */
	stm	r0!, {r1-r4}

	/* Update secure context with vector for FIQ handling */
	bl	sm_set_sec_fiq_entry

	/* Restore secure context */
	bl	sm_get_sec_ctx
	mov	r1, sp		/* No offset from sp now that {r0-r3} are popped */
	bl	sm_restore_modes_regs

	rfefd	sp!
END_FUNC sm_fiq_entry

	.align	5
LOCAL_FUNC sm_vect_table , :
	b	.		/* Reset */
	b	.		/* Undefined instruction */
	b	sm_smc_entry	/* Secure monitor call */
	b	.		/* Prefetch abort */
	b	.		/* Data abort */
	b	.		/* Reserved */
	b	.		/* IRQ */
	b	sm_fiq_entry	/* FIQ */
END_FUNC sm_vect_table

/* void sm_init(vaddr_t stack_pointer); */
FUNC sm_init , :
	push	{r0, lr}

	/* Set monitor stack */
	mrs	r1, cpsr
	cps	#CPSR_MODE_MON
	mov	sp, r0
	msr	cpsr, r1

	/* Set monitor vector (MVBAR) */
	ldr	r0, =sm_vect_table
	write_mvbar r0

	pop	{r0, pc}
END_FUNC sm_init
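
/*
 * Usage sketch (assumption, for illustration only): boot code is
 * expected to call sm_init() with a monitor stack for the current CPU
 * before the first SMC or FIQ can be taken to monitor mode, e.g.:
 *
 *	ldr	r0, =stack_sm_top	@ hypothetical stack symbol
 *	bl	sm_init
 */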