/*
 * Copyright (c) 2013-2016, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE 22*532ed618SSoby Mathew * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 23*532ed618SSoby Mathew * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 24*532ed618SSoby Mathew * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 25*532ed618SSoby Mathew * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 26*532ed618SSoby Mathew * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 27*532ed618SSoby Mathew * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 28*532ed618SSoby Mathew * POSSIBILITY OF SUCH DAMAGE. 29*532ed618SSoby Mathew */ 30*532ed618SSoby Mathew 31*532ed618SSoby Mathew #ifndef __CM_H__ 32*532ed618SSoby Mathew #define __CM_H__ 33*532ed618SSoby Mathew 34*532ed618SSoby Mathew #include <arch.h> 35*532ed618SSoby Mathew 36*532ed618SSoby Mathew /******************************************************************************* 37*532ed618SSoby Mathew * Forward declarations 38*532ed618SSoby Mathew ******************************************************************************/ 39*532ed618SSoby Mathew struct entry_point_info; 40*532ed618SSoby Mathew 41*532ed618SSoby Mathew /******************************************************************************* 42*532ed618SSoby Mathew * Function & variable prototypes 43*532ed618SSoby Mathew ******************************************************************************/ 44*532ed618SSoby Mathew void cm_init(void); 45*532ed618SSoby Mathew void *cm_get_context_by_mpidr(uint64_t mpidr, 46*532ed618SSoby Mathew uint32_t security_state) __deprecated; 47*532ed618SSoby Mathew void cm_set_context_by_mpidr(uint64_t mpidr, 48*532ed618SSoby Mathew void *context, 49*532ed618SSoby Mathew uint32_t security_state) __deprecated; 50*532ed618SSoby Mathew void *cm_get_context_by_index(unsigned int cpu_idx, 51*532ed618SSoby Mathew unsigned int security_state); 
52*532ed618SSoby Mathew void cm_set_context_by_index(unsigned int cpu_idx, 53*532ed618SSoby Mathew void *context, 54*532ed618SSoby Mathew unsigned int security_state); 55*532ed618SSoby Mathew void *cm_get_context(uint32_t security_state); 56*532ed618SSoby Mathew void cm_set_context(void *context, uint32_t security_state); 57*532ed618SSoby Mathew void cm_init_context(uint64_t mpidr, 58*532ed618SSoby Mathew const struct entry_point_info *ep) __deprecated; 59*532ed618SSoby Mathew void cm_init_my_context(const struct entry_point_info *ep); 60*532ed618SSoby Mathew void cm_init_context_by_index(unsigned int cpu_idx, 61*532ed618SSoby Mathew const struct entry_point_info *ep); 62*532ed618SSoby Mathew void cm_prepare_el3_exit(uint32_t security_state); 63*532ed618SSoby Mathew void cm_el1_sysregs_context_save(uint32_t security_state); 64*532ed618SSoby Mathew void cm_el1_sysregs_context_restore(uint32_t security_state); 65*532ed618SSoby Mathew void cm_set_elr_el3(uint32_t security_state, uintptr_t entrypoint); 66*532ed618SSoby Mathew void cm_set_elr_spsr_el3(uint32_t security_state, 67*532ed618SSoby Mathew uintptr_t entrypoint, uint32_t spsr); 68*532ed618SSoby Mathew void cm_write_scr_el3_bit(uint32_t security_state, 69*532ed618SSoby Mathew uint32_t bit_pos, 70*532ed618SSoby Mathew uint32_t value); 71*532ed618SSoby Mathew void cm_set_next_eret_context(uint32_t security_state); 72*532ed618SSoby Mathew uint32_t cm_get_scr_el3(uint32_t security_state); 73*532ed618SSoby Mathew 74*532ed618SSoby Mathew /* Inline definitions */ 75*532ed618SSoby Mathew 76*532ed618SSoby Mathew /******************************************************************************* 77*532ed618SSoby Mathew * This function is used to program the context that's used for exception 78*532ed618SSoby Mathew * return. 
This initializes the SP_EL3 to a pointer to a 'cpu_context' set for 79*532ed618SSoby Mathew * the required security state 80*532ed618SSoby Mathew ******************************************************************************/ 81*532ed618SSoby Mathew static inline void cm_set_next_context(void *context) 82*532ed618SSoby Mathew { 83*532ed618SSoby Mathew #if DEBUG 84*532ed618SSoby Mathew uint64_t sp_mode; 85*532ed618SSoby Mathew 86*532ed618SSoby Mathew /* 87*532ed618SSoby Mathew * Check that this function is called with SP_EL0 as the stack 88*532ed618SSoby Mathew * pointer 89*532ed618SSoby Mathew */ 90*532ed618SSoby Mathew __asm__ volatile("mrs %0, SPSel\n" 91*532ed618SSoby Mathew : "=r" (sp_mode)); 92*532ed618SSoby Mathew 93*532ed618SSoby Mathew assert(sp_mode == MODE_SP_EL0); 94*532ed618SSoby Mathew #endif 95*532ed618SSoby Mathew 96*532ed618SSoby Mathew __asm__ volatile("msr spsel, #1\n" 97*532ed618SSoby Mathew "mov sp, %0\n" 98*532ed618SSoby Mathew "msr spsel, #0\n" 99*532ed618SSoby Mathew : : "r" (context)); 100*532ed618SSoby Mathew } 101*532ed618SSoby Mathew #endif /* __CM_H__ */ 102