/*
 * Copyright (c) 2013-2017, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef __CM_H__
#define __CM_H__

#ifndef AARCH32
#include <arch.h>
#endif

/*******************************************************************************
 * Forward declarations
 ******************************************************************************/
struct entry_point_info;

/*******************************************************************************
 * Function & variable prototypes
 ******************************************************************************/
void cm_init(void);
void *cm_get_context_by_index(unsigned int cpu_idx,
			      unsigned int security_state);
void cm_set_context_by_index(unsigned int cpu_idx,
			     void *context,
			     unsigned int security_state);
void *cm_get_context(uint32_t security_state);
void cm_set_context(void *context, uint32_t security_state);
void cm_init_my_context(const struct entry_point_info *ep);
void cm_init_context_by_index(unsigned int cpu_idx,
			      const struct entry_point_info *ep);
void cm_prepare_el3_exit(uint32_t security_state);

#ifndef AARCH32
void cm_el1_sysregs_context_save(uint32_t security_state);
void cm_el1_sysregs_context_restore(uint32_t security_state);
void cm_set_elr_el3(uint32_t security_state, uintptr_t entrypoint);
void cm_set_elr_spsr_el3(uint32_t security_state,
			 uintptr_t entrypoint, uint32_t spsr);
void cm_write_scr_el3_bit(uint32_t security_state,
			  uint32_t bit_pos,
			  uint32_t value);
void cm_set_next_eret_context(uint32_t security_state);
uint32_t cm_get_scr_el3(uint32_t security_state);

void cm_init_context(uint64_t mpidr,
		     const struct entry_point_info *ep) __deprecated;

void *cm_get_context_by_mpidr(uint64_t mpidr,
			      uint32_t security_state) __deprecated;
void cm_set_context_by_mpidr(uint64_t mpidr,
			     void *context,
			     uint32_t security_state) __deprecated;

/* Inline definitions */

/*******************************************************************************
 * This function programs the context used for exception return. It
 * initializes SP_EL3 to point to the 'cpu_context' set up for the required
 * security state.
 ******************************************************************************/
static inline void cm_set_next_context(void *context)
{
#if DEBUG
	uint64_t sp_mode;

	/*
	 * Check that this function is called with SP_EL0 as the stack
	 * pointer.
	 */
	__asm__ volatile("mrs	%0, SPSel\n"
			 : "=r" (sp_mode));

	assert(sp_mode == MODE_SP_EL0);
#endif

	__asm__ volatile("msr	spsel, #1\n"
			 "mov	sp, %0\n"
			 "msr	spsel, #0\n"
			 : : "r" (context));
}

#else
void *cm_get_next_context(void);
#endif /* AARCH32 */

#endif /* __CM_H__ */
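/*
 * Illustrative usage sketch (not part of the original header): a typical
 * AArch64 BL31 caller, e.g. a dispatcher returning to the Normal world on
 * the current CPU, would combine the APIs declared above roughly as below.
 * The wrapper name prepare_ns_eret() is hypothetical; NON_SECURE is assumed
 * to be the security state constant used with these APIs elsewhere in the
 * firmware.
 *
 *	static void prepare_ns_eret(void)
 *	{
 *		// The calling CPU must already own a non-secure context
 *		assert(cm_get_context(NON_SECURE) != NULL);
 *
 *		// Restore the EL1 system registers saved on the last world switch
 *		cm_el1_sysregs_context_restore(NON_SECURE);
 *
 *		// Program SP_EL3 to use the non-secure 'cpu_context' on ERET
 *		cm_set_next_eret_context(NON_SECURE);
 *	}
 */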