/*
 * Copyright (c) 2013-2017, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef __CM_H__
#define __CM_H__

#ifndef AARCH32
#include <arch.h>
#include <assert.h>
#include <stdint.h>
#endif

/*******************************************************************************
 * Forward declarations
 ******************************************************************************/
struct entry_point_info;

/*******************************************************************************
 * Function & variable prototypes
 ******************************************************************************/
void cm_init(void);
void *cm_get_context_by_index(unsigned int cpu_idx,
			      unsigned int security_state);
void cm_set_context_by_index(unsigned int cpu_idx,
			     void *context,
			     unsigned int security_state);
void *cm_get_context(uint32_t security_state);
void cm_set_context(void *context, uint32_t security_state);
void cm_init_my_context(const struct entry_point_info *ep);
void cm_init_context_by_index(unsigned int cpu_idx,
			      const struct entry_point_info *ep);
void cm_prepare_el3_exit(uint32_t security_state);

#ifndef AARCH32
void cm_el1_sysregs_context_save(uint32_t security_state);
void cm_el1_sysregs_context_restore(uint32_t security_state);
void cm_set_elr_el3(uint32_t security_state, uintptr_t entrypoint);
void cm_set_elr_spsr_el3(uint32_t security_state,
			 uintptr_t entrypoint, uint32_t spsr);
void cm_write_scr_el3_bit(uint32_t security_state,
			  uint32_t bit_pos,
			  uint32_t value);
void cm_set_next_eret_context(uint32_t security_state);
uint32_t cm_get_scr_el3(uint32_t security_state);

void cm_init_context(uint64_t mpidr,
		     const struct entry_point_info *ep) __deprecated;

void *cm_get_context_by_mpidr(uint64_t mpidr,
			      uint32_t security_state) __deprecated;
void cm_set_context_by_mpidr(uint64_t mpidr,
			     void *context,
			     uint32_t security_state) __deprecated;
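/*
 * Illustrative usage (editor's sketch, not part of the original header): a
 * BL31 runtime service typically initialises the calling CPU's context from
 * an entry_point_info and then programs the EL1/EL2 system registers before
 * leaving EL3. The wrapper function name below is hypothetical; NON_SECURE
 * is assumed to be the security state constant used elsewhere in this
 * codebase.
 *
 *	static void prepare_ns_world_entry(const struct entry_point_info *ep)
 *	{
 *		cm_init_my_context(ep);
 *		cm_prepare_el3_exit(NON_SECURE);
 *	}
 */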
/* Inline definitions */

/*******************************************************************************
 * This function programs the context used for exception return. It points
 * SP_EL3 to the 'cpu_context' structure set up for the required security
 * state.
 ******************************************************************************/
static inline void cm_set_next_context(void *context)
{
#if ENABLE_ASSERTIONS
	uint64_t sp_mode;

	/*
	 * Check that this function is called with SP_EL0 as the stack
	 * pointer
	 */
	__asm__ volatile("mrs %0, SPSel\n"
			 : "=r" (sp_mode));

	assert(sp_mode == MODE_SP_EL0);
#endif /* ENABLE_ASSERTIONS */

	__asm__ volatile("msr spsel, #1\n"
			 "mov sp, %0\n"
			 "msr spsel, #0\n"
			 : : "r" (context));
}

#else
void *cm_get_next_context(void);
#endif /* AARCH32 */

#endif /* __CM_H__ */
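/*
 * Illustrative usage of the exception-return helpers (editor's sketch, not
 * part of the original header): before returning from EL3 into a lower EL,
 * a caller such as a Secure Payload Dispatcher typically restores the saved
 * EL1 system registers and then selects the 'cpu_context' that the next ERET
 * will use. The wrapper function name below is hypothetical; SECURE is
 * assumed to be the security state constant used elsewhere in this codebase.
 *
 *	static void switch_to_secure_world(void)
 *	{
 *		cm_el1_sysregs_context_restore(SECURE);
 *		cm_set_next_eret_context(SECURE);
 *	}
 */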