/*
 * Copyright (c) 2013-2016, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef __CM_H__
#define __CM_H__

#include <arch.h>
#include <assert.h>
#include <stdint.h>

/*******************************************************************************
 * Forward declarations
 ******************************************************************************/
struct entry_point_info;

/*******************************************************************************
 * Function & variable prototypes
 ******************************************************************************/
void cm_init(void);
void *cm_get_context_by_index(unsigned int cpu_idx,
                              unsigned int security_state);
void cm_set_context_by_index(unsigned int cpu_idx,
                             void *context,
                             unsigned int security_state);
void *cm_get_context(uint32_t security_state);
void cm_set_context(void *context, uint32_t security_state);
void cm_init_my_context(const struct entry_point_info *ep);
void cm_init_context_by_index(unsigned int cpu_idx,
                              const struct entry_point_info *ep);
void cm_prepare_el3_exit(uint32_t security_state);

#ifndef AARCH32
void cm_el1_sysregs_context_save(uint32_t security_state);
void cm_el1_sysregs_context_restore(uint32_t security_state);
void cm_set_elr_el3(uint32_t security_state, uintptr_t entrypoint);
void cm_set_elr_spsr_el3(uint32_t security_state,
                         uintptr_t entrypoint, uint32_t spsr);
void cm_write_scr_el3_bit(uint32_t security_state,
                          uint32_t bit_pos,
                          uint32_t value);
void cm_set_next_eret_context(uint32_t security_state);
uint32_t cm_get_scr_el3(uint32_t security_state);

/* Deprecated MPIDR-based variants; prefer the CPU-index-based API above. */
void cm_init_context(uint64_t mpidr,
                     const struct entry_point_info *ep) __deprecated;

void *cm_get_context_by_mpidr(uint64_t mpidr,
                              uint32_t security_state) __deprecated;
void cm_set_context_by_mpidr(uint64_t mpidr,
                             void *context,
                             uint32_t security_state) __deprecated;

/* Inline definitions */
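/*
 * Illustrative usage sketch (not part of this interface): an EL3 runtime
 * service switching from the secure to the non-secure world would typically
 * pair the calls declared above as shown below. The function name
 * 'example_world_switch' is hypothetical, and SECURE/NON_SECURE are assumed
 * to be the security state identifiers defined elsewhere in the firmware.
 *
 *	static void example_world_switch(void)
 *	{
 *		// Save the outgoing (secure) world's EL1 system registers.
 *		cm_el1_sysregs_context_save(SECURE);
 *
 *		// Restore the incoming (non-secure) world's EL1 system
 *		// registers and select its context for the next ERET.
 *		cm_el1_sysregs_context_restore(NON_SECURE);
 *		cm_set_next_eret_context(NON_SECURE);
 *	}
 */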
/*******************************************************************************
 * This function programs the context that is used for exception return. It
 * initializes SP_EL3 to point to the 'cpu_context' set up for the required
 * security state.
 ******************************************************************************/
static inline void cm_set_next_context(void *context)
{
#if DEBUG
	uint64_t sp_mode;

	/*
	 * Check that this function is called with SP_EL0 as the stack
	 * pointer.
	 */
	__asm__ volatile("mrs	%0, SPSel\n"
			 : "=r" (sp_mode));

	assert(sp_mode == MODE_SP_EL0);
#endif

	/*
	 * Switch to SP_EL3, install the new context pointer as SP_EL3 and
	 * switch back to SP_EL0 so that the current C runtime stack is
	 * preserved.
	 */
	__asm__ volatile("msr	spsel, #1\n"
			 "mov	sp, %0\n"
			 "msr	spsel, #0\n"
			 : : "r" (context));
}

#else
void *cm_get_next_context(void);
#endif /* AARCH32 */

#endif /* __CM_H__ */
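/*
 * For reference, a minimal sketch of how the inline helper above is normally
 * consumed, assuming the conventions used in this codebase (the actual
 * definition lives in the context management implementation and may differ
 * between versions): cm_set_next_eret_context() looks up the 'cpu_context'
 * for the requested security state and installs it as the next ERET context.
 *
 *	void cm_set_next_eret_context(uint32_t security_state)
 *	{
 *		void *ctx = cm_get_context(security_state);
 *
 *		assert(ctx != NULL);
 *		cm_set_next_context(ctx);
 *	}
 */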