/*
 * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef __EP_INFO_H__
#define __EP_INFO_H__

#include <param_header.h>

#define SECURE		0x0
#define NON_SECURE	0x1
#define sec_state_is_valid(s)	(((s) == SECURE) || ((s) == NON_SECURE))

/*******************************************************************************
 * Constants that allow assembler code to access members of the
 * 'entry_point_info' structure at their correct offsets.
 ******************************************************************************/
#define ENTRY_POINT_INFO_PC_OFFSET	0x08
#ifdef AARCH32
#define ENTRY_POINT_INFO_ARGS_OFFSET	0x10
#else
#define ENTRY_POINT_INFO_ARGS_OFFSET	0x18
#endif
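/*
 * Illustrative sketch of how the offsets above are meant to be consumed from
 * AArch64 assembler; the register choices are arbitrary and the snippet is
 * not quoted from any particular file. With the address of an
 * 'entry_point_info' structure in x20, the entry point and the SPSR can be
 * loaded as a pair, since 'spsr' immediately follows 'pc' (see the
 * compile-time assertions at the end of this header):
 *
 *	ldp	x0, x1, [x20, #ENTRY_POINT_INFO_PC_OFFSET]
 *	msr	elr_el3, x0
 *	msr	spsr_el3, x1
 */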
/* The following are used to set/get image attributes. */
#define PARAM_EP_SECURITY_MASK		(0x1)

#define GET_SECURITY_STATE(x)		((x) & PARAM_EP_SECURITY_MASK)
#define SET_SECURITY_STATE(x, security) \
			((x) = ((x) & ~PARAM_EP_SECURITY_MASK) | (security))

#define EP_EE_MASK	0x2
#define EP_EE_LITTLE	0x0
#define EP_EE_BIG	0x2
#define EP_GET_EE(x)	((x) & EP_EE_MASK)
#define EP_SET_EE(x, ee)	((x) = ((x) & ~EP_EE_MASK) | (ee))

#define EP_ST_MASK	0x4
#define EP_ST_DISABLE	0x0
#define EP_ST_ENABLE	0x4
#define EP_GET_ST(x)	((x) & EP_ST_MASK)
#define EP_SET_ST(x, st)	((x) = ((x) & ~EP_ST_MASK) | (st))

#define EP_EXE_MASK	0x8
#define NON_EXECUTABLE	0x0
#define EXECUTABLE	0x8
#define EP_GET_EXE(x)	((x) & EP_EXE_MASK)
#define EP_SET_EXE(x, exe)	((x) = ((x) & ~EP_EXE_MASK) | (exe))

#define EP_FIRST_EXE_MASK	0x10
#define EP_FIRST_EXE		0x10
#define EP_GET_FIRST_EXE(x)	((x) & EP_FIRST_EXE_MASK)
#define EP_SET_FIRST_EXE(x, firstexe)	((x) = ((x) & ~EP_FIRST_EXE_MASK) | (firstexe))

#ifndef __ASSEMBLY__

#include <cassert.h>
#include <types.h>

typedef struct aapcs64_params {
	u_register_t arg0;
	u_register_t arg1;
	u_register_t arg2;
	u_register_t arg3;
	u_register_t arg4;
	u_register_t arg5;
	u_register_t arg6;
	u_register_t arg7;
} aapcs64_params_t;

typedef struct aapcs32_params {
	u_register_t arg0;
	u_register_t arg1;
	u_register_t arg2;
	u_register_t arg3;
} aapcs32_params_t;

/*****************************************************************************
 * This structure represents the superset of information needed while
 * switching exception levels. The only two mechanisms to do so are
 * ERET & SMC. The security state is indicated using bit zero of the header
 * attribute field.
 * NOTE: BL1 expects the entrypoint followed by the spsr at an offset from
 * the start of this structure defined by the macro
 * `ENTRY_POINT_INFO_PC_OFFSET` while processing the SMC to jump to BL31.
 *****************************************************************************/
typedef struct entry_point_info {
	param_header_t h;
	uintptr_t pc;
	uint32_t spsr;
#ifdef AARCH32
	aapcs32_params_t args;
#else
	aapcs64_params_t args;
#endif
} entry_point_info_t;
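/*
 * Minimal usage sketch, not part of the original interface: it shows how the
 * attribute accessors above combine with 'entry_point_info' when describing
 * an image that will be entered via ERET or SMC. The helper name, the chosen
 * attribute combination and the zeroed argument are assumptions made purely
 * for illustration.
 */
static inline void ep_info_example_init(entry_point_info_t *ep,
					uintptr_t pc, uint32_t spsr)
{
	ep->pc = pc;
	ep->spsr = spsr;

	/* Describe a little-endian, non-secure, executable payload. */
	SET_SECURITY_STATE(ep->h.attr, NON_SECURE);
	EP_SET_EE(ep->h.attr, EP_EE_LITTLE);
	EP_SET_ST(ep->h.attr, EP_ST_DISABLE);
	EP_SET_EXE(ep->h.attr, EXECUTABLE);

	/* Arguments are handed to the image through the AAPCS register block. */
	ep->args.arg0 = 0U;
}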
/*
 * Compile time assertions related to the 'entry_point_info' structure to
 * ensure that the assembler and the compiler view of the offsets of
 * the structure members is the same.
 */
CASSERT(ENTRY_POINT_INFO_PC_OFFSET ==
		__builtin_offsetof(entry_point_info_t, pc),
		assert_BL31_pc_offset_mismatch);

CASSERT(ENTRY_POINT_INFO_ARGS_OFFSET ==
		__builtin_offsetof(entry_point_info_t, args),
		assert_BL31_args_offset_mismatch);

CASSERT(sizeof(uintptr_t) ==
		__builtin_offsetof(entry_point_info_t, spsr) -
		__builtin_offsetof(entry_point_info_t, pc),
		assert_entrypoint_and_spsr_should_be_adjacent);

#endif /*__ASSEMBLY__*/

#endif /* __EP_INFO_H__ */