/*
 * tie-asm.h -- compile-time HAL assembler definitions dependent on CORE & TIE
 *
 * NOTE:  This header file is not meant to be included directly.
 */

/* This header file contains assembly-language definitions (assembly
   macros, etc.) for this specific Xtensa processor's TIE extensions
   and options.  It is customized to this Xtensa processor configuration.

   Copyright (c) 1999-2010 Tensilica Inc.

   Permission is hereby granted, free of charge, to any person obtaining
   a copy of this software and associated documentation files (the
   "Software"), to deal in the Software without restriction, including
   without limitation the rights to use, copy, modify, merge, publish,
   distribute, sublicense, and/or sell copies of the Software, and to
   permit persons to whom the Software is furnished to do so, subject to
   the following conditions:

   The above copyright notice and this permission notice shall be included
   in all copies or substantial portions of the Software.

   THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
   EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
   MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
27*4882a593Smuzhiyun IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 28*4882a593Smuzhiyun CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, 29*4882a593Smuzhiyun TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 30*4882a593Smuzhiyun SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ 31*4882a593Smuzhiyun 32*4882a593Smuzhiyun #ifndef _XTENSA_CORE_TIE_ASM_H 33*4882a593Smuzhiyun #define _XTENSA_CORE_TIE_ASM_H 34*4882a593Smuzhiyun 35*4882a593Smuzhiyun /* Selection parameter values for save-area save/restore macros: */ 36*4882a593Smuzhiyun /* Option vs. TIE: */ 37*4882a593Smuzhiyun #define XTHAL_SAS_TIE 0x0001 /* custom extension or coprocessor */ 38*4882a593Smuzhiyun #define XTHAL_SAS_OPT 0x0002 /* optional (and not a coprocessor) */ 39*4882a593Smuzhiyun #define XTHAL_SAS_ANYOT 0x0003 /* both of the above */ 40*4882a593Smuzhiyun /* Whether used automatically by compiler: */ 41*4882a593Smuzhiyun #define XTHAL_SAS_NOCC 0x0004 /* not used by compiler w/o special opts/code */ 42*4882a593Smuzhiyun #define XTHAL_SAS_CC 0x0008 /* used by compiler without special opts/code */ 43*4882a593Smuzhiyun #define XTHAL_SAS_ANYCC 0x000C /* both of the above */ 44*4882a593Smuzhiyun /* ABI handling across function calls: */ 45*4882a593Smuzhiyun #define XTHAL_SAS_CALR 0x0010 /* caller-saved */ 46*4882a593Smuzhiyun #define XTHAL_SAS_CALE 0x0020 /* callee-saved */ 47*4882a593Smuzhiyun #define XTHAL_SAS_GLOB 0x0040 /* global across function calls (in thread) */ 48*4882a593Smuzhiyun #define XTHAL_SAS_ANYABI 0x0070 /* all of the above three */ 49*4882a593Smuzhiyun /* Misc */ 50*4882a593Smuzhiyun #define XTHAL_SAS_ALL 0xFFFF /* include all default NCP contents */ 51*4882a593Smuzhiyun #define XTHAL_SAS3(optie,ccuse,abi) ( ((optie) & XTHAL_SAS_ANYOT) \ 52*4882a593Smuzhiyun | ((ccuse) & XTHAL_SAS_ANYCC) \ 53*4882a593Smuzhiyun | ((abi) & XTHAL_SAS_ANYABI) ) 54*4882a593Smuzhiyun 55*4882a593Smuzhiyun 56*4882a593Smuzhiyun 57*4882a593Smuzhiyun 
/* 58*4882a593Smuzhiyun * Macro to save all non-coprocessor (extra) custom TIE and optional state 59*4882a593Smuzhiyun * (not including zero-overhead loop registers). 60*4882a593Smuzhiyun * Required parameters: 61*4882a593Smuzhiyun * ptr Save area pointer address register (clobbered) 62*4882a593Smuzhiyun * (register must contain a 4 byte aligned address). 63*4882a593Smuzhiyun * at1..at4 Four temporary address registers (first XCHAL_NCP_NUM_ATMPS 64*4882a593Smuzhiyun * registers are clobbered, the remaining are unused). 65*4882a593Smuzhiyun * Optional parameters: 66*4882a593Smuzhiyun * continue If macro invoked as part of a larger store sequence, set to 1 67*4882a593Smuzhiyun * if this is not the first in the sequence. Defaults to 0. 68*4882a593Smuzhiyun * ofs Offset from start of larger sequence (from value of first ptr 69*4882a593Smuzhiyun * in sequence) at which to store. Defaults to next available space 70*4882a593Smuzhiyun * (or 0 if <continue> is 0). 71*4882a593Smuzhiyun * select Select what category(ies) of registers to store, as a bitmask 72*4882a593Smuzhiyun * (see XTHAL_SAS_xxx constants). Defaults to all registers. 73*4882a593Smuzhiyun * alloc Select what category(ies) of registers to allocate; if any 74*4882a593Smuzhiyun * category is selected here that is not in <select>, space for 75*4882a593Smuzhiyun * the corresponding registers is skipped without doing any store. 
76*4882a593Smuzhiyun */ 77*4882a593Smuzhiyun .macro xchal_ncp_store ptr at1 at2 at3 at4 continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0 78*4882a593Smuzhiyun xchal_sa_start \continue, \ofs 79*4882a593Smuzhiyun // Optional global register used by default by the compiler: 80*4882a593Smuzhiyun .ifeq (XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_GLOB) & ~(\select) 81*4882a593Smuzhiyun xchal_sa_align \ptr, 0, 1020, 4, 4 82*4882a593Smuzhiyun rur.THREADPTR \at1 // threadptr option 83*4882a593Smuzhiyun s32i \at1, \ptr, .Lxchal_ofs_+0 84*4882a593Smuzhiyun .set .Lxchal_ofs_, .Lxchal_ofs_ + 4 85*4882a593Smuzhiyun .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_GLOB) & ~(\alloc)) == 0 86*4882a593Smuzhiyun xchal_sa_align \ptr, 0, 1020, 4, 4 87*4882a593Smuzhiyun .set .Lxchal_ofs_, .Lxchal_ofs_ + 4 88*4882a593Smuzhiyun .endif 89*4882a593Smuzhiyun // Optional caller-saved registers used by default by the compiler: 90*4882a593Smuzhiyun .ifeq (XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_CALR) & ~(\select) 91*4882a593Smuzhiyun xchal_sa_align \ptr, 0, 1016, 4, 4 92*4882a593Smuzhiyun rsr \at1, ACCLO // MAC16 option 93*4882a593Smuzhiyun s32i \at1, \ptr, .Lxchal_ofs_+0 94*4882a593Smuzhiyun rsr \at1, ACCHI // MAC16 option 95*4882a593Smuzhiyun s32i \at1, \ptr, .Lxchal_ofs_+4 96*4882a593Smuzhiyun .set .Lxchal_ofs_, .Lxchal_ofs_ + 8 97*4882a593Smuzhiyun .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_CALR) & ~(\alloc)) == 0 98*4882a593Smuzhiyun xchal_sa_align \ptr, 0, 1016, 4, 4 99*4882a593Smuzhiyun .set .Lxchal_ofs_, .Lxchal_ofs_ + 8 100*4882a593Smuzhiyun .endif 101*4882a593Smuzhiyun // Optional caller-saved registers not used by default by the compiler: 102*4882a593Smuzhiyun .ifeq (XTHAL_SAS_OPT | XTHAL_SAS_NOCC | XTHAL_SAS_CALR) & ~(\select) 103*4882a593Smuzhiyun xchal_sa_align \ptr, 0, 1004, 4, 4 104*4882a593Smuzhiyun rsr \at1, M0 // MAC16 option 105*4882a593Smuzhiyun s32i \at1, \ptr, .Lxchal_ofs_+0 106*4882a593Smuzhiyun rsr \at1, M1 // MAC16 option 107*4882a593Smuzhiyun s32i \at1, \ptr, 
.Lxchal_ofs_+4 108*4882a593Smuzhiyun rsr \at1, M2 // MAC16 option 109*4882a593Smuzhiyun s32i \at1, \ptr, .Lxchal_ofs_+8 110*4882a593Smuzhiyun rsr \at1, M3 // MAC16 option 111*4882a593Smuzhiyun s32i \at1, \ptr, .Lxchal_ofs_+12 112*4882a593Smuzhiyun rsr \at1, SCOMPARE1 // conditional store option 113*4882a593Smuzhiyun s32i \at1, \ptr, .Lxchal_ofs_+16 114*4882a593Smuzhiyun .set .Lxchal_ofs_, .Lxchal_ofs_ + 20 115*4882a593Smuzhiyun .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_NOCC | XTHAL_SAS_CALR) & ~(\alloc)) == 0 116*4882a593Smuzhiyun xchal_sa_align \ptr, 0, 1004, 4, 4 117*4882a593Smuzhiyun .set .Lxchal_ofs_, .Lxchal_ofs_ + 20 118*4882a593Smuzhiyun .endif 119*4882a593Smuzhiyun .endm // xchal_ncp_store 120*4882a593Smuzhiyun 121*4882a593Smuzhiyun /* 122*4882a593Smuzhiyun * Macro to restore all non-coprocessor (extra) custom TIE and optional state 123*4882a593Smuzhiyun * (not including zero-overhead loop registers). 124*4882a593Smuzhiyun * Required parameters: 125*4882a593Smuzhiyun * ptr Save area pointer address register (clobbered) 126*4882a593Smuzhiyun * (register must contain a 4 byte aligned address). 127*4882a593Smuzhiyun * at1..at4 Four temporary address registers (first XCHAL_NCP_NUM_ATMPS 128*4882a593Smuzhiyun * registers are clobbered, the remaining are unused). 129*4882a593Smuzhiyun * Optional parameters: 130*4882a593Smuzhiyun * continue If macro invoked as part of a larger load sequence, set to 1 131*4882a593Smuzhiyun * if this is not the first in the sequence. Defaults to 0. 132*4882a593Smuzhiyun * ofs Offset from start of larger sequence (from value of first ptr 133*4882a593Smuzhiyun * in sequence) at which to load. Defaults to next available space 134*4882a593Smuzhiyun * (or 0 if <continue> is 0). 135*4882a593Smuzhiyun * select Select what category(ies) of registers to load, as a bitmask 136*4882a593Smuzhiyun * (see XTHAL_SAS_xxx constants). Defaults to all registers. 
137*4882a593Smuzhiyun * alloc Select what category(ies) of registers to allocate; if any 138*4882a593Smuzhiyun * category is selected here that is not in <select>, space for 139*4882a593Smuzhiyun * the corresponding registers is skipped without doing any load. 140*4882a593Smuzhiyun */ 141*4882a593Smuzhiyun .macro xchal_ncp_load ptr at1 at2 at3 at4 continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0 142*4882a593Smuzhiyun xchal_sa_start \continue, \ofs 143*4882a593Smuzhiyun // Optional global register used by default by the compiler: 144*4882a593Smuzhiyun .ifeq (XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_GLOB) & ~(\select) 145*4882a593Smuzhiyun xchal_sa_align \ptr, 0, 1020, 4, 4 146*4882a593Smuzhiyun l32i \at1, \ptr, .Lxchal_ofs_+0 147*4882a593Smuzhiyun wur.THREADPTR \at1 // threadptr option 148*4882a593Smuzhiyun .set .Lxchal_ofs_, .Lxchal_ofs_ + 4 149*4882a593Smuzhiyun .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_GLOB) & ~(\alloc)) == 0 150*4882a593Smuzhiyun xchal_sa_align \ptr, 0, 1020, 4, 4 151*4882a593Smuzhiyun .set .Lxchal_ofs_, .Lxchal_ofs_ + 4 152*4882a593Smuzhiyun .endif 153*4882a593Smuzhiyun // Optional caller-saved registers used by default by the compiler: 154*4882a593Smuzhiyun .ifeq (XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_CALR) & ~(\select) 155*4882a593Smuzhiyun xchal_sa_align \ptr, 0, 1016, 4, 4 156*4882a593Smuzhiyun l32i \at1, \ptr, .Lxchal_ofs_+0 157*4882a593Smuzhiyun wsr \at1, ACCLO // MAC16 option 158*4882a593Smuzhiyun l32i \at1, \ptr, .Lxchal_ofs_+4 159*4882a593Smuzhiyun wsr \at1, ACCHI // MAC16 option 160*4882a593Smuzhiyun .set .Lxchal_ofs_, .Lxchal_ofs_ + 8 161*4882a593Smuzhiyun .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_CALR) & ~(\alloc)) == 0 162*4882a593Smuzhiyun xchal_sa_align \ptr, 0, 1016, 4, 4 163*4882a593Smuzhiyun .set .Lxchal_ofs_, .Lxchal_ofs_ + 8 164*4882a593Smuzhiyun .endif 165*4882a593Smuzhiyun // Optional caller-saved registers not used by default by the compiler: 166*4882a593Smuzhiyun .ifeq (XTHAL_SAS_OPT | XTHAL_SAS_NOCC | 
XTHAL_SAS_CALR) & ~(\select) 167*4882a593Smuzhiyun xchal_sa_align \ptr, 0, 1004, 4, 4 168*4882a593Smuzhiyun l32i \at1, \ptr, .Lxchal_ofs_+0 169*4882a593Smuzhiyun wsr \at1, M0 // MAC16 option 170*4882a593Smuzhiyun l32i \at1, \ptr, .Lxchal_ofs_+4 171*4882a593Smuzhiyun wsr \at1, M1 // MAC16 option 172*4882a593Smuzhiyun l32i \at1, \ptr, .Lxchal_ofs_+8 173*4882a593Smuzhiyun wsr \at1, M2 // MAC16 option 174*4882a593Smuzhiyun l32i \at1, \ptr, .Lxchal_ofs_+12 175*4882a593Smuzhiyun wsr \at1, M3 // MAC16 option 176*4882a593Smuzhiyun l32i \at1, \ptr, .Lxchal_ofs_+16 177*4882a593Smuzhiyun wsr \at1, SCOMPARE1 // conditional store option 178*4882a593Smuzhiyun .set .Lxchal_ofs_, .Lxchal_ofs_ + 20 179*4882a593Smuzhiyun .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_NOCC | XTHAL_SAS_CALR) & ~(\alloc)) == 0 180*4882a593Smuzhiyun xchal_sa_align \ptr, 0, 1004, 4, 4 181*4882a593Smuzhiyun .set .Lxchal_ofs_, .Lxchal_ofs_ + 20 182*4882a593Smuzhiyun .endif 183*4882a593Smuzhiyun .endm // xchal_ncp_load 184*4882a593Smuzhiyun 185*4882a593Smuzhiyun 186*4882a593Smuzhiyun #define XCHAL_NCP_NUM_ATMPS 1 187*4882a593Smuzhiyun 188*4882a593Smuzhiyun 189*4882a593Smuzhiyun 190*4882a593Smuzhiyun #define XCHAL_SA_NUM_ATMPS 1 191*4882a593Smuzhiyun 192*4882a593Smuzhiyun #endif /*_XTENSA_CORE_TIE_ASM_H*/ 193*4882a593Smuzhiyun 194