/*
 * This header file contains assembly-language definitions (assembly
 * macros, etc.) for this specific Xtensa processor's TIE extensions
 * and options.  It is customized to this Xtensa processor configuration.
 * This file is autogenerated, please do not edit.
 *
 * Copyright (C) 1999-2010 Tensilica Inc.
 *
 * SPDX-License-Identifier: GPL-2.0+
 */

#ifndef _XTENSA_CORE_TIE_ASM_H
#define _XTENSA_CORE_TIE_ASM_H

/*  Selection parameter values for save-area save/restore macros:  */
/*  Option vs. TIE:  */
#define XTHAL_SAS_TIE		0x0001	/* custom extension or coprocessor */
#define XTHAL_SAS_OPT		0x0002	/* optional (and not a coprocessor) */
#define XTHAL_SAS_ANYOT		0x0003	/* both of the above */
/*  Whether used automatically by compiler:  */
#define XTHAL_SAS_NOCC		0x0004	/* not used by compiler w/o special opts/code */
#define XTHAL_SAS_CC		0x0008	/* used by compiler without special opts/code */
#define XTHAL_SAS_ANYCC		0x000C	/* both of the above */
/*  ABI handling across function calls:  */
#define XTHAL_SAS_CALR		0x0010	/* caller-saved */
#define XTHAL_SAS_CALE		0x0020	/* callee-saved */
#define XTHAL_SAS_GLOB		0x0040	/* global across function calls (in thread) */
#define XTHAL_SAS_ANYABI	0x0070	/* all of the above three */
/*  Misc  */
#define XTHAL_SAS_ALL		0xFFFF	/* include all default NCP contents */
#define XTHAL_SAS3(optie,ccuse,abi)	( ((optie) & XTHAL_SAS_ANYOT) \
					| ((ccuse) & XTHAL_SAS_ANYCC) \
					| ((abi)   & XTHAL_SAS_ANYABI) )
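
/*
 * Example (illustrative sketch only): a value for the <select> or <alloc>
 * arguments of the macros below can be composed with XTHAL_SAS3.  For
 * instance, selecting only optional (non-TIE) state that the compiler
 * uses automatically and that is caller-saved:
 *
 *	XTHAL_SAS3(XTHAL_SAS_OPT, XTHAL_SAS_CC, XTHAL_SAS_CALR)
 *
 * which expands to (0x0002 | 0x0008 | 0x0010), i.e. 0x001A.
 */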

/*
 * Macro to save all non-coprocessor (extra) custom TIE and optional state
 * (not including zero-overhead loop registers).
 * Required parameters:
 *	ptr		Save area pointer address register (clobbered)
 *			(register must contain a 4 byte aligned address).
 *	at1..at4	Four temporary address registers (first XCHAL_NCP_NUM_ATMPS
 *			registers are clobbered, the remaining are unused).
 * Optional parameters:
 *	continue	If macro invoked as part of a larger store sequence, set to 1
 *			if this is not the first in the sequence.  Defaults to 0.
 *	ofs		Offset from start of larger sequence (from value of first ptr
 *			in sequence) at which to store.  Defaults to next available space
 *			(or 0 if <continue> is 0).
 *	select		Select what category(ies) of registers to store, as a bitmask
 *			(see XTHAL_SAS_xxx constants).  Defaults to all registers.
 *	alloc		Select what category(ies) of registers to allocate; if any
 *			category is selected here that is not in <select>, space for
 *			the corresponding registers is skipped without doing any store.
 */
	.macro xchal_ncp_store  ptr at1 at2 at3 at4  continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
	xchal_sa_start	\continue, \ofs
	// Optional global register used by default by the compiler:
	.ifeq (XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_GLOB) & ~(\select)
	xchal_sa_align	\ptr, 0, 1020, 4, 4
	rur.THREADPTR	\at1		// threadptr option
	s32i	\at1, \ptr, .Lxchal_ofs_+0
	.set	.Lxchal_ofs_, .Lxchal_ofs_ + 4
	.elseif ((XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_GLOB) & ~(\alloc)) == 0
	xchal_sa_align	\ptr, 0, 1020, 4, 4
	.set	.Lxchal_ofs_, .Lxchal_ofs_ + 4
	.endif
	// Optional caller-saved registers used by default by the compiler:
	.ifeq (XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_CALR) & ~(\select)
	xchal_sa_align	\ptr, 0, 1016, 4, 4
	rsr	\at1, ACCLO		// MAC16 option
	s32i	\at1, \ptr, .Lxchal_ofs_+0
	rsr	\at1, ACCHI		// MAC16 option
	s32i	\at1, \ptr, .Lxchal_ofs_+4
	.set	.Lxchal_ofs_, .Lxchal_ofs_ + 8
	.elseif ((XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_CALR) & ~(\alloc)) == 0
	xchal_sa_align	\ptr, 0, 1016, 4, 4
	.set	.Lxchal_ofs_, .Lxchal_ofs_ + 8
	.endif
	// Optional caller-saved registers not used by default by the compiler:
	.ifeq (XTHAL_SAS_OPT | XTHAL_SAS_NOCC | XTHAL_SAS_CALR) & ~(\select)
	xchal_sa_align	\ptr, 0, 1004, 4, 4
	rsr	\at1, M0		// MAC16 option
	s32i	\at1, \ptr, .Lxchal_ofs_+0
	rsr	\at1, M1		// MAC16 option
	s32i	\at1, \ptr, .Lxchal_ofs_+4
	rsr	\at1, M2		// MAC16 option
	s32i	\at1, \ptr, .Lxchal_ofs_+8
	rsr	\at1, M3		// MAC16 option
	s32i	\at1, \ptr, .Lxchal_ofs_+12
	rsr	\at1, SCOMPARE1		// conditional store option
	s32i	\at1, \ptr, .Lxchal_ofs_+16
	.set	.Lxchal_ofs_, .Lxchal_ofs_ + 20
	.elseif ((XTHAL_SAS_OPT | XTHAL_SAS_NOCC | XTHAL_SAS_CALR) & ~(\alloc)) == 0
	xchal_sa_align	\ptr, 0, 1004, 4, 4
	.set	.Lxchal_ofs_, .Lxchal_ofs_ + 20
	.endif
	.endm	// xchal_ncp_store
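
/*
 * Usage sketch (illustrative only; the registers a2..a6 are arbitrary
 * choices for the example, and the save area at a2 is assumed to have
 * been allocated by the caller):
 *
 *	// save all non-coprocessor extra state to the area pointed to by a2
 *	xchal_ncp_store	a2, a3, a4, a5, a6
 *
 * With the default arguments this stores THREADPTR, ACCLO/ACCHI, M0..M3
 * and SCOMPARE1; only the first XCHAL_NCP_NUM_ATMPS temporary registers
 * (here, a3) are actually clobbered.
 */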

/*
 * Macro to restore all non-coprocessor (extra) custom TIE and optional state
 * (not including zero-overhead loop registers).
 * Required parameters:
 *	ptr		Save area pointer address register (clobbered)
 *			(register must contain a 4 byte aligned address).
 *	at1..at4	Four temporary address registers (first XCHAL_NCP_NUM_ATMPS
 *			registers are clobbered, the remaining are unused).
 * Optional parameters:
 *	continue	If macro invoked as part of a larger load sequence, set to 1
 *			if this is not the first in the sequence.  Defaults to 0.
 *	ofs		Offset from start of larger sequence (from value of first ptr
 *			in sequence) at which to load.  Defaults to next available space
 *			(or 0 if <continue> is 0).
 *	select		Select what category(ies) of registers to load, as a bitmask
 *			(see XTHAL_SAS_xxx constants).  Defaults to all registers.
 *	alloc		Select what category(ies) of registers to allocate; if any
 *			category is selected here that is not in <select>, space for
 *			the corresponding registers is skipped without doing any load.
 */
	.macro xchal_ncp_load  ptr at1 at2 at3 at4  continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
	xchal_sa_start	\continue, \ofs
	// Optional global register used by default by the compiler:
	.ifeq (XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_GLOB) & ~(\select)
	xchal_sa_align	\ptr, 0, 1020, 4, 4
	l32i	\at1, \ptr, .Lxchal_ofs_+0
	wur.THREADPTR	\at1		// threadptr option
	.set	.Lxchal_ofs_, .Lxchal_ofs_ + 4
	.elseif ((XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_GLOB) & ~(\alloc)) == 0
	xchal_sa_align	\ptr, 0, 1020, 4, 4
	.set	.Lxchal_ofs_, .Lxchal_ofs_ + 4
	.endif
	// Optional caller-saved registers used by default by the compiler:
	.ifeq (XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_CALR) & ~(\select)
	xchal_sa_align	\ptr, 0, 1016, 4, 4
	l32i	\at1, \ptr, .Lxchal_ofs_+0
	wsr	\at1, ACCLO		// MAC16 option
	l32i	\at1, \ptr, .Lxchal_ofs_+4
	wsr	\at1, ACCHI		// MAC16 option
	.set	.Lxchal_ofs_, .Lxchal_ofs_ + 8
	.elseif ((XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_CALR) & ~(\alloc)) == 0
	xchal_sa_align	\ptr, 0, 1016, 4, 4
	.set	.Lxchal_ofs_, .Lxchal_ofs_ + 8
	.endif
	// Optional caller-saved registers not used by default by the compiler:
	.ifeq (XTHAL_SAS_OPT | XTHAL_SAS_NOCC | XTHAL_SAS_CALR) & ~(\select)
	xchal_sa_align	\ptr, 0, 1004, 4, 4
	l32i	\at1, \ptr, .Lxchal_ofs_+0
	wsr	\at1, M0		// MAC16 option
	l32i	\at1, \ptr, .Lxchal_ofs_+4
	wsr	\at1, M1		// MAC16 option
	l32i	\at1, \ptr, .Lxchal_ofs_+8
	wsr	\at1, M2		// MAC16 option
	l32i	\at1, \ptr, .Lxchal_ofs_+12
	wsr	\at1, M3		// MAC16 option
	l32i	\at1, \ptr, .Lxchal_ofs_+16
	wsr	\at1, SCOMPARE1		// conditional store option
	.set	.Lxchal_ofs_, .Lxchal_ofs_ + 20
	.elseif ((XTHAL_SAS_OPT | XTHAL_SAS_NOCC | XTHAL_SAS_CALR) & ~(\alloc)) == 0
	xchal_sa_align	\ptr, 0, 1004, 4, 4
	.set	.Lxchal_ofs_, .Lxchal_ofs_ + 20
	.endif
	.endm	// xchal_ncp_load
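
/*
 * Usage sketch (illustrative only; mirrors the store example above with
 * the same assumed registers):
 *
 *	// restore the state previously saved in the area pointed to by a2
 *	xchal_ncp_load	a2, a3, a4, a5, a6
 */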

#define XCHAL_NCP_NUM_ATMPS	1

#define XCHAL_SA_NUM_ATMPS	1

#endif /*_XTENSA_CORE_TIE_ASM_H*/