/*
 * Copyright (C) 2005 - 2013 Tensilica Inc.
 * Copyright (C) 2014 - 2016 Cadence Design Systems Inc.
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#ifndef _XTENSA_ASMMACRO_H
#define _XTENSA_ASMMACRO_H

#include <asm/arch/core.h>

/*
 * Function entry and return macros for supported ABIs.
 */

#if defined(__XTENSA_WINDOWED_ABI__)
#define abi_entry	entry	sp, 16
#define abi_ret		retw
#elif defined(__XTENSA_CALL0_ABI__)
#define abi_entry
#define abi_ret		ret
#else
#error Unsupported Xtensa ABI
#endif

/*
 * Some little helpers for loops. Use zero-overhead-loops
 * where applicable and if supported by the processor.
 *
 * __loopi ar, at, size, incr
 *	ar	register initialized with the start address
 *	at	scratch register used by macro
 *	size	size immediate value
 *	incr	increment
 *
 * __loops ar, as, at, incr_log2[, mask_log2][, cond][, ncond]
 *	ar	register initialized with the start address
 *	as	register initialized with the size
 *	at	scratch register used by macro
 *	incr_log2	increment [in log2]
 *	mask_log2	mask [in log2]
 *	cond		true condition (used in loop'cond')
 *	ncond		false condition (used in b'ncond')
 *
 * __loop as
 *	restart loop. 'as' register must not have been modified!
 *
 * __endla ar, as, incr
 *	ar	start address (modified)
 *	as	scratch register used by __loops/__loopi macros or
 *		end address used by __loopt macro
 *	incr	increment
 *
 * Label convention: the loop body is bracketed by local labels 98 (top,
 * branch target of the software-loop fallback) and 99 (bottom, target of
 * the hardware 'loop' instruction); __endl/__endla emit those labels.
 */

#if XCHAL_HAVE_LOOPS

/* Hardware zero-overhead loop over an immediate byte count. */
	.macro	__loopi	ar, at, size, incr
	movi	\at, ((\size + \incr - 1) / (\incr))
	loop	\at, 99f
	.endm

/*
 * Hardware loop over a byte count held in a register; the count is
 * rounded up and scaled down by incr_log2 (optionally masked) to get
 * the iteration count.  'cond' selects loop/loopnez/loopgtz.
 */
	.macro	__loops	ar, as, at, incr_log2, mask_log2, cond, ncond
	.ifgt	\incr_log2 - 1
	addi	\at, \as, (1 << \incr_log2) - 1
	.ifnc	\mask_log2,
	extui	\at, \at, \incr_log2, \mask_log2
	.else
	srli	\at, \at, \incr_log2
	.endif
	.endif
	loop\cond	\at, 99f
	.endm

/* Hardware loop from start address \ar up to end address \as. */
	.macro	__loopt	ar, as, at, incr_log2
	sub	\at, \as, \ar
	.ifgt	\incr_log2 - 1
	addi	\at, \at, (1 << \incr_log2) - 1
	srli	\at, \at, \incr_log2
	.endif
	loop	\at, 99f
	.endm

/* Restart a hardware loop whose count register \as is still intact. */
	.macro	__loop	as
	loop	\as, 99f
	.endm

/* Close a hardware loop: only the bottom label is needed. */
	.macro	__endl	ar, as
99:
	.endm

#else

/*
 * Software fallback: compute the end address into \at; __endl branches
 * back while \ar < \at.  (The iteration count computed by the hardware
 * variant is not needed here — it was dead code and has been dropped.)
 */
	.macro	__loopi	ar, at, size, incr
	addi	\at, \ar, \size
98:
	.endm

/* Software fallback for __loops: derive the end address from \as. */
	.macro	__loops	ar, as, at, incr_log2, mask_log2, cond, ncond
	.ifnc	\mask_log2,
	extui	\at, \as, \incr_log2, \mask_log2
	.else
	.ifnc	\ncond,
	srli	\at, \as, \incr_log2
	.endif
	.endif
	.ifnc	\ncond,
	b\ncond	\at, 99f

	.endif
	.ifnc	\mask_log2,
	slli	\at, \at, \incr_log2
	add	\at, \ar, \at
	.else
	add	\at, \ar, \as
	.endif
98:
	.endm

/* Software fallback for __loopt: \as already holds the end address. */
	.macro	__loopt	ar, as, at, incr_log2
98:
	.endm

/* Software fallback for __loop: just mark the top of the loop body. */
	.macro	__loop	as
98:
	.endm

/* Close a software loop: branch back while \ar is below end address \as. */
	.macro	__endl	ar, as
	bltu	\ar, \as, 98b
99:
	.endm

#endif

/* Advance \ar by \incr and close the loop (both loop flavours). */
	.macro	__endla	ar, as, incr
	addi	\ar, \ar, \incr
	__endl	\ar \as
	.endm

#endif	/* _XTENSA_ASMMACRO_H */