/*
 * Copyright (C) 2005 - 2013 Tensilica Inc.
 * Copyright (C) 2014 - 2016 Cadence Design Systems Inc.
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#ifndef _XTENSA_ASMMACRO_H
#define _XTENSA_ASMMACRO_H

#include <asm/arch/core.h>

/*
 * Function entry and return macros for supported ABIs.
 */

#if defined(__XTENSA_WINDOWED_ABI__)
#define abi_entry	entry	sp, 16
#define abi_ret		retw
#elif defined(__XTENSA_CALL0_ABI__)
#define abi_entry
#define abi_ret		ret
#else
#error Unsupported Xtensa ABI
#endif
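
/*
 * Usage sketch (illustrative only, not taken from an existing caller;
 * the symbol name is made up for the example): a routine that must
 * assemble under either supported ABI opens with abi_entry and returns
 * with abi_ret.  Under the windowed ABI these expand to 'entry sp, 16'
 * (a minimal 16-byte frame) and 'retw'; under the call0 ABI abi_entry
 * expands to nothing and abi_ret to a plain 'ret'.
 *
 *	.text
 *	.global	example_func
 * example_func:
 *	abi_entry
 *	...function body; incoming arguments are in a2..a7...
 *	abi_ret
 */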

/*
 * Some little helpers for loops. Use zero-overhead loops
 * where applicable and if supported by the processor.
 *
 * __loopi ar, at, size, incr
 *	ar		register initialized with the start address
 *	at		scratch register used by macro
 *	size		size immediate value
 *	incr		increment
 *
 * __loops ar, as, at, incr_log2[, mask_log2][, cond][, ncond]
 *	ar		register initialized with the start address
 *	as		register initialized with the size
 *	at		scratch register used by macro
 *	incr_log2	increment [in log2]
 *	mask_log2	mask [in log2]
 *	cond		true condition (used in loop'cond')
 *	ncond		false condition (used in b'ncond')
 *
 * __loopt ar, as, at, incr_log2
 *	ar		register initialized with the start address
 *	as		register initialized with the end address
 *	at		scratch register used by macro
 *	incr_log2	increment [in log2]
 *
 * __loop as
 *	restart loop. 'as' register must not have been modified!
 *
 * __endla ar, as, incr
 *	ar		start address (modified)
 *	as		scratch register used by __loops/__loopi macros or
 *			end address used by __loopt macro
 *	incr		increment
 */
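
/*
 * Usage sketch (illustrative only; the register choice and the zeroing
 * loop are assumptions for the example, not code used elsewhere in
 * U-Boot): clearing a 32-byte, word-aligned block whose start address
 * is in a2, with a3 holding the zero value and a4 used as scratch:
 *
 *	movi	a3, 0
 *	__loopi	a2, a4, 32, 4
 *	s32i	a3, a2, 0
 *	__endla	a2, a4, 4
 *
 * With XCHAL_HAVE_LOOPS this expands to a zero-overhead 'loop' ending
 * at local label 99; otherwise the fallback macros compute the end
 * address in a4 and __endl emits an explicit 'bltu' back to label 98.
 */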

#if XCHAL_HAVE_LOOPS

	.macro	__loopi ar, at, size, incr
	movi	\at, ((\size + \incr - 1) / (\incr))
	loop	\at, 99f
	.endm

	.macro	__loops	ar, as, at, incr_log2, mask_log2, cond, ncond
	.ifgt \incr_log2 - 1
	addi	\at, \as, (1 << \incr_log2) - 1
	.ifnc \mask_log2,
	extui	\at, \at, \incr_log2, \mask_log2
	.else
	srli	\at, \at, \incr_log2
	.endif
	.endif
	loop\cond	\at, 99f
	.endm

	.macro	__loopt	ar, as, at, incr_log2
	sub	\at, \as, \ar
	.ifgt	\incr_log2 - 1
	addi	\at, \at, (1 << \incr_log2) - 1
	srli	\at, \at, \incr_log2
	.endif
	loop	\at, 99f
	.endm

	.macro	__loop	as
	loop	\as, 99f
	.endm

	.macro	__endl	ar, as
99:
	.endm

#else

/*
 * No zero-overhead loop support: the loop-setup macros place the end
 * address in the scratch register (for __loopt it is already in 'as')
 * and mark the start of the loop body with local label 98; __endl then
 * closes the loop with an explicit 'bltu' back to 98.
 */

	.macro	__loopi ar, at, size, incr
	movi	\at, ((\size + \incr - 1) / (\incr))
	addi	\at, \ar, \size
98:
	.endm

	.macro	__loops	ar, as, at, incr_log2, mask_log2, cond, ncond
	.ifnc \mask_log2,
	extui	\at, \as, \incr_log2, \mask_log2
	.else
	.ifnc \ncond,
	srli	\at, \as, \incr_log2
	.endif
	.endif
	.ifnc \ncond,
	b\ncond	\at, 99f
	.endif
	.ifnc \mask_log2,
	slli	\at, \at, \incr_log2
	add	\at, \ar, \at
	.else
	add	\at, \ar, \as
	.endif
98:
	.endm

	.macro	__loopt	ar, as, at, incr_log2
98:
	.endm

	.macro	__loop	as
98:
	.endm

	.macro	__endl	ar, as
	bltu	\ar, \as, 98b
99:
	.endm

#endif

	.macro	__endla	ar, as, incr
	addi	\ar, \ar, \incr
	__endl	\ar \as
	.endm

#endif /* _XTENSA_ASMMACRO_H */