/* SPDX-License-Identifier: GPL-2.0 WITH Linux-syscall-note */
#ifndef _UAPI__ALPHA_COMPILER_H
#define _UAPI__ALPHA_COMPILER_H

/*
 * Herein are macros we use when describing various patterns we want to GCC.
 * In all cases we can get better schedules out of the compiler if we hide
 * as little as possible inside inline assembly.  However, we want to be
 * able to know what we'll get out before giving up inline assembly.  Thus
 * these tests and macros.
 */

/*
 * Byte-manipulation instructions (insbl/inswl/insql/inslh, extbl/extwl,
 * cmpbge).  GCC 3.4 and later expose these as __builtin_alpha_* builtins,
 * which lets the compiler schedule them; older compilers fall back to
 * equivalent inline assembly.
 */
#if __GNUC__ == 3 && __GNUC_MINOR__ >= 4 || __GNUC__ > 3
# define __kernel_insbl(val, shift)	__builtin_alpha_insbl(val, shift)
# define __kernel_inswl(val, shift)	__builtin_alpha_inswl(val, shift)
# define __kernel_insql(val, shift)	__builtin_alpha_insql(val, shift)
# define __kernel_inslh(val, shift)	__builtin_alpha_inslh(val, shift)
# define __kernel_extbl(val, shift)	__builtin_alpha_extbl(val, shift)
# define __kernel_extwl(val, shift)	__builtin_alpha_extwl(val, shift)
# define __kernel_cmpbge(a, b)		__builtin_alpha_cmpbge(a, b)
#else
# define __kernel_insbl(val, shift)					\
  ({ unsigned long __kir;						\
     __asm__("insbl %2,%1,%0" : "=r"(__kir) : "rI"(shift), "r"(val));	\
     __kir; })
# define __kernel_inswl(val, shift)					\
  ({ unsigned long __kir;						\
     __asm__("inswl %2,%1,%0" : "=r"(__kir) : "rI"(shift), "r"(val));	\
     __kir; })
# define __kernel_insql(val, shift)					\
  ({ unsigned long __kir;						\
     __asm__("insql %2,%1,%0" : "=r"(__kir) : "rI"(shift), "r"(val));	\
     __kir; })
# define __kernel_inslh(val, shift)					\
  ({ unsigned long __kir;						\
     __asm__("inslh %2,%1,%0" : "=r"(__kir) : "rI"(shift), "r"(val));	\
     __kir; })
# define __kernel_extbl(val, shift)					\
  ({ unsigned long __kir;						\
     __asm__("extbl %2,%1,%0" : "=r"(__kir) : "rI"(shift), "r"(val));	\
     __kir; })
# define __kernel_extwl(val, shift)					\
  ({ unsigned long __kir;						\
     __asm__("extwl %2,%1,%0" : "=r"(__kir) : "rI"(shift), "r"(val));	\
     __kir; })
# define __kernel_cmpbge(a, b)						\
  ({ unsigned long __kir;						\
     __asm__("cmpbge %r2,%1,%0" : "=r"(__kir) : "rI"(b), "rJ"(a));	\
     __kir; })
#endif

/*
 * Count instructions (cttz/ctlz/ctpop) from the CIX extension.  When the
 * compiler targets a CPU with CIX (__alpha_cix__ defined), use the generic
 * GCC builtins (3.4+) or plain inline asm; otherwise force-emit the EV67
 * encodings via ".arch ev67" so the assembler accepts them.
 */
#ifdef __alpha_cix__
# if __GNUC__ == 3 && __GNUC_MINOR__ >= 4 || __GNUC__ > 3
#  define __kernel_cttz(x)	__builtin_ctzl(x)
#  define __kernel_ctlz(x)	__builtin_clzl(x)
#  define __kernel_ctpop(x)	__builtin_popcountl(x)
# else
#  define __kernel_cttz(x)						\
   ({ unsigned long __kir;						\
      __asm__("cttz %1,%0" : "=r"(__kir) : "r"(x));			\
      __kir; })
#  define __kernel_ctlz(x)						\
   ({ unsigned long __kir;						\
      __asm__("ctlz %1,%0" : "=r"(__kir) : "r"(x));			\
      __kir; })
#  define __kernel_ctpop(x)						\
   ({ unsigned long __kir;						\
      __asm__("ctpop %1,%0" : "=r"(__kir) : "r"(x));			\
      __kir; })
# endif
#else
# define __kernel_cttz(x)						\
  ({ unsigned long __kir;						\
     __asm__(".arch ev67; cttz %1,%0" : "=r"(__kir) : "r"(x));		\
     __kir; })
# define __kernel_ctlz(x)						\
  ({ unsigned long __kir;						\
     __asm__(".arch ev67; ctlz %1,%0" : "=r"(__kir) : "r"(x));		\
     __kir; })
# define __kernel_ctpop(x)						\
  ({ unsigned long __kir;						\
     __asm__(".arch ev67; ctpop %1,%0" : "=r"(__kir) : "r"(x));		\
     __kir; })
#endif


/*
 * Beginning with EGCS 1.1, GCC defines __alpha_bwx__ when the BWX
 * extension is enabled.  Previous versions did not define anything
 * we could test during compilation -- too bad, so sad.
 */

#if defined(__alpha_bwx__)
#define __kernel_ldbu(mem)	(mem)
#define __kernel_ldwu(mem)	(mem)
#define __kernel_stb(val,mem)	((mem) = (val))
#define __kernel_stw(val,mem)	((mem) = (val))
#else
/* No BWX: force-emit the EV56 byte/word load/store encodings. */
#define __kernel_ldbu(mem)						\
  ({ unsigned char __kir;						\
     __asm__(".arch ev56; ldbu %0,%1" : "=r"(__kir) : "m"(mem));	\
     __kir; })
#define __kernel_ldwu(mem)						\
  ({ unsigned short __kir;						\
     __asm__(".arch ev56; ldwu %0,%1" : "=r"(__kir) : "m"(mem));	\
     __kir; })
#define __kernel_stb(val,mem)						\
  __asm__(".arch ev56; stb %1,%0" : "=m"(mem) : "r"(val))
#define __kernel_stw(val,mem)						\
  __asm__(".arch ev56; stw %1,%0" : "=m"(mem) : "r"(val))
#endif


#endif /* _UAPI__ALPHA_COMPILER_H */