/*
 * Based on Linux/Xtensa kernel version
 *
 * Copyright (C) 2001 - 2007 Tensilica Inc.
 *
 * SPDX-License-Identifier: GPL-2.0+
 */

#ifndef _XTENSA_BYTEORDER_H
#define _XTENSA_BYTEORDER_H

#include <asm/types.h>

static inline __attribute__((const)) __u32 ___arch__swab32(__u32 x)
{
	__u32 res;

	/* instruction sequence from Xtensa ISA release 2/2000 */
	__asm__("ssai 8\n\t"
		"srli %0, %1, 16\n\t"
		"src %0, %0, %1\n\t"
		"src %0, %0, %0\n\t"
		"src %0, %1, %0\n"
		: "=&a" (res)
		: "a" (x)
		);
	return res;
}
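
/*
 * For reference: SSAI sets the shift-amount register to 8, and each SRC
 * funnel-shifts the concatenation of its two source registers right by
 * that amount, rotating one byte into place per step.  The net effect is
 * equivalent to the portable C expression (illustrative only, not the
 * code the compiler emits):
 *
 *	(x << 24) | ((x & 0x0000ff00) << 8) |
 *	((x >> 8) & 0x0000ff00) | (x >> 24)
 *
 * so, for example, 0x12345678 becomes 0x78563412.
 */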

static inline __attribute__((const)) __u16 ___arch__swab16(__u16 x)
{
	/*
	 * Given that 'short' values are signed (i.e., can be negative),
	 * we cannot assume that the upper 16-bits of the register are
	 * zero. We are careful to mask values after shifting.
	 */

	/*
	 * There exists an anomaly between xt-gcc and xt-xcc. xt-gcc
	 * inserts an extui instruction after putting this function inline
	 * to ensure that it uses only the least-significant 16 bits of
	 * the result. xt-xcc doesn't use an extui, but assumes the
	 * __asm__ macro follows the convention that the upper 16 bits
	 * of an 'unsigned short' result are still zero. This macro
	 * doesn't follow that convention; indeed, it leaves garbage in
	 * the upper 16 bits of the register.
	 *
	 * Declaring the temporary variables 'res' and 'tmp' to be 32-bit
	 * types while the return type of the function is a 16-bit type
	 * forces both compilers to insert exactly one extui instruction
	 * (or equivalent) to mask off the upper 16 bits.
	 */

	__u32 res;
	__u32 tmp;

	__asm__("extui %1, %2, 8, 8\n\t"
		"slli %0, %2, 8\n\t"
		"or %0, %0, %1\n"
		: "=&a" (res), "=&a" (tmp)
		: "a" (x)
		);

	return res;
}
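
/*
 * For reference, the three instructions above compute, in C terms
 * (illustrative only, not what the compiler emits):
 *
 *	tmp = (x >> 8) & 0xff;		// extui: high byte of the halfword
 *	res = (x << 8) | tmp;		// slli + or
 *
 * e.g. 0x1234 becomes 0x3412 in the low 16 bits, while bits 16..23 still
 * hold leftovers from the shift; the 16-bit return type is what forces
 * the compiler to mask them off, as described above.
 */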

#define __arch__swab32(x) ___arch__swab32(x)
#define __arch__swab16(x) ___arch__swab16(x)

#if !defined(__STRICT_ANSI__) || defined(__KERNEL__)
# define __BYTEORDER_HAS_U64__
# define __SWAB_64_THRU_32__
#endif
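
/*
 * __SWAB_64_THRU_32__ asks the generic linux/byteorder code to derive the
 * 64-bit swap from two 32-bit swaps; a sketch of the intended equivalence
 * (not code from this header):
 *
 *	__swab64(v) == ((__u64)___arch__swab32((__u32)v) << 32) |
 *		       ___arch__swab32((__u32)(v >> 32));
 */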

#ifdef __XTENSA_EL__
# include <linux/byteorder/little_endian.h>
#elif defined(__XTENSA_EB__)
# include <linux/byteorder/big_endian.h>
#else
# error processor byte order undefined!
#endif

#endif /* _XTENSA_BYTEORDER_H */