/*
 * include/asm-xtensa/bitops.h
 *
 * Atomic operations that C can't guarantee us. Useful for resource counting etc.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2001 - 2007 Tensilica Inc.
 */

#ifndef _XTENSA_BITOPS_H
#define _XTENSA_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <asm/processor.h>
#include <asm/byteorder.h>
#include <asm/barrier.h>

#include <asm-generic/bitops/non-atomic.h>

#if XCHAL_HAVE_NSA

static inline unsigned long __cntlz (unsigned long x)
{
        int lz;
        asm ("nsau %0, %1" : "=r" (lz) : "r" (x));
        return lz;
}
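
/*
 * NSAU counts the number of leading zero bits in its operand and yields
 * 32 for an all-zero word, so __cntlz(1) == 31, __cntlz(0x80000000) == 0
 * and __cntlz(0) == 32.  The bit-search helpers below are all built on
 * top of this single instruction.
 */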

/*
 * ffz: Find first zero in word. Undefined if no zero exists.
 * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
 */

static inline int ffz(unsigned long x)
{
        return 31 - __cntlz(~x & -~x);
}
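
/*
 * Example: ~x & -~x isolates the lowest set bit of ~x, i.e. the lowest
 * clear bit of x, so ffz(0xfffffff0) == 0 and ffz(0x0000ffff) == 16.
 */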

/*
 * __ffs: Find first bit set in word. Return 0 for bit 0
 */

static inline unsigned long __ffs(unsigned long x)
{
        return 31 - __cntlz(x & -x);
}
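
/*
 * Example: __ffs(1) == 0 and __ffs(0x80000000) == 31; like ffz(), the
 * result is undefined when no bit qualifies (here, for x == 0).
 */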

/*
 * ffs: Find first bit set in word. This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above ffz (man ffs).
 */

static inline int ffs(unsigned long x)
{
        return 32 - __cntlz(x & -x);
}
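
/*
 * Example: the 1-based numbering and the nsau-of-zero result of 32 give
 * ffs(0) == 0, ffs(1) == 1 and ffs(0x80000000) == 32.
 */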

/*
 * fls: Find last (most-significant) bit set in word.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */

static inline int fls (unsigned int x)
{
        return 32 - __cntlz(x);
}

/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
        return 31 - __cntlz(word);
}
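
/*
 * Example: __fls(1) == 0 and __fls(0x80000000) == 31.
 */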
#else

/* Use the generic implementation if we don't have the nsa/nsau instructions. */

# include <asm-generic/bitops/ffs.h>
# include <asm-generic/bitops/__ffs.h>
# include <asm-generic/bitops/ffz.h>
# include <asm-generic/bitops/fls.h>
# include <asm-generic/bitops/__fls.h>

#endif

#include <asm-generic/bitops/fls64.h>

#if XCHAL_HAVE_EXCLUSIVE

#define BIT_OP(op, insn, inv) \
static inline void op##_bit(unsigned int bit, volatile unsigned long *p)\
{ \
        unsigned long tmp; \
        unsigned long mask = 1UL << (bit & 31); \
 \
        p += bit >> 5; \
 \
        __asm__ __volatile__( \
                "1: l32ex %[tmp], %[addr]\n" \
                "   "insn" %[tmp], %[tmp], %[mask]\n" \
                "   s32ex %[tmp], %[addr]\n" \
                "   getex %[tmp]\n" \
                "   beqz %[tmp], 1b\n" \
                : [tmp] "=&a" (tmp) \
                : [mask] "a" (inv mask), [addr] "a" (p) \
                : "memory"); \
}
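
/*
 * In the exclusive-access variant above, l32ex loads the word and arms the
 * exclusive monitor, the logical instruction applies the mask, s32ex tries
 * a conditional store, and getex fetches the store result into %[tmp]
 * (non-zero on success), so beqz simply retries until the update lands.
 */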

#define TEST_AND_BIT_OP(op, insn, inv) \
static inline int \
test_and_##op##_bit(unsigned int bit, volatile unsigned long *p) \
{ \
        unsigned long tmp, value; \
        unsigned long mask = 1UL << (bit & 31); \
 \
        p += bit >> 5; \
 \
        __asm__ __volatile__( \
                "1: l32ex %[value], %[addr]\n" \
                "   "insn" %[tmp], %[value], %[mask]\n" \
                "   s32ex %[tmp], %[addr]\n" \
                "   getex %[tmp]\n" \
                "   beqz %[tmp], 1b\n" \
                : [tmp] "=&a" (tmp), [value] "=&a" (value) \
                : [mask] "a" (inv mask), [addr] "a" (p) \
                : "memory"); \
 \
        return value & mask; \
}
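
/*
 * The test_and_* form keeps the pre-update word in %[value], so
 * "value & mask" reports whether the bit was already set before the
 * atomic operation.
 */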

#elif XCHAL_HAVE_S32C1I

#define BIT_OP(op, insn, inv) \
static inline void op##_bit(unsigned int bit, volatile unsigned long *p)\
{ \
        unsigned long tmp, value; \
        unsigned long mask = 1UL << (bit & 31); \
 \
        p += bit >> 5; \
 \
        __asm__ __volatile__( \
                "1: l32i %[value], %[mem]\n" \
                "   wsr %[value], scompare1\n" \
                "   "insn" %[tmp], %[value], %[mask]\n" \
                "   s32c1i %[tmp], %[mem]\n" \
                "   bne %[tmp], %[value], 1b\n" \
                : [tmp] "=&a" (tmp), [value] "=&a" (value), \
                  [mem] "+m" (*p) \
                : [mask] "a" (inv mask) \
                : "memory"); \
}
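
/*
 * Here s32c1i implements a compare-and-swap loop: wsr puts the value just
 * loaded into SCOMPARE1, s32c1i stores the updated word only if memory
 * still matches SCOMPARE1 and always returns the current memory contents
 * in %[tmp], and bne retries whenever another CPU changed the word in the
 * meantime.
 */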

#define TEST_AND_BIT_OP(op, insn, inv) \
static inline int \
test_and_##op##_bit(unsigned int bit, volatile unsigned long *p) \
{ \
        unsigned long tmp, value; \
        unsigned long mask = 1UL << (bit & 31); \
 \
        p += bit >> 5; \
 \
        __asm__ __volatile__( \
                "1: l32i %[value], %[mem]\n" \
                "   wsr %[value], scompare1\n" \
                "   "insn" %[tmp], %[value], %[mask]\n" \
                "   s32c1i %[tmp], %[mem]\n" \
                "   bne %[tmp], %[value], 1b\n" \
                : [tmp] "=&a" (tmp), [value] "=&a" (value), \
                  [mem] "+m" (*p) \
                : [mask] "a" (inv mask) \
                : "memory"); \
 \
        return tmp & mask; \
}
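
/*
 * After a successful s32c1i, %[tmp] holds the old word value, so
 * "tmp & mask" again reports the bit's state before the update.
 */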

#else

#define BIT_OP(op, insn, inv)
#define TEST_AND_BIT_OP(op, insn, inv)

#include <asm-generic/bitops/atomic.h>

#endif /* XCHAL_HAVE_S32C1I */

#define BIT_OPS(op, insn, inv) \
        BIT_OP(op, insn, inv) \
        TEST_AND_BIT_OP(op, insn, inv)

BIT_OPS(set, "or", )
BIT_OPS(clear, "and", ~)
BIT_OPS(change, "xor", )
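
/*
 * The three expansions above generate set_bit(), clear_bit(), change_bit()
 * and their test_and_*_bit() counterparts, using "or", "and" with an
 * inverted mask, and "xor" respectively.
 */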

#undef BIT_OPS
#undef BIT_OP
#undef TEST_AND_BIT_OP

#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/le.h>

#include <asm-generic/bitops/ext2-atomic-setbit.h>

#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/lock.h>
#include <asm-generic/bitops/sched.h>

#endif /* _XTENSA_BITOPS_H */