/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (c) 1994 - 1997, 99, 2000, 06, 07  Ralf Baechle (ralf@linux-mips.org)
 * Copyright (c) 1999, 2000  Silicon Graphics, Inc.
 */
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/bits.h>
#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/byteorder.h>		/* sigh ... */
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/isa-rev.h>
#include <asm/llsc.h>
#include <asm/sgidefs.h>
#include <asm/war.h>

#define __bit_op(mem, insn, inputs...) do {			\
	unsigned long temp;					\
								\
	asm volatile(						\
	"	.set		push			\n"	\
	"	.set		" MIPS_ISA_LEVEL "	\n"	\
	"	" __SYNC(full, loongson3_war) "		\n"	\
	"1:	" __LL		"%0, %1			\n"	\
	"	" insn		"			\n"	\
	"	" __SC		"%0, %1			\n"	\
	"	" __SC_BEQZ	"%0, 1b			\n"	\
	"	.set		pop			\n"	\
	: "=&r"(temp), "+" GCC_OFF_SMALL_ASM()(mem)		\
	: inputs						\
	: __LLSC_CLOBBER);					\
} while (0)
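
/*
 * Illustrative sketch (editor's note, not in the original source): an
 * invocation such as
 *
 *	__bit_op(*m, "or\t%0, %2", "ir"(BIT(bit)));
 *
 * expands to an LL/SC retry loop roughly equivalent to
 *
 *	1:	ll	temp, (m)	# load-linked the word
 *		or	temp, mask	# apply the caller's "insn"
 *		sc	temp, (m)	# store-conditional it back
 *		beqz	temp, 1b	# retry if another CPU intervened
 *
 * so the read-modify-write is atomic without disabling interrupts.
 */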

#define __test_bit_op(mem, ll_dst, insn, inputs...) ({		\
	unsigned long orig, temp;				\
								\
	asm volatile(						\
	"	.set		push			\n"	\
	"	.set		" MIPS_ISA_LEVEL "	\n"	\
	"	" __SYNC(full, loongson3_war) "		\n"	\
	"1:	" __LL		ll_dst ", %2		\n"	\
	"	" insn		"			\n"	\
	"	" __SC		"%1, %2			\n"	\
	"	" __SC_BEQZ	"%1, 1b			\n"	\
	"	.set		pop			\n"	\
	: "=&r"(orig), "=&r"(temp),				\
	  "+" GCC_OFF_SMALL_ASM()(mem)				\
	: inputs						\
	: __LLSC_CLOBBER);					\
								\
	orig;							\
})
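
/*
 * Editor's note: __test_bit_op() follows the same LL/SC pattern as
 * __bit_op() but lets the caller pick which register the LL loads into
 * (ll_dst), keeping the pre-update word in "orig" while "temp" carries
 * the value written back by SC.  The test_and_*_bit() helpers below use
 * it as, e.g.:
 *
 *	orig = __test_bit_op(*m, "%0", "or\t%1, %0, %3", "ir"(BIT(bit)));
 *	res  = (orig & BIT(bit)) != 0;
 */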

/*
 * These are the "slower" versions of the functions and are in bitops.c.
 * These functions call raw_local_irq_{save,restore}().
 */
void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
int __mips_test_and_set_bit_lock(unsigned long nr,
				 volatile unsigned long *addr);
int __mips_test_and_clear_bit(unsigned long nr,
			      volatile unsigned long *addr);
int __mips_test_and_change_bit(unsigned long nr,
			       volatile unsigned long *addr);

/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
	volatile unsigned long *m = &addr[BIT_WORD(nr)];
	int bit = nr % BITS_PER_LONG;

	if (!kernel_uses_llsc) {
		__mips_set_bit(nr, addr);
		return;
	}

	if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit) && (bit >= 16)) {
		__bit_op(*m, __INS "%0, %3, %2, 1", "i"(bit), "r"(~0));
		return;
	}

	__bit_op(*m, "or\t%0, %2", "ir"(BIT(bit)));
}
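
/*
 * Editor's note on the fast path above: on MIPS r2 and later a constant
 * bit can be set with a single "ins" (insert a 1-bit field taken from a
 * register holding all ones).  The r2 path is presumably limited to
 * bit >= 16 because bits 0..15 are already reachable through ori's
 * 16-bit immediate via the generic "or" path below it.
 */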

/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_atomic() and/or smp_mb__after_atomic()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	volatile unsigned long *m = &addr[BIT_WORD(nr)];
	int bit = nr % BITS_PER_LONG;

	if (!kernel_uses_llsc) {
		__mips_clear_bit(nr, addr);
		return;
	}

	if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit)) {
		__bit_op(*m, __INS "%0, $0, %2, 1", "i"(bit));
		return;
	}

	__bit_op(*m, "and\t%0, %2", "ir"(~BIT(bit)));
}
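
/*
 * Editor's note: unlike set_bit(), the "ins" path here covers any
 * constant bit, inserting a 1-bit field from $0 (the hardwired zero
 * register) to clear it.  Since andi zero-extends its immediate, no
 * single immediate "and" can apply the inverted mask ~BIT(bit), so
 * there is no cheap low-bit alternative to prefer.
 */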

/*
 * clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit_unlock() is atomic and implies release semantics before the
 * memory operation.  It can be used for an unlock.
 */
static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	smp_mb__before_atomic();
	clear_bit(nr, addr);
}

/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
	volatile unsigned long *m = &addr[BIT_WORD(nr)];
	int bit = nr % BITS_PER_LONG;

	if (!kernel_uses_llsc) {
		__mips_change_bit(nr, addr);
		return;
	}

	__bit_op(*m, "xor\t%0, %2", "ir"(BIT(bit)));
}

/*
 * test_and_set_bit_lock - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and implies acquire ordering semantics
 * after the memory operation.
 */
static inline int test_and_set_bit_lock(unsigned long nr,
	volatile unsigned long *addr)
{
	volatile unsigned long *m = &addr[BIT_WORD(nr)];
	int bit = nr % BITS_PER_LONG;
	unsigned long res, orig;

	if (!kernel_uses_llsc) {
		res = __mips_test_and_set_bit_lock(nr, addr);
	} else {
		orig = __test_bit_op(*m, "%0",
				     "or\t%1, %0, %3",
				     "ir"(BIT(bit)));
		res = (orig & BIT(bit)) != 0;
	}

	smp_llsc_mb();

	return res;
}
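
/*
 * Editor's sketch of the intended lock usage (assumed, not part of the
 * original source): bit 0 of a word can serve as a simple lock,
 *
 *	while (test_and_set_bit_lock(0, &lock_word))
 *		cpu_relax();
 *	...critical section...
 *	clear_bit_unlock(0, &lock_word);
 *
 * with acquire ordering supplied by the smp_llsc_mb() above and release
 * ordering by the barrier in clear_bit_unlock().
 */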

/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	smp_mb__before_atomic();
	return test_and_set_bit_lock(nr, addr);
}

/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_clear_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	volatile unsigned long *m = &addr[BIT_WORD(nr)];
	int bit = nr % BITS_PER_LONG;
	unsigned long res, orig;

	smp_mb__before_atomic();

	if (!kernel_uses_llsc) {
		res = __mips_test_and_clear_bit(nr, addr);
	} else if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(nr)) {
		res = __test_bit_op(*m, "%1",
				    __EXT "%0, %1, %3, 1;"
				    __INS "%1, $0, %3, 1",
				    "i"(bit));
	} else {
		orig = __test_bit_op(*m, "%0",
				     "or\t%1, %0, %3;"
				     "xor\t%1, %1, %3",
				     "ir"(BIT(bit)));
		res = (orig & BIT(bit)) != 0;
	}

	smp_llsc_mb();

	return res;
}
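
/*
 * Editor's note: the generic path above clears the bit with an "or"
 * followed by an "xor" of the same mask; the or forces the bit to 1 and
 * the xor then flips it to 0, so the stored word has the bit clear no
 * matter its original state, while "orig" still holds the pre-update
 * value for the return.  The r2 path instead extracts the old bit with
 * "ext" and clears it with "ins" in one pass.
 */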

/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	volatile unsigned long *m = &addr[BIT_WORD(nr)];
	int bit = nr % BITS_PER_LONG;
	unsigned long res, orig;

	smp_mb__before_atomic();

	if (!kernel_uses_llsc) {
		res = __mips_test_and_change_bit(nr, addr);
	} else {
		orig = __test_bit_op(*m, "%0",
				     "xor\t%1, %0, %3",
				     "ir"(BIT(bit)));
		res = (orig & BIT(bit)) != 0;
	}

	smp_llsc_mb();

	return res;
}

#undef __bit_op
#undef __test_bit_op

#include <asm-generic/bitops/non-atomic.h>

/*
 * __clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * __clear_bit_unlock() is non-atomic and implies release semantics before
 * the memory operation.  It can be used for an unlock if no other CPUs can
 * concurrently modify other bits in the word.
 */
static inline void __clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	smp_mb__before_llsc();
	__clear_bit(nr, addr);
	nudge_writes();
}

/*
 * Return the bit position (0..BITS_PER_LONG-1) of the most significant
 * 1 bit in a word.  The result is undefined if no bit is set, so callers
 * should check for a non-zero word first.
 */
static __always_inline unsigned long __fls(unsigned long word)
{
	int num;

	if (BITS_PER_LONG == 32 && !__builtin_constant_p(word) &&
	    __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	clz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (num)
		: "r" (word));

		return 31 - num;
	}

	if (BITS_PER_LONG == 64 && !__builtin_constant_p(word) &&
	    __builtin_constant_p(cpu_has_mips64) && cpu_has_mips64) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	dclz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (num)
		: "r" (word));

		return 63 - num;
	}

	num = BITS_PER_LONG - 1;

#if BITS_PER_LONG == 64
	if (!(word & (~0ul << 32))) {
		num -= 32;
		word <<= 32;
	}
#endif
	if (!(word & (~0ul << (BITS_PER_LONG-16)))) {
		num -= 16;
		word <<= 16;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-8)))) {
		num -= 8;
		word <<= 8;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-4)))) {
		num -= 4;
		word <<= 4;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-2)))) {
		num -= 2;
		word <<= 2;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-1))))
		num -= 1;
	return num;
}
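
/*
 * Editor's worked examples: __fls(1) == 0, __fls(0x80000000) == 31 and,
 * on 64-bit, __fls(1UL << 63) == 63.  The software fallback is a binary
 * search: each step tests whether the upper half of the remaining bits
 * is empty and, if so, shifts the lower half up and subtracts the half
 * width from the running position.
 */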

/*
 * __ffs - find first bit set in word.
 * @word: The word to search
 *
 * Returns 0..BITS_PER_LONG-1.
 * Undefined if no bit exists, so code should check against 0 first.
 */
static __always_inline unsigned long __ffs(unsigned long word)
{
	return __fls(word & -word);
}
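
/*
 * Editor's note: in two's complement, word & -word isolates the least
 * significant set bit (e.g. 0x18 & -0x18 == 0x08), so the position of
 * the first set bit is simply __fls() of that single-bit value:
 * __ffs(0x18) == 3.
 */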

/*
 * fls - find last bit set.
 * @x: The word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(unsigned int x)
{
	int r;

	if (!__builtin_constant_p(x) &&
	    __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	clz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (x)
		: "r" (x));

		return 32 - x;
	}

	r = 32;
	if (!x)
		return 0;
	if (!(x & 0xffff0000u)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xff000000u)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xf0000000u)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xc0000000u)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000u)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}

#include <asm-generic/bitops/fls64.h>

/*
 * ffs - find first bit set.
 * @word: The word to search
 *
 * This is defined the same way as the libc and compiler builtin ffs
 * routines, therefore differs in spirit from ffz() below (man ffs).
 */
static inline int ffs(int word)
{
	if (!word)
		return 0;

	return fls(word & -word);
}
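
/*
 * Editor's examples: ffs(0) == 0, ffs(1) == 1, ffs(0x18) == 4.  Like
 * the libc routine, ffs() counts bit positions from 1, whereas __ffs()
 * above counts from 0.
 */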

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/find.h>

#ifdef __KERNEL__

#include <asm-generic/bitops/sched.h>

#include <asm/arch_hweight.h>
#include <asm-generic/bitops/const_hweight.h>

#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic.h>

#endif /* __KERNEL__ */

#endif /* _ASM_BITOPS_H */