/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright IBM Corp. 1999,2013
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 *
 * The description below was taken in large parts from the powerpc
 * bitops header file:
 * Within a word, bits are numbered LSB first. Lots of places make
 * this assumption by directly testing bits with (val & (1<<nr)).
 * This can cause confusion for large (> 1 word) bitmaps on a
 * big-endian system because, unlike little endian, the number of each
 * bit depends on the word size.
 *
 * The bitop functions are defined to work on unsigned longs, so the bits
 * end up numbered:
 *   |63..............0|127............64|191...........128|255...........192|
 *
 * We also have special functions which work with an MSB0 encoding.
 * The bits are numbered:
 *   |0..............63|64............127|128...........191|192...........255|
 *
 * The main difference is that bit 0-63 in the bit number field needs to be
 * reversed compared to the LSB0 encoded bit fields. This can be achieved by
 * XOR with 0x3f.
 *
 */

#ifndef _S390_BITOPS_H
#define _S390_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/typecheck.h>
#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/atomic_ops.h>
#include <asm/barrier.h>

#define __BITOPS_WORDS(bits) (((bits) + BITS_PER_LONG - 1) / BITS_PER_LONG)

/*
 * Return the address of the unsigned long word that contains bit 'nr'
 * of the bitmap at 'ptr'.
 *
 * 'nr ^ (nr & (BITS_PER_LONG - 1))' clears the low six bit-index bits,
 * i.e. it is equivalent to 'nr & ~(BITS_PER_LONG - 1)', and the '>> 3'
 * converts the resulting bit offset into a byte offset.
 */
static inline unsigned long *
__bitops_word(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = (unsigned long)ptr + ((nr ^ (nr & (BITS_PER_LONG - 1))) >> 3);
	return (unsigned long *)addr;
}
52*4882a593Smuzhiyun
/*
 * Return the address of the byte that contains bit 'nr' of the bitmap
 * at 'ptr'. s390 is big-endian, so within each 64-bit word the byte
 * order is reversed relative to LSB0 bit numbering; XORing the bit
 * number with BITS_PER_LONG - 8 (== 56) flips the byte index within
 * the word before the '>> 3' bit-to-byte conversion.
 */
static inline unsigned char *
__bitops_byte(unsigned long nr, volatile unsigned long *ptr)
{
	return ((unsigned char *)ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
}
58*4882a593Smuzhiyun
/* Atomically set bit 'nr' in the bitmap at 'ptr'. No memory barrier implied. */
static __always_inline void arch_set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	/*
	 * For a compile-time constant bit number, a single OR IMMEDIATE
	 * on the containing byte is cheaper than a 64-bit interlocked
	 * update; on zEC12 and newer machines OI is performed as an
	 * interlocked update (hence the config gate).
	 */
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		asm volatile(
			"oi %0,%b1\n"
			: "+Q" (*caddr)
			: "i" (1 << (nr & 7))
			: "cc", "memory");
		return;
	}
#endif
	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	__atomic64_or(mask, (long *)addr);
}
79*4882a593Smuzhiyun
/* Atomically clear bit 'nr' in the bitmap at 'ptr'. No memory barrier implied. */
static __always_inline void arch_clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	/*
	 * Constant bit number: use AND IMMEDIATE on the containing byte
	 * with the inverted single-bit mask (interlocked on zEC12+).
	 */
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		asm volatile(
			"ni %0,%b1\n"
			: "+Q" (*caddr)
			: "i" (~(1 << (nr & 7)))
			: "cc", "memory");
		return;
	}
#endif
	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
	__atomic64_and(mask, (long *)addr);
}
100*4882a593Smuzhiyun
/* Atomically toggle bit 'nr' in the bitmap at 'ptr'. No memory barrier implied. */
static __always_inline void arch_change_bit(unsigned long nr,
					    volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	/*
	 * Constant bit number: use EXCLUSIVE OR IMMEDIATE on the
	 * containing byte (interlocked on zEC12+).
	 */
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		asm volatile(
			"xi %0,%b1\n"
			: "+Q" (*caddr)
			: "i" (1 << (nr & 7))
			: "cc", "memory");
		return;
	}
#endif
	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	__atomic64_xor(mask, (long *)addr);
}
122*4882a593Smuzhiyun
/*
 * Atomically set bit 'nr' and return its previous value.
 * The _barrier variant of the atomic primitive provides full memory
 * ordering, as required by the generic test_and_set_bit() contract.
 */
static inline bool arch_test_and_set_bit(unsigned long nr,
					 volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long old, mask;

	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	old = __atomic64_or_barrier(mask, (long *)addr);
	return (old & mask) != 0;
}
133*4882a593Smuzhiyun
/*
 * Atomically clear bit 'nr' and return its previous value.
 * 'mask' here is the *inverted* single-bit mask (used by the AND),
 * hence the old bit value is recovered with 'old & ~mask'.
 * Implies full memory ordering via the _barrier primitive.
 */
static inline bool arch_test_and_clear_bit(unsigned long nr,
					   volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long old, mask;

	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
	old = __atomic64_and_barrier(mask, (long *)addr);
	return (old & ~mask) != 0;
}
144*4882a593Smuzhiyun
/*
 * Atomically toggle bit 'nr' and return its previous value.
 * Implies full memory ordering via the _barrier primitive.
 */
static inline bool arch_test_and_change_bit(unsigned long nr,
					    volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long old, mask;

	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	old = __atomic64_xor_barrier(mask, (long *)addr);
	return (old & mask) != 0;
}
155*4882a593Smuzhiyun
/*
 * Non-atomic set_bit(): set bit 'nr' with a plain byte read-modify-write.
 * Callers must serialize access to the bitmap themselves.
 */
static inline void arch___set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char mask = 1 << (nr & 7);
	unsigned char *baddr = __bitops_byte(nr, ptr);

	*baddr |= mask;
}
162*4882a593Smuzhiyun
/*
 * Non-atomic clear_bit(): clear bit 'nr' with a plain byte read-modify-write.
 * Callers must serialize access to the bitmap themselves.
 */
static inline void arch___clear_bit(unsigned long nr,
				    volatile unsigned long *ptr)
{
	unsigned char mask = 1 << (nr & 7);
	unsigned char *baddr = __bitops_byte(nr, ptr);

	*baddr &= ~mask;
}
170*4882a593Smuzhiyun
/*
 * Non-atomic change_bit(): toggle bit 'nr' with a plain byte read-modify-write.
 * Callers must serialize access to the bitmap themselves.
 */
static inline void arch___change_bit(unsigned long nr,
				     volatile unsigned long *ptr)
{
	unsigned char mask = 1 << (nr & 7);
	unsigned char *baddr = __bitops_byte(nr, ptr);

	*baddr ^= mask;
}
178*4882a593Smuzhiyun
arch___test_and_set_bit(unsigned long nr,volatile unsigned long * ptr)179*4882a593Smuzhiyun static inline bool arch___test_and_set_bit(unsigned long nr,
180*4882a593Smuzhiyun volatile unsigned long *ptr)
181*4882a593Smuzhiyun {
182*4882a593Smuzhiyun unsigned char *addr = __bitops_byte(nr, ptr);
183*4882a593Smuzhiyun unsigned char ch;
184*4882a593Smuzhiyun
185*4882a593Smuzhiyun ch = *addr;
186*4882a593Smuzhiyun *addr |= 1 << (nr & 7);
187*4882a593Smuzhiyun return (ch >> (nr & 7)) & 1;
188*4882a593Smuzhiyun }
189*4882a593Smuzhiyun
arch___test_and_clear_bit(unsigned long nr,volatile unsigned long * ptr)190*4882a593Smuzhiyun static inline bool arch___test_and_clear_bit(unsigned long nr,
191*4882a593Smuzhiyun volatile unsigned long *ptr)
192*4882a593Smuzhiyun {
193*4882a593Smuzhiyun unsigned char *addr = __bitops_byte(nr, ptr);
194*4882a593Smuzhiyun unsigned char ch;
195*4882a593Smuzhiyun
196*4882a593Smuzhiyun ch = *addr;
197*4882a593Smuzhiyun *addr &= ~(1 << (nr & 7));
198*4882a593Smuzhiyun return (ch >> (nr & 7)) & 1;
199*4882a593Smuzhiyun }
200*4882a593Smuzhiyun
arch___test_and_change_bit(unsigned long nr,volatile unsigned long * ptr)201*4882a593Smuzhiyun static inline bool arch___test_and_change_bit(unsigned long nr,
202*4882a593Smuzhiyun volatile unsigned long *ptr)
203*4882a593Smuzhiyun {
204*4882a593Smuzhiyun unsigned char *addr = __bitops_byte(nr, ptr);
205*4882a593Smuzhiyun unsigned char ch;
206*4882a593Smuzhiyun
207*4882a593Smuzhiyun ch = *addr;
208*4882a593Smuzhiyun *addr ^= 1 << (nr & 7);
209*4882a593Smuzhiyun return (ch >> (nr & 7)) & 1;
210*4882a593Smuzhiyun }
211*4882a593Smuzhiyun
arch_test_bit(unsigned long nr,const volatile unsigned long * ptr)212*4882a593Smuzhiyun static inline bool arch_test_bit(unsigned long nr,
213*4882a593Smuzhiyun const volatile unsigned long *ptr)
214*4882a593Smuzhiyun {
215*4882a593Smuzhiyun const volatile unsigned char *addr;
216*4882a593Smuzhiyun
217*4882a593Smuzhiyun addr = ((const volatile unsigned char *)ptr);
218*4882a593Smuzhiyun addr += (nr ^ (BITS_PER_LONG - 8)) >> 3;
219*4882a593Smuzhiyun return (*addr >> (nr & 7)) & 1;
220*4882a593Smuzhiyun }
221*4882a593Smuzhiyun
/*
 * Lock-acquiring variant of test_and_set_bit(): first do a plain read
 * of the bit and bail out early if it is already set, which avoids an
 * interlocked update (and exclusive cache-line ownership) on a
 * contended lock; otherwise fall back to the fully atomic
 * test-and-set, which provides the required ordering.
 */
static inline bool arch_test_and_set_bit_lock(unsigned long nr,
					      volatile unsigned long *ptr)
{
	if (arch_test_bit(nr, ptr))
		return 1;
	return arch_test_and_set_bit(nr, ptr);
}
229*4882a593Smuzhiyun
/*
 * Clear bit 'nr' with release semantics: the barrier orders all prior
 * memory accesses before the atomic clear that releases the lock bit.
 */
static inline void arch_clear_bit_unlock(unsigned long nr,
					 volatile unsigned long *ptr)
{
	smp_mb__before_atomic();
	arch_clear_bit(nr, ptr);
}
236*4882a593Smuzhiyun
/*
 * Unlock variant using a full barrier followed by a non-atomic clear.
 * Per the generic __clear_bit_unlock() contract the word containing
 * the bit is not concurrently modified, so the plain store suffices.
 */
static inline void arch___clear_bit_unlock(unsigned long nr,
					   volatile unsigned long *ptr)
{
	smp_mb();
	arch___clear_bit(nr, ptr);
}

#include <asm-generic/bitops/instrumented-atomic.h>
#include <asm-generic/bitops/instrumented-non-atomic.h>
#include <asm-generic/bitops/instrumented-lock.h>

/*
 * Functions which use MSB0 bit numbering.
 * The bits are numbered:
 *   |0..............63|64............127|128...........191|192...........255|
 */
unsigned long find_first_bit_inv(const unsigned long *addr, unsigned long size);
unsigned long find_next_bit_inv(const unsigned long *addr, unsigned long size,
				unsigned long offset);

#define for_each_set_bit_inv(bit, addr, size)				\
	for ((bit) = find_first_bit_inv((addr), (size));		\
	     (bit) < (size);						\
	     (bit) = find_next_bit_inv((addr), (size), (bit) + 1))

set_bit_inv(unsigned long nr,volatile unsigned long * ptr)262*4882a593Smuzhiyun static inline void set_bit_inv(unsigned long nr, volatile unsigned long *ptr)
263*4882a593Smuzhiyun {
264*4882a593Smuzhiyun return set_bit(nr ^ (BITS_PER_LONG - 1), ptr);
265*4882a593Smuzhiyun }
266*4882a593Smuzhiyun
clear_bit_inv(unsigned long nr,volatile unsigned long * ptr)267*4882a593Smuzhiyun static inline void clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
268*4882a593Smuzhiyun {
269*4882a593Smuzhiyun return clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
270*4882a593Smuzhiyun }
271*4882a593Smuzhiyun
test_and_clear_bit_inv(unsigned long nr,volatile unsigned long * ptr)272*4882a593Smuzhiyun static inline bool test_and_clear_bit_inv(unsigned long nr,
273*4882a593Smuzhiyun volatile unsigned long *ptr)
274*4882a593Smuzhiyun {
275*4882a593Smuzhiyun return test_and_clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
276*4882a593Smuzhiyun }
277*4882a593Smuzhiyun
__set_bit_inv(unsigned long nr,volatile unsigned long * ptr)278*4882a593Smuzhiyun static inline void __set_bit_inv(unsigned long nr, volatile unsigned long *ptr)
279*4882a593Smuzhiyun {
280*4882a593Smuzhiyun return __set_bit(nr ^ (BITS_PER_LONG - 1), ptr);
281*4882a593Smuzhiyun }
282*4882a593Smuzhiyun
__clear_bit_inv(unsigned long nr,volatile unsigned long * ptr)283*4882a593Smuzhiyun static inline void __clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
284*4882a593Smuzhiyun {
285*4882a593Smuzhiyun return __clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
286*4882a593Smuzhiyun }
287*4882a593Smuzhiyun
test_bit_inv(unsigned long nr,const volatile unsigned long * ptr)288*4882a593Smuzhiyun static inline bool test_bit_inv(unsigned long nr,
289*4882a593Smuzhiyun const volatile unsigned long *ptr)
290*4882a593Smuzhiyun {
291*4882a593Smuzhiyun return test_bit(nr ^ (BITS_PER_LONG - 1), ptr);
292*4882a593Smuzhiyun }
293*4882a593Smuzhiyun
#ifdef CONFIG_HAVE_MARCH_Z9_109_FEATURES

/**
 * __flogr - find leftmost one
 * @word: the word to search
 *
 * Returns the bit number of the most significant bit set,
 * where the most significant bit has bit number 0.
 * If no bit is set this function returns 64.
 */
static inline unsigned char __flogr(unsigned long word)
{
	if (__builtin_constant_p(word)) {
		/* Constant input: let the compiler fold a binary search. */
		unsigned long bit = 0;

		if (!word)
			return 64;
		if (!(word & 0xffffffff00000000UL)) {
			word <<= 32;
			bit += 32;
		}
		if (!(word & 0xffff000000000000UL)) {
			word <<= 16;
			bit += 16;
		}
		if (!(word & 0xff00000000000000UL)) {
			word <<= 8;
			bit += 8;
		}
		if (!(word & 0xf000000000000000UL)) {
			word <<= 4;
			bit += 4;
		}
		if (!(word & 0xc000000000000000UL)) {
			word <<= 2;
			bit += 2;
		}
		if (!(word & 0x8000000000000000UL)) {
			word <<= 1;
			bit += 1;
		}
		return bit;
	} else {
		/*
		 * FLOGR writes an even/odd register pair: the bit number
		 * of the leftmost one goes into r4 and the remaining-bits
		 * mask into r5. r5 must be declared as an output so the
		 * compiler knows it is clobbered, even though it is unused.
		 */
		register unsigned long bit asm("4") = word;
		register unsigned long out asm("5");

		asm volatile(
			" flogr %[bit],%[bit]\n"
			: [bit] "+d" (bit), [out] "=d" (out) : : "cc");
		return bit;
	}
}
346*4882a593Smuzhiyun
/**
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
	/*
	 * '-word & word' isolates the lowest set bit; __flogr() returns
	 * its MSB0 bit number, and XOR with 63 converts that to LSB0
	 * numbering.
	 */
	return __flogr(-word & word) ^ (BITS_PER_LONG - 1);
}
357*4882a593Smuzhiyun
/**
 * ffs - find first bit set
 * @word: the word to search
 *
 * This is defined the same way as the libc and
 * compiler builtin ffs routines (man ffs).
 */
static inline int ffs(int word)
{
	unsigned long mask = 2 * BITS_PER_LONG - 1;
	unsigned int val = (unsigned int)word;

	/*
	 * For val == 0, __flogr() returns 64, so the expression becomes
	 * (64 ^ 63) + 1 = 128, which the mask (127) maps to 0 - the
	 * required ffs(0) result - without a branch.
	 */
	return (1 + (__flogr(-val & val) ^ (BITS_PER_LONG - 1))) & mask;
}
372*4882a593Smuzhiyun
/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
	/* XOR with 63 converts __flogr()'s MSB0 result to LSB0 numbering. */
	return __flogr(word) ^ (BITS_PER_LONG - 1);
}
383*4882a593Smuzhiyun
/**
 * fls64 - find last set bit in a 64-bit word
 * @word: the word to search
 *
 * This is defined in a similar way as the libc and compiler builtin
 * ffsll, but returns the position of the most significant set bit.
 *
 * fls64(value) returns 0 if value is 0 or the position of the last
 * set bit if value is nonzero. The last (most significant) bit is
 * at position 64.
 */
static inline int fls64(unsigned long word)
{
	unsigned long mask = 2 * BITS_PER_LONG - 1;

	/* Same branchless zero-case trick as in ffs(): 128 & 127 == 0. */
	return (1 + (__flogr(word) ^ (BITS_PER_LONG - 1))) & mask;
}
401*4882a593Smuzhiyun
/**
 * fls - find last (most-significant) bit set
 * @word: the word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(unsigned int word)
{
	/* Zero-extension makes the 64-bit variant correct for 32 bits. */
	return fls64(word);
}

#else /* CONFIG_HAVE_MARCH_Z9_109_FEATURES */

#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/__fls.h>
#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/fls64.h>

#endif /* CONFIG_HAVE_MARCH_Z9_109_FEATURES */

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* _S390_BITOPS_H */