/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _LINUX_BITOPS_H
#define _LINUX_BITOPS_H
#include <asm/types.h>
#include <linux/bits.h>

/* Set bits in the first 'n' bytes when loaded from memory */
#ifdef __LITTLE_ENDIAN
#  define aligned_byte_mask(n) ((1UL << 8*(n))-1)
#else
#  define aligned_byte_mask(n) (~0xffUL << (BITS_PER_LONG - 8 - 8*(n)))
#endif
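
/*
 * Worked example (illustrative, assuming BITS_PER_LONG == 64): on a
 * little-endian machine, aligned_byte_mask(2) == 0x000000000000ffff,
 * covering the first two bytes of the loaded word; on a big-endian
 * machine the same call yields 0xffff000000000000.
 */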

#define BITS_PER_TYPE(type)	(sizeof(type) * BITS_PER_BYTE)
#define BITS_TO_LONGS(nr)	DIV_ROUND_UP(nr, BITS_PER_TYPE(long))
#define BITS_TO_U64(nr)		DIV_ROUND_UP(nr, BITS_PER_TYPE(u64))
#define BITS_TO_U32(nr)		DIV_ROUND_UP(nr, BITS_PER_TYPE(u32))
#define BITS_TO_BYTES(nr)	DIV_ROUND_UP(nr, BITS_PER_TYPE(char))
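
/*
 * Worked example (illustrative, assuming BITS_PER_LONG == 64):
 * BITS_TO_LONGS(64) == 1 and BITS_TO_LONGS(65) == 2, i.e. a 65-bit
 * bitmap needs two longs of backing storage; likewise
 * BITS_TO_BYTES(9) == 2.
 */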

extern unsigned int __sw_hweight8(unsigned int w);
extern unsigned int __sw_hweight16(unsigned int w);
extern unsigned int __sw_hweight32(unsigned int w);
extern unsigned long __sw_hweight64(__u64 w);

/*
 * Include this here because some architectures need generic_ffs/fls in
 * scope
 */
#include <asm/bitops.h>

#define for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size));		\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))

/* Same as for_each_set_bit() but uses @bit as the value to start searching from */
#define for_each_set_bit_from(bit, addr, size) \
	for ((bit) = find_next_bit((addr), (size), (bit));	\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))

#define for_each_clear_bit(bit, addr, size) \
	for ((bit) = find_first_zero_bit((addr), (size));	\
	     (bit) < (size);					\
	     (bit) = find_next_zero_bit((addr), (size), (bit) + 1))

/* Same as for_each_clear_bit() but uses @bit as the value to start searching from */
#define for_each_clear_bit_from(bit, addr, size) \
	for ((bit) = find_next_zero_bit((addr), (size), (bit));	\
	     (bit) < (size);					\
	     (bit) = find_next_zero_bit((addr), (size), (bit) + 1))
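
/*
 * Usage sketch (illustrative; 'map' and 'bit' are hypothetical names;
 * DECLARE_BITMAP() and bitmap_zero() come from other kernel headers):
 *
 *	DECLARE_BITMAP(map, 64);
 *	unsigned int bit;
 *
 *	bitmap_zero(map, 64);
 *	__set_bit(3, map);
 *	__set_bit(17, map);
 *	for_each_set_bit(bit, map, 64)
 *		pr_info("bit %u is set\n", bit);	(prints 3, then 17)
 */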

/**
 * for_each_set_clump8 - iterate over bitmap for each 8-bit clump with set bits
 * @start: bit offset to start search and to store the current iteration offset
 * @clump: location to store copy of current 8-bit clump
 * @bits: bitmap address to base the search on
 * @size: bitmap size in number of bits
 */
#define for_each_set_clump8(start, clump, bits, size) \
	for ((start) = find_first_clump8(&(clump), (bits), (size)); \
	     (start) < (size); \
	     (start) = find_next_clump8(&(clump), (bits), (size), (start) + 8))
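
/*
 * Usage sketch (illustrative; names are hypothetical): walk a bitmap in
 * byte-sized clumps, receiving a copy of each clump that has bits set.
 *
 *	DECLARE_BITMAP(map, 64);
 *	unsigned long clump;
 *	unsigned int start;
 *
 *	for_each_set_clump8(start, clump, map, 64)
 *		pr_info("clump at bit %u: %#04lx\n", start, clump);
 */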

static inline int get_bitmask_order(unsigned int count)
{
	int order;

	order = fls(count);
	return order;	/* We could be slightly more clever with -1 here... */
}

static __always_inline unsigned long hweight_long(unsigned long w)
{
	return sizeof(w) == 4 ? hweight32(w) : hweight64((__u64)w);
}
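
/*
 * Worked example (illustrative): hweight_long() counts the set bits, so
 * hweight_long(0xf0UL) == 4 and hweight_long(0UL) == 0; on a 64-bit
 * machine it compiles down to hweight64().
 */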

/**
 * rol64 - rotate a 64-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u64 rol64(__u64 word, unsigned int shift)
{
	return (word << (shift & 63)) | (word >> ((-shift) & 63));
}

/**
 * ror64 - rotate a 64-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u64 ror64(__u64 word, unsigned int shift)
{
	return (word >> (shift & 63)) | (word << ((-shift) & 63));
}

/**
 * rol32 - rotate a 32-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 rol32(__u32 word, unsigned int shift)
{
	return (word << (shift & 31)) | (word >> ((-shift) & 31));
}

/**
 * ror32 - rotate a 32-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 ror32(__u32 word, unsigned int shift)
{
	return (word >> (shift & 31)) | (word << ((-shift) & 31));
}

/**
 * rol16 - rotate a 16-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u16 rol16(__u16 word, unsigned int shift)
{
	return (word << (shift & 15)) | (word >> ((-shift) & 15));
}

/**
 * ror16 - rotate a 16-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u16 ror16(__u16 word, unsigned int shift)
{
	return (word >> (shift & 15)) | (word << ((-shift) & 15));
}

/**
 * rol8 - rotate an 8-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u8 rol8(__u8 word, unsigned int shift)
{
	return (word << (shift & 7)) | (word >> ((-shift) & 7));
}

/**
 * ror8 - rotate an 8-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u8 ror8(__u8 word, unsigned int shift)
{
	return (word >> (shift & 7)) | (word << ((-shift) & 7));
}
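
/*
 * Worked example (illustrative): rol32(0x80000001, 1) == 0x00000003 and
 * ror32(0x00000003, 1) == 0x80000001; the '& 31' masking of @shift means
 * rol32(x, 32) leaves x unchanged rather than invoking undefined
 * behaviour from an oversized shift.
 */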

/**
 * sign_extend32 - sign extend a 32-bit value using specified bit as sign-bit
 * @value: value to sign extend
 * @index: 0 based bit index (0<=index<32) to sign bit
 *
 * This is safe to use for 16- and 8-bit types as well.
 */
static __always_inline __s32 sign_extend32(__u32 value, int index)
{
	__u8 shift = 31 - index;
	return (__s32)(value << shift) >> shift;
}
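
/*
 * Worked example (illustrative): treating bit 7 as the sign bit,
 * sign_extend32(0x80, 7) == -128 and sign_extend32(0x7f, 7) == 127;
 * useful when unpacking a signed field narrower than 32 bits.
 */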

/**
 * sign_extend64 - sign extend a 64-bit value using specified bit as sign-bit
 * @value: value to sign extend
 * @index: 0 based bit index (0<=index<64) to sign bit
 */
static __always_inline __s64 sign_extend64(__u64 value, int index)
{
	__u8 shift = 63 - index;
	return (__s64)(value << shift) >> shift;
}

static inline unsigned fls_long(unsigned long l)
{
	if (sizeof(l) == 4)
		return fls(l);
	return fls64(l);
}

static inline int get_count_order(unsigned int count)
{
	if (count == 0)
		return -1;

	return fls(--count);
}
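
/*
 * Worked example (illustrative): get_count_order() yields the order of
 * the next power of two at or above @count, so get_count_order(4) == 2,
 * get_count_order(5) == 3 (5 rounds up to 8), and get_count_order(0)
 * returns -1 as an error value.
 */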

/**
 * get_count_order_long - get order after rounding @l up to power of 2
 * @l: parameter
 *
 * The same as get_count_order(), but with a long type parameter.
 */
static inline int get_count_order_long(unsigned long l)
{
	if (l == 0UL)
		return -1;
	return (int)fls_long(--l);
}

/**
 * __ffs64 - find first set bit in a 64 bit word
 * @word: The 64 bit word
 *
 * On 64 bit arches this is a synonym for __ffs.
 * The result is not defined if no bits are set, so check that @word
 * is non-zero before calling this.
 */
static inline unsigned long __ffs64(u64 word)
{
#if BITS_PER_LONG == 32
	if (((u32)word) == 0UL)
		return __ffs((u32)(word >> 32)) + 32;
#elif BITS_PER_LONG != 64
#error BITS_PER_LONG not 32 or 64
#endif
	return __ffs((unsigned long)word);
}
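
/*
 * Worked example (illustrative): __ffs64(0x100) == 8, the index of the
 * lowest set bit. On a 32-bit kernel, __ffs64(1ULL << 40) takes the
 * upper-half path and returns 40; calling it with 0 is undefined.
 */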

/**
 * assign_bit - Assign value to a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 * @value: the value to assign
 */
static __always_inline void assign_bit(long nr, volatile unsigned long *addr,
				       bool value)
{
	if (value)
		set_bit(nr, addr);
	else
		clear_bit(nr, addr);
}
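
/*
 * Usage sketch (illustrative; 'flags' and 'enable' are hypothetical):
 * assign_bit() replaces the common if/else on a boolean with a single
 * call. This variant uses the atomic set_bit()/clear_bit();
 * __assign_bit() below is the non-atomic counterpart.
 *
 *	DECLARE_BITMAP(flags, 32);
 *	bool enable = true;
 *
 *	assign_bit(5, flags, enable);	(sets bit 5)
 *	assign_bit(5, flags, false);	(clears bit 5)
 */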

static __always_inline void __assign_bit(long nr, volatile unsigned long *addr,
					 bool value)
{
	if (value)
		__set_bit(nr, addr);
	else
		__clear_bit(nr, addr);
}

#ifdef __KERNEL__

#ifndef set_mask_bits
#define set_mask_bits(ptr, mask, bits) \
({								\
	const typeof(*(ptr)) mask__ = (mask), bits__ = (bits);	\
	typeof(*(ptr)) old__, new__;				\
								\
	do {							\
		old__ = READ_ONCE(*(ptr));			\
		new__ = (old__ & ~mask__) | bits__;		\
	} while (cmpxchg(ptr, old__, new__) != old__);		\
								\
	old__;							\
})
#endif
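
/*
 * Usage sketch (illustrative; 'state' is a hypothetical variable):
 * atomically replace the low nibble of a word with 0x5 while leaving
 * the other bits untouched, retrying on concurrent modification.
 *
 *	unsigned long state;
 *	unsigned long old;
 *
 *	old = set_mask_bits(&state, 0xfUL, 0x5UL);
 *
 * The macro evaluates to the value observed before the update.
 */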

#ifndef bit_clear_unless
#define bit_clear_unless(ptr, clear, test) \
({								\
	const typeof(*(ptr)) clear__ = (clear), test__ = (test);\
	typeof(*(ptr)) old__, new__;				\
								\
	do {							\
		old__ = READ_ONCE(*(ptr));			\
		new__ = old__ & ~clear__;			\
	} while (!(old__ & test__) &&				\
		 cmpxchg(ptr, old__, new__) != old__);		\
								\
	!(old__ & test__);					\
})
#endif
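
/*
 * Usage sketch (illustrative; the FLAG_* values are hypothetical):
 * clear FLAG_PENDING atomically, but only while FLAG_LOCKED is not set;
 * the result tells the caller whether the clear actually happened.
 *
 *	#define FLAG_PENDING	0x1UL
 *	#define FLAG_LOCKED	0x2UL
 *	unsigned long flags;
 *
 *	if (bit_clear_unless(&flags, FLAG_PENDING, FLAG_LOCKED))
 *		... FLAG_PENDING was cleared ...
 *	else
 *		... FLAG_LOCKED was set, nothing changed ...
 */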

#ifndef find_last_bit
/**
 * find_last_bit - find the last set bit in a memory region
 * @addr: The address to start the search at
 * @size: The number of bits to search
 *
 * Returns the bit number of the last set bit, or @size if no bits are set.
 */
extern unsigned long find_last_bit(const unsigned long *addr,
				   unsigned long size);
#endif
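
/*
 * Usage sketch (illustrative; 'map' is a hypothetical bitmap): find the
 * highest set bit, checking the "nothing set" sentinel.
 *
 *	DECLARE_BITMAP(map, 128);
 *	unsigned long last;
 *
 *	last = find_last_bit(map, 128);
 *	if (last == 128)
 *		... the bitmap is empty ...
 */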

#endif /* __KERNEL__ */
#endif /* _LINUX_BITOPS_H */