#ifndef _M68K_BITOPS_H
#define _M68K_BITOPS_H
/*
 * Copyright 1992, Linus Torvalds.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file COPYING in the main directory of this archive
 * for more details.
 */

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <asm/barrier.h>

/*
 * Bit access functions vary across the ColdFire and 68k families.
 * So we break them out here, and then macro in the ones we want.
 *
 * ColdFire - supports standard bset/bclr/bchg with register operand only
 * 68000    - supports standard bset/bclr/bchg with memory operand
 * >= 68020 - also supports the bfset/bfclr/bfchg instructions
 *
 * Although it is possible to use only the bset/bclr/bchg forms with
 * register operands on all platforms, that results in larger generated
 * code. So we use the best form available on a given platform (see the
 * usage sketch after the set_bit definitions below).
 */

static inline void bset_reg_set_bit(int nr, volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;

	__asm__ __volatile__ ("bset %1,(%0)"
		:
		: "a" (p), "di" (nr & 7)
		: "memory");
}

static inline void bset_mem_set_bit(int nr, volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;

	__asm__ __volatile__ ("bset %1,%0"
		: "+m" (*p)
		: "di" (nr & 7));
}

static inline void bfset_mem_set_bit(int nr, volatile unsigned long *vaddr)
{
	__asm__ __volatile__ ("bfset %1{%0:#1}"
		:
		: "d" (nr ^ 31), "o" (*vaddr)
		: "memory");
}

#if defined(CONFIG_COLDFIRE)
#define set_bit(nr, vaddr)	bset_reg_set_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define set_bit(nr, vaddr)	bset_mem_set_bit(nr, vaddr)
#else
#define set_bit(nr, vaddr)	(__builtin_constant_p(nr) ? \
				bset_mem_set_bit(nr, vaddr) : \
				bfset_mem_set_bit(nr, vaddr))
#endif

#define __set_bit(nr, vaddr)	set_bit(nr, vaddr)
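
/*
 * A minimal usage sketch (illustrative only; the helper name and bitmap
 * below are hypothetical, not part of this header): whichever variant
 * the macros above select, callers simply write set_bit(nr, addr).
 */
static inline void bitops_example_mark_busy(volatile unsigned long *map)
{
	set_bit(0, map);	/* atomically set bit 0 of the bitmap */
	__set_bit(1, map);	/* on m68k the non-atomic form is the same op */
}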


static inline void bclr_reg_clear_bit(int nr, volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;

	__asm__ __volatile__ ("bclr %1,(%0)"
		:
		: "a" (p), "di" (nr & 7)
		: "memory");
}

static inline void bclr_mem_clear_bit(int nr, volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;

	__asm__ __volatile__ ("bclr %1,%0"
		: "+m" (*p)
		: "di" (nr & 7));
}

static inline void bfclr_mem_clear_bit(int nr, volatile unsigned long *vaddr)
{
	__asm__ __volatile__ ("bfclr %1{%0:#1}"
		:
		: "d" (nr ^ 31), "o" (*vaddr)
		: "memory");
}

#if defined(CONFIG_COLDFIRE)
#define clear_bit(nr, vaddr)	bclr_reg_clear_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define clear_bit(nr, vaddr)	bclr_mem_clear_bit(nr, vaddr)
#else
#define clear_bit(nr, vaddr)	(__builtin_constant_p(nr) ? \
				bclr_mem_clear_bit(nr, vaddr) : \
				bfclr_mem_clear_bit(nr, vaddr))
#endif

#define __clear_bit(nr, vaddr)	clear_bit(nr, vaddr)


static inline void bchg_reg_change_bit(int nr, volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;

	__asm__ __volatile__ ("bchg %1,(%0)"
		:
		: "a" (p), "di" (nr & 7)
		: "memory");
}

static inline void bchg_mem_change_bit(int nr, volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;

	__asm__ __volatile__ ("bchg %1,%0"
		: "+m" (*p)
		: "di" (nr & 7));
}

static inline void bfchg_mem_change_bit(int nr, volatile unsigned long *vaddr)
{
	__asm__ __volatile__ ("bfchg %1{%0:#1}"
		:
		: "d" (nr ^ 31), "o" (*vaddr)
		: "memory");
}

#if defined(CONFIG_COLDFIRE)
#define change_bit(nr, vaddr)	bchg_reg_change_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define change_bit(nr, vaddr)	bchg_mem_change_bit(nr, vaddr)
#else
#define change_bit(nr, vaddr)	(__builtin_constant_p(nr) ? \
				bchg_mem_change_bit(nr, vaddr) : \
				bfchg_mem_change_bit(nr, vaddr))
#endif

#define __change_bit(nr, vaddr)	change_bit(nr, vaddr)


static inline int test_bit(int nr, const volatile unsigned long *vaddr)
{
	return (vaddr[nr >> 5] & (1UL << (nr & 31))) != 0;
}


static inline int bset_reg_test_and_set_bit(int nr,
		volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;
	char retval;

	__asm__ __volatile__ ("bset %2,(%1); sne %0"
		: "=d" (retval)
		: "a" (p), "di" (nr & 7)
		: "memory");
	return retval;
}

static inline int bset_mem_test_and_set_bit(int nr,
		volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;
	char retval;

	__asm__ __volatile__ ("bset %2,%1; sne %0"
		: "=d" (retval), "+m" (*p)
		: "di" (nr & 7));
	return retval;
}

static inline int bfset_mem_test_and_set_bit(int nr,
		volatile unsigned long *vaddr)
{
	char retval;

	__asm__ __volatile__ ("bfset %2{%1:#1}; sne %0"
		: "=d" (retval)
		: "d" (nr ^ 31), "o" (*vaddr)
		: "memory");
	return retval;
}

#if defined(CONFIG_COLDFIRE)
#define test_and_set_bit(nr, vaddr)	bset_reg_test_and_set_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define test_and_set_bit(nr, vaddr)	bset_mem_test_and_set_bit(nr, vaddr)
#else
#define test_and_set_bit(nr, vaddr)	(__builtin_constant_p(nr) ? \
					bset_mem_test_and_set_bit(nr, vaddr) : \
					bfset_mem_test_and_set_bit(nr, vaddr))
#endif

#define __test_and_set_bit(nr, vaddr)	test_and_set_bit(nr, vaddr)
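
/*
 * A minimal sketch (hypothetical helper, not part of this header):
 * test_and_set_bit() returns the previous bit value, so a caller can
 * atomically claim slot 'nr' in a driver-private bitmap.
 */
static inline int bitops_example_claim(int nr, volatile unsigned long *map)
{
	/* a non-zero return means the slot was already taken */
	return test_and_set_bit(nr, map);
}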


static inline int bclr_reg_test_and_clear_bit(int nr,
		volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;
	char retval;

	__asm__ __volatile__ ("bclr %2,(%1); sne %0"
		: "=d" (retval)
		: "a" (p), "di" (nr & 7)
		: "memory");
	return retval;
}

static inline int bclr_mem_test_and_clear_bit(int nr,
		volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;
	char retval;

	__asm__ __volatile__ ("bclr %2,%1; sne %0"
		: "=d" (retval), "+m" (*p)
		: "di" (nr & 7));
	return retval;
}

static inline int bfclr_mem_test_and_clear_bit(int nr,
		volatile unsigned long *vaddr)
{
	char retval;

	__asm__ __volatile__ ("bfclr %2{%1:#1}; sne %0"
		: "=d" (retval)
		: "d" (nr ^ 31), "o" (*vaddr)
		: "memory");
	return retval;
}

#if defined(CONFIG_COLDFIRE)
#define test_and_clear_bit(nr, vaddr)	bclr_reg_test_and_clear_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define test_and_clear_bit(nr, vaddr)	bclr_mem_test_and_clear_bit(nr, vaddr)
#else
#define test_and_clear_bit(nr, vaddr)	(__builtin_constant_p(nr) ? \
					bclr_mem_test_and_clear_bit(nr, vaddr) : \
					bfclr_mem_test_and_clear_bit(nr, vaddr))
#endif

#define __test_and_clear_bit(nr, vaddr)	test_and_clear_bit(nr, vaddr)


static inline int bchg_reg_test_and_change_bit(int nr,
		volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;
	char retval;

	__asm__ __volatile__ ("bchg %2,(%1); sne %0"
		: "=d" (retval)
		: "a" (p), "di" (nr & 7)
		: "memory");
	return retval;
}

static inline int bchg_mem_test_and_change_bit(int nr,
		volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;
	char retval;

	__asm__ __volatile__ ("bchg %2,%1; sne %0"
		: "=d" (retval), "+m" (*p)
		: "di" (nr & 7));
	return retval;
}

static inline int bfchg_mem_test_and_change_bit(int nr,
		volatile unsigned long *vaddr)
{
	char retval;

	__asm__ __volatile__ ("bfchg %2{%1:#1}; sne %0"
		: "=d" (retval)
		: "d" (nr ^ 31), "o" (*vaddr)
		: "memory");
	return retval;
}

#if defined(CONFIG_COLDFIRE)
#define test_and_change_bit(nr, vaddr)	bchg_reg_test_and_change_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define test_and_change_bit(nr, vaddr)	bchg_mem_test_and_change_bit(nr, vaddr)
#else
#define test_and_change_bit(nr, vaddr)	(__builtin_constant_p(nr) ? \
					bchg_mem_test_and_change_bit(nr, vaddr) : \
					bfchg_mem_test_and_change_bit(nr, vaddr))
#endif

#define __test_and_change_bit(nr, vaddr) test_and_change_bit(nr, vaddr)


/*
 * The true 68020 and more advanced processors support the "bfffo"
 * instruction for finding bits. ColdFire and simple 68000 parts
 * (including CPU32) do not support this. They simply use the generic
 * functions.
 */
#if defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#include <asm-generic/bitops/ffz.h>
#else

static inline int find_first_zero_bit(const unsigned long *vaddr,
		unsigned size)
{
	const unsigned long *p = vaddr;
	int res = 32;
	unsigned int words;
	unsigned long num;

	if (!size)
		return 0;

	words = (size + 31) >> 5;
	while (!(num = ~*p++)) {
		if (!--words)
			goto out;
	}

	__asm__ __volatile__ ("bfffo %1{#0,#0},%0"
		: "=d" (res) : "d" (num & -num));
	res ^= 31;
out:
	res += ((long)p - (long)vaddr - 4) * 8;
	return res < size ? res : size;
}
#define find_first_zero_bit find_first_zero_bit
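
/*
 * A minimal sketch (hypothetical ID allocator, not part of this header):
 * find the first free ID in an nbits-sized bitmap and mark it used.
 * The find/set pair is not atomic, so the caller must hold a lock.
 */
static inline int bitops_example_alloc_id(unsigned long *map, unsigned nbits)
{
	int id = find_first_zero_bit(map, nbits);

	if (id >= (int)nbits)
		return -1;	/* bitmap full, no free ID */
	set_bit(id, map);
	return id;
}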

static inline int find_next_zero_bit(const unsigned long *vaddr, int size,
		int offset)
{
	const unsigned long *p = vaddr + (offset >> 5);
	int bit = offset & 31UL, res;

	if (offset >= size)
		return size;

	if (bit) {
		unsigned long num = ~*p++ & (~0UL << bit);
		offset -= bit;

		/* Look for zero in first longword */
		__asm__ __volatile__ ("bfffo %1{#0,#0},%0"
			: "=d" (res) : "d" (num & -num));
		if (res < 32) {
			offset += res ^ 31;
			return offset < size ? offset : size;
		}
		offset += 32;

		if (offset >= size)
			return size;
	}
	/* No zero yet, search the remaining full longwords for a zero */
	return offset + find_first_zero_bit(p, size - offset);
}
#define find_next_zero_bit find_next_zero_bit

static inline int find_first_bit(const unsigned long *vaddr, unsigned size)
{
	const unsigned long *p = vaddr;
	int res = 32;
	unsigned int words;
	unsigned long num;

	if (!size)
		return 0;

	words = (size + 31) >> 5;
	while (!(num = *p++)) {
		if (!--words)
			goto out;
	}

	__asm__ __volatile__ ("bfffo %1{#0,#0},%0"
		: "=d" (res) : "d" (num & -num));
	res ^= 31;
out:
	res += ((long)p - (long)vaddr - 4) * 8;
	return res < size ? res : size;
}
#define find_first_bit find_first_bit

static inline int find_next_bit(const unsigned long *vaddr, int size,
		int offset)
{
	const unsigned long *p = vaddr + (offset >> 5);
	int bit = offset & 31UL, res;

	if (offset >= size)
		return size;

	if (bit) {
		unsigned long num = *p++ & (~0UL << bit);
		offset -= bit;

		/* Look for one in first longword */
		__asm__ __volatile__ ("bfffo %1{#0,#0},%0"
			: "=d" (res) : "d" (num & -num));
		if (res < 32) {
			offset += res ^ 31;
			return offset < size ? offset : size;
		}
		offset += 32;

		if (offset >= size)
			return size;
	}
	/* No one yet, search the remaining full longwords for a one */
	return offset + find_first_bit(p, size - offset);
}
#define find_next_bit find_next_bit

/*
 * ffz = Find First Zero in word. Undefined if no zero exists,
 * so code should check against ~0UL first.
 */
static inline unsigned long ffz(unsigned long word)
{
	int res;

	__asm__ __volatile__ ("bfffo %1{#0,#0},%0"
		: "=d" (res) : "d" (~word & -~word));
	return res ^ 31;
}
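
/*
 * A minimal sketch (hypothetical caller, not part of this header): per
 * the comment above, ffz() is undefined on an all-ones word, so check
 * against ~0UL before using it.
 */
static inline int bitops_example_first_zero(unsigned long word)
{
	if (word == ~0UL)
		return -1;	/* no zero bit present */
	return ffz(word);	/* index of the lowest zero bit, 0..31 */
}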

#endif

#include <asm-generic/bitops/find.h>

#ifdef __KERNEL__

#if defined(CONFIG_CPU_HAS_NO_BITFIELDS)

/*
 * The newer ColdFire family members support a "bitrev" instruction
 * and we can use that to implement a fast ffs. Older ColdFire parts
 * and normal 68000 parts don't have anything special, so we use the
 * generic functions for those.
 */
#if (defined(__mcfisaaplus__) || defined(__mcfisac__)) && \
	!defined(CONFIG_M68000) && !defined(CONFIG_MCPU32)
static inline unsigned long __ffs(unsigned long x)
{
	__asm__ __volatile__ ("bitrev %0; ff1 %0"
		: "=d" (x)
		: "0" (x));
	return x;
}

static inline int ffs(int x)
{
	if (!x)
		return 0;
	return __ffs(x) + 1;
}

#else
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/__ffs.h>
#endif

#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/__fls.h>

#else

/*
 * ffs: find first bit set. This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above ffz (man ffs).
 */
static inline int ffs(int x)
{
	int cnt;

	__asm__ ("bfffo %1{#0:#0},%0"
		: "=d" (cnt)
		: "dm" (x & -x));
	return 32 - cnt;
}

static inline unsigned long __ffs(unsigned long x)
{
	return ffs(x) - 1;
}

/*
 * fls: find last bit set.
 */
static inline int fls(unsigned int x)
{
	int cnt;

	__asm__ ("bfffo %1{#0,#0},%0"
		: "=d" (cnt)
		: "dm" (x));
	return 32 - cnt;
}

static inline int __fls(int x)
{
	return fls(x) - 1;
}
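
/*
 * A minimal sketch (hypothetical helper, not part of this header):
 * fls() returns the 1-based position of the highest set bit, so
 * fls(x) - 1 is floor(log2(x)) for any non-zero x.
 */
static inline int bitops_example_ilog2(unsigned int x)
{
	return x ? fls(x) - 1 : -1;	/* e.g. fls(0x1000) - 1 == 12 */
}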

#endif

/* Simple test-and-set bit locks */
#define test_and_set_bit_lock	test_and_set_bit
#define clear_bit_unlock	clear_bit
#define __clear_bit_unlock	clear_bit_unlock
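
/*
 * A minimal sketch (hypothetical lock word, not part of this header):
 * with the aliases above, a simple busy-wait bit lock spins until
 * test_and_set_bit_lock() observes that the bit was previously clear.
 */
static inline void bitops_example_bit_lock(volatile unsigned long *word)
{
	while (test_and_set_bit_lock(0, word))
		;	/* spin: bit 0 was already held */
}

static inline void bitops_example_bit_unlock(volatile unsigned long *word)
{
	clear_bit_unlock(0, word);
}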

#include <asm-generic/bitops/ext2-atomic.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/hweight.h>
#endif /* __KERNEL__ */

#endif /* _M68K_BITOPS_H */