/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _PARISC_BITOPS_H
#define _PARISC_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <asm/types.h>
#include <asm/byteorder.h>
#include <asm/barrier.h>
#include <linux/atomic.h>

/* See http://marc.theaimsgroup.com/?t=108826637900003 for discussion
 * on the use of volatile and __*_bit() (set/clear/change):
 * the *_bit() forms want the use of volatile;
 * the __*_bit() forms are "relaxed" and use neither a spinlock nor volatile.
 */
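
/*
 * Usage sketch (illustrative, not part of this header): the atomic forms
 * below serialize through _atomic_spin_lock_irqsave(), which hashes the
 * word's address to one of a small array of spinlocks (see asm/atomic.h),
 * so they are safe against concurrent updaters.  The __*_bit() forms
 * pulled in from <asm-generic/bitops/non-atomic.h> leave serialization
 * to the caller.  "my_lock" and "flags_word" are hypothetical names:
 *
 *	set_bit(0, &flags_word);	// atomic: no external locking needed
 *
 *	spin_lock(&my_lock);		// caller-provided serialization ...
 *	__set_bit(1, &flags_word);	// ... permits the cheaper relaxed form
 *	spin_unlock(&my_lock);
 */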

static __inline__ void set_bit(int nr, volatile unsigned long * addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long flags;

	addr += BIT_WORD(nr);
	_atomic_spin_lock_irqsave(addr, flags);
	*addr |= mask;
	_atomic_spin_unlock_irqrestore(addr, flags);
}

static __inline__ void clear_bit(int nr, volatile unsigned long * addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long flags;

	addr += BIT_WORD(nr);
	_atomic_spin_lock_irqsave(addr, flags);
	*addr &= ~mask;
	_atomic_spin_unlock_irqrestore(addr, flags);
}

static __inline__ void change_bit(int nr, volatile unsigned long * addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long flags;

	addr += BIT_WORD(nr);
	_atomic_spin_lock_irqsave(addr, flags);
	*addr ^= mask;
	_atomic_spin_unlock_irqrestore(addr, flags);
}

static __inline__ int test_and_set_bit(int nr, volatile unsigned long * addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long old;
	unsigned long flags;
	int set;

	addr += BIT_WORD(nr);
	_atomic_spin_lock_irqsave(addr, flags);
	old = *addr;
	set = (old & mask) ? 1 : 0;
	if (!set)
		*addr = old | mask;
	_atomic_spin_unlock_irqrestore(addr, flags);

	return set;
}

static __inline__ int test_and_clear_bit(int nr, volatile unsigned long * addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long old;
	unsigned long flags;
	int set;

	addr += BIT_WORD(nr);
	_atomic_spin_lock_irqsave(addr, flags);
	old = *addr;
	set = (old & mask) ? 1 : 0;
	if (set)
		*addr = old & ~mask;
	_atomic_spin_unlock_irqrestore(addr, flags);

	return set;
}

static __inline__ int test_and_change_bit(int nr, volatile unsigned long * addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long oldbit;
	unsigned long flags;

	addr += BIT_WORD(nr);
	_atomic_spin_lock_irqsave(addr, flags);
	oldbit = *addr;
	*addr = oldbit ^ mask;
	_atomic_spin_unlock_irqrestore(addr, flags);

	return (oldbit & mask) ? 1 : 0;
}
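
/*
 * Illustrative sketch: the test_and_*() variants return the old bit value,
 * which makes "claim once" logic race-free.  INIT_DONE_BIT and init_flags
 * are hypothetical names used only for this example:
 *
 *	if (!test_and_set_bit(INIT_DONE_BIT, &init_flags))
 *		do_one_time_init();	// old bit was 0: this caller won the race
 */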

#include <asm-generic/bitops/non-atomic.h>

#ifdef __KERNEL__

/**
 * __ffs - find first set bit in a word; returns 0 to BITS_PER_LONG-1.
 * @word: The word to search
 *
 * __ffs()'s return value is undefined if no bit is set.
 *
 * 32-bit fast __ffs by LaMont Jones "lamont At hp com".
 * 64-bit enhancement by Grant Grundler "grundler At parisc-linux org".
 * (with help from willy/jejb to get the semantics right)
 *
 * This algorithm avoids branches by making use of nullification.
 * One side effect of the "extr" instructions is that they set the PSW[N]
 * (nullify next insn) bit; whether it gets set is determined by the
 * "condition" field (e.g. "<>" or "TR" below) in the extr* insn.
 * Only the 1st and one of either the 2nd or 3rd insn will get executed.
 * Each set of 3 insns takes 2 cycles on PA8x00, versus 16 or so cycles
 * for each mispredicted branch.
 */

static __inline__ unsigned long __ffs(unsigned long x)
{
	unsigned long ret;

	__asm__(
#ifdef CONFIG_64BIT
		" ldi		63,%1\n"
		" extrd,u,*<>	%0,63,32,%%r0\n"
		" extrd,u,*TR	%0,31,32,%0\n"	/* move top 32-bits down */
		" addi		-32,%1,%1\n"
#else
		" ldi		31,%1\n"
#endif
		" extru,<>	%0,31,16,%%r0\n"
		" extru,TR	%0,15,16,%0\n"	/* xxxx0000 -> 0000xxxx */
		" addi		-16,%1,%1\n"
		" extru,<>	%0,31,8,%%r0\n"
		" extru,TR	%0,23,8,%0\n"	/* 0000xx00 -> 000000xx */
		" addi		-8,%1,%1\n"
		" extru,<>	%0,31,4,%%r0\n"
		" extru,TR	%0,27,4,%0\n"	/* 000000x0 -> 0000000x */
		" addi		-4,%1,%1\n"
		" extru,<>	%0,31,2,%%r0\n"
		" extru,TR	%0,29,2,%0\n"	/* 0000000y, 1100b -> 0011b */
		" addi		-2,%1,%1\n"
		" extru,=	%0,31,1,%%r0\n"	/* check last bit */
		" addi		-1,%1,%1\n"
		: "+r" (x), "=r" (ret) );
	return ret;
}
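
/*
 * A plain-C rendering of the binary search the asm above implements, kept
 * as a minimal reference sketch (the asm avoids the branches by using
 * nullification; this helper is hypothetical and unused).  Like __ffs(),
 * the result is undefined for x == 0.
 */
static __inline__ unsigned long __ffs_c_sketch(unsigned long x)
{
	unsigned long ret = BITS_PER_LONG - 1;

#ifdef CONFIG_64BIT
	if (x & 0xffffffffUL)	/* set bit is in the low half: narrow down */
		ret -= 32;
	else			/* otherwise bring the high half down */
		x >>= 32;
#endif
	if (x & 0xffffUL) ret -= 16; else x >>= 16;
	if (x & 0xffUL)   ret -=  8; else x >>=  8;
	if (x & 0xfUL)    ret -=  4; else x >>=  4;
	if (x & 0x3UL)    ret -=  2; else x >>=  2;
	if (x & 0x1UL)    ret -=  1;
	return ret;
}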

#include <asm-generic/bitops/ffz.h>

/*
 * ffs: find first bit set. returns 1 to BITS_PER_LONG or 0 (if none set)
 * This is defined the same way as the libc and compiler builtin
 * ffs routines, therefore differs in spirit from the above ffz (man ffs).
 */
static __inline__ int ffs(int x)
{
	return x ? (__ffs((unsigned long)x) + 1) : 0;
}
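
/* For example: ffs(0) == 0, ffs(1) == 1, ffs(0x10) == 5, per ffs(3). */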

/*
 * fls: find last (most significant) bit set.
 * fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */

static __inline__ int fls(unsigned int x)
{
	int ret;
	if (!x)
		return 0;

	__asm__(
		" ldi		1,%1\n"
		" extru,<>	%0,15,16,%%r0\n"
		" zdep,TR	%0,15,16,%0\n"	/* xxxx0000 */
		" addi		16,%1,%1\n"
		" extru,<>	%0,7,8,%%r0\n"
		" zdep,TR	%0,23,24,%0\n"	/* xx000000 */
		" addi		8,%1,%1\n"
		" extru,<>	%0,3,4,%%r0\n"
		" zdep,TR	%0,27,28,%0\n"	/* x0000000 */
		" addi		4,%1,%1\n"
		" extru,<>	%0,1,2,%%r0\n"
		" zdep,TR	%0,29,30,%0\n"	/* y0000000 (y&3 = 0) */
		" addi		2,%1,%1\n"
		" extru,=	%0,0,1,%%r0\n"
		" addi		1,%1,%1\n"	/* if y & 8, add 1 */
		: "+r" (x), "=r" (ret) );

	return ret;
}
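
/*
 * Equivalent plain-C sketch of the fls() asm above (hypothetical, unused):
 * each step tests the high half of the remaining window, either crediting
 * its width to the result or shifting the low half up for the next round.
 */
static __inline__ int fls_c_sketch(unsigned int x)
{
	int ret = 1;

	if (!x)
		return 0;
	if (x & 0xffff0000U) ret += 16; else x <<= 16;
	if (x & 0xff000000U) ret +=  8; else x <<=  8;
	if (x & 0xf0000000U) ret +=  4; else x <<=  4;
	if (x & 0xc0000000U) ret +=  2; else x <<=  2;
	if (x & 0x80000000U) ret +=  1;
	return ret;
}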

#include <asm-generic/bitops/__fls.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/lock.h>
#include <asm-generic/bitops/sched.h>

#endif /* __KERNEL__ */

#include <asm-generic/bitops/find.h>

#ifdef __KERNEL__

#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* __KERNEL__ */

#endif /* _PARISC_BITOPS_H */