/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_BITOPS_LLSC_H
#define __ASM_SH_BITOPS_LLSC_H

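/*
 * Atomic bitops for SH CPUs that implement the movli.l/movco.l
 * (load-linked/store-conditional) pair, e.g. SH-4A.
 *
 * Each helper first locates the 32-bit word holding bit nr
 * (a += nr >> 5) and builds a single-bit mask (1 << (nr & 0x1f)),
 * then retries a short LL/SC loop until the conditional store
 * succeeds:
 *
 *	movli.l	@a, r0		! load the word, open the link
 *	<or/and/xor>		! apply the mask in r0
 *	movco.l	r0, @a		! store iff the link is intact, set T
 *	bf	1b		! T clear => store failed, retry
 *
 * movli.l/movco.l operate on r0 only, which is why the temporary is
 * pinned there with the "=&z" register constraint.
 */

/**
 * set_bit - atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 */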
static inline void set_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%1, %0	! set_bit		\n\t"
		"or		%2, %0				\n\t"
		"movco.l	%0, @%1				\n\t"
		"bf		1b				\n\t"
		: "=&z" (tmp)
		: "r" (a), "r" (mask)
		: "t", "memory"
	);
}

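/**
 * clear_bit - atomically clear a bit in memory
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * The inverted mask (~mask) is passed to the asm so the loop can drop
 * the bit with a plain "and".
 */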
static inline void clear_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%1, %0	! clear_bit		\n\t"
		"and		%2, %0				\n\t"
		"movco.l	%0, @%1				\n\t"
		"bf		1b				\n\t"
		: "=&z" (tmp)
		: "r" (a), "r" (~mask)
		: "t", "memory"
	);
}

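/**
 * change_bit - atomically toggle a bit in memory
 * @nr: the bit to toggle
 * @addr: the address to start counting from
 */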
static inline void change_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%1, %0	! change_bit		\n\t"
		"xor		%2, %0				\n\t"
		"movco.l	%0, @%1				\n\t"
		"bf		1b				\n\t"
		: "=&z" (tmp)
		: "r" (a), "r" (mask)
		: "t", "memory"
	);
}

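/**
 * test_and_set_bit - atomically set a bit and return its old value
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * The pre-update word is snapshotted into %1 inside the LL/SC loop;
 * once the store has succeeded it is masked down to the bit of
 * interest, so this returns nonzero iff the bit was already set.
 */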
static inline int test_and_set_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%2, %0	! test_and_set_bit	\n\t"
		"mov		%0, %1				\n\t"
		"or		%3, %0				\n\t"
		"movco.l	%0, @%2				\n\t"
		"bf		1b				\n\t"
		"and		%3, %1				\n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (a), "r" (mask)
		: "t", "memory"
	);

	return retval != 0;
}

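/**
 * test_and_clear_bit - atomically clear a bit and return its old value
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * Returns nonzero iff the bit was set beforehand.  The trailing
 * "synco" is the SH-4A synchronization instruction, serving as a
 * memory barrier after the successful update.
 */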
static inline int test_and_clear_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%2, %0	! test_and_clear_bit	\n\t"
		"mov		%0, %1				\n\t"
		"and		%4, %0				\n\t"
		"movco.l	%0, @%2				\n\t"
		"bf		1b				\n\t"
		"and		%3, %1				\n\t"
		"synco						\n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (a), "r" (mask), "r" (~mask)
		: "t", "memory"
	);

	return retval != 0;
}

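/**
 * test_and_change_bit - atomically toggle a bit and return its old value
 * @nr: the bit to toggle
 * @addr: the address to start counting from
 *
 * Returns nonzero iff the bit was set beforehand; like
 * test_and_clear_bit(), it ends with a "synco" barrier.
 */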
static inline int test_and_change_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%2, %0	! test_and_change_bit	\n\t"
		"mov		%0, %1				\n\t"
		"xor		%3, %0				\n\t"
		"movco.l	%0, @%2				\n\t"
		"bf		1b				\n\t"
		"and		%3, %1				\n\t"
		"synco						\n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (a), "r" (mask)
		: "t", "memory"
	);

	return retval != 0;
}

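/*
 * Illustrative usage (hypothetical caller, not part of this header):
 *
 *	static unsigned long pending;		bit 0 == work queued
 *
 *	if (!test_and_set_bit(0, &pending))
 *		kick_worker();			hypothetical: first setter queues
 *	...
 *	if (test_and_clear_bit(0, &pending))
 *		process_work();			hypothetical: consumes exactly once
 *
 * The non-atomic __set_bit()/__clear_bit()/... variants come from the
 * generic header included below.
 */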
#include <asm-generic/bitops/non-atomic.h>

#endif /* __ASM_SH_BITOPS_LLSC_H */