/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_BITOPS_GRB_H
#define __ASM_SH_BITOPS_GRB_H

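/*
 * Each operation below brackets its load/modify/store with a gUSA-style
 * rollback sequence: r0 is loaded with the address of the "1:" end label,
 * the stack pointer is saved in r1, and r15 is set to the negative byte
 * length of the critical region (LOGIN).  An interrupted sequence can be
 * detected from the negative r15 and restarted from the beginning, so the
 * read-modify-write is effectively atomic.  Restoring r15 at the "1:"
 * label ends the region (LOGOUT).
 */
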
static inline void set_bit(int nr, volatile void * addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   or      %2,   %0      \n\t" /* or */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r" (a)
		: "r" (mask)
		: "memory", "r0", "r1");
}

static inline void clear_bit(int nr, volatile void * addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = ~(1 << (nr & 0x1f));
	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   and     %2,   %0      \n\t" /* and */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r" (a)
		: "r" (mask)
		: "memory", "r0", "r1");
}

static inline void change_bit(int nr, volatile void * addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   xor     %2,   %0      \n\t" /* xor */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r" (a)
		: "r" (mask)
		: "memory", "r0", "r1");
}

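/*
 * The test_and_* variants below also return the old bit value: "tst" sets
 * the T flag when (old & mask) == 0, and the "mov #-1" / "negc" pair turns
 * that into 1 - T, i.e. 1 if the bit was previously set and 0 otherwise.
 */
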
static inline int test_and_set_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov   #-14,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%2,   %0      \n\t" /* load old value */
		"   mov     %0,   %1      \n\t"
		"   tst     %1,   %3      \n\t" /* T = ((*a & mask) == 0) */
		"   mov    #-1,   %1      \n\t" /* retval = -1 */
		"   negc    %1,   %1      \n\t" /* retval = (mask & *a) != 0 */
		"   or      %3,   %0      \n\t"
		"   mov.l   %0,   @%2     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "=&r" (retval),
		  "+r" (a)
		: "r" (mask)
		: "memory", "r0", "r1", "t");

	return retval;
}

static inline int test_and_clear_bit(int nr, volatile void * addr)
{
	int mask, retval, not_mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	not_mask = ~mask;

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov   #-14,   r15     \n\t" /* LOGIN */
		"   mov.l  @%2,   %0      \n\t" /* load old value */
		"   mov     %0,   %1      \n\t" /* %1 = *a */
		"   tst     %1,   %3      \n\t" /* T = ((*a & mask) == 0) */
		"   mov    #-1,   %1      \n\t" /* retval = -1 */
		"   negc    %1,   %1      \n\t" /* retval = (mask & *a) != 0 */
		"   and     %4,   %0      \n\t"
		"   mov.l   %0,   @%2     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "=&r" (retval),
		  "+r" (a)
		: "r" (mask),
		  "r" (not_mask)
		: "memory", "r0", "r1", "t");

	return retval;
}

static inline int test_and_change_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov   #-14,   r15     \n\t" /* LOGIN */
		"   mov.l  @%2,   %0      \n\t" /* load old value */
		"   mov     %0,   %1      \n\t" /* %1 = *a */
		"   tst     %1,   %3      \n\t" /* T = ((*a & mask) == 0) */
		"   mov    #-1,   %1      \n\t" /* retval = -1 */
		"   negc    %1,   %1      \n\t" /* retval = (mask & *a) != 0 */
		"   xor     %3,   %0      \n\t"
		"   mov.l   %0,   @%2     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "=&r" (retval),
		  "+r" (a)
		: "r" (mask)
		: "memory", "r0", "r1", "t");

	return retval;
}

#include <asm-generic/bitops/non-atomic.h>

#endif /* __ASM_SH_BITOPS_GRB_H */