/*
 * Copyright (C) 2014 Stefan Kristiansson <stefan.kristiansson@saunalahti.fi>
 *
 * This file is licensed under the terms of the GNU General Public License
 * version 2. This program is licensed "as is" without any warranty of any
 * kind, whether express or implied.
 */

#ifndef __ASM_OPENRISC_BITOPS_ATOMIC_H
#define __ASM_OPENRISC_BITOPS_ATOMIC_H

/*
 * Atomically set bit @nr in the bitmap starting at @addr.
 *
 * Uses an l.lwa/l.swa (load-linked/store-conditional) retry loop:
 * l.swa succeeds only if no other store hit the reservation since the
 * l.lwa; on failure the flag is clear and l.bnf loops back to retry.
 */
static inline void set_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *word = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long t;

	__asm__ __volatile__(
		"1:	l.lwa	%0,0(%1)	\n"
		"	l.or	%0,%0,%2	\n"
		"	l.swa	0(%1),%0	\n"
		"	l.bnf	1b		\n"
		"	l.nop			\n"
		: "=&r"(t)
		: "r"(word), "r"(mask)
		: "cc", "memory");
}
/*
 * Atomically clear bit @nr in the bitmap starting at @addr.
 *
 * Same l.lwa/l.swa retry loop as set_bit(), but the complemented mask
 * is passed in so a single l.and clears the target bit.
 */
static inline void clear_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *word = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long t;

	__asm__ __volatile__(
		"1:	l.lwa	%0,0(%1)	\n"
		"	l.and	%0,%0,%2	\n"
		"	l.swa	0(%1),%0	\n"
		"	l.bnf	1b		\n"
		"	l.nop			\n"
		: "=&r"(t)
		: "r"(word), "r"(~mask)
		: "cc", "memory");
}
/*
 * Atomically toggle bit @nr in the bitmap starting at @addr.
 *
 * l.lwa/l.swa retry loop; l.xor flips the target bit in the loaded
 * word before the conditional store.
 */
static inline void change_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *word = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long t;

	__asm__ __volatile__(
		"1:	l.lwa	%0,0(%1)	\n"
		"	l.xor	%0,%0,%2	\n"
		"	l.swa	0(%1),%0	\n"
		"	l.bnf	1b		\n"
		"	l.nop			\n"
		: "=&r"(t)
		: "r"(word), "r"(mask)
		: "cc", "memory");
}
/*
 * Atomically set bit @nr in the bitmap at @addr and return its
 * previous value (non-zero iff the bit was already set).
 *
 * Two registers are needed here: %0 keeps the pre-modification word
 * for the return value while %1 holds the updated word that l.swa
 * conditionally stores back.
 */
static inline int test_and_set_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *word = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long orig;
	unsigned long t;

	__asm__ __volatile__(
		"1:	l.lwa	%0,0(%2)	\n"
		"	l.or	%1,%0,%3	\n"
		"	l.swa	0(%2),%1	\n"
		"	l.bnf	1b		\n"
		"	l.nop			\n"
		: "=&r"(orig), "=&r"(t)
		: "r"(word), "r"(mask)
		: "cc", "memory");

	return (orig & mask) != 0;
}
/*
 * Atomically clear bit @nr in the bitmap at @addr and return its
 * previous value (non-zero iff the bit was set).
 *
 * As in test_and_set_bit(), %0 preserves the original word for the
 * return value; the complemented mask lets a single l.and clear the
 * bit in %1 before the conditional store.
 */
static inline int test_and_clear_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *word = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long orig;
	unsigned long t;

	__asm__ __volatile__(
		"1:	l.lwa	%0,0(%2)	\n"
		"	l.and	%1,%0,%3	\n"
		"	l.swa	0(%2),%1	\n"
		"	l.bnf	1b		\n"
		"	l.nop			\n"
		: "=&r"(orig), "=&r"(t)
		: "r"(word), "r"(~mask)
		: "cc", "memory");

	return (orig & mask) != 0;
}
/*
 * Atomically toggle bit @nr in the bitmap at @addr and return its
 * previous value (non-zero iff the bit was set before the flip).
 *
 * %0 holds the pre-toggle word for the return value; %1 receives the
 * XOR-ed word that l.swa conditionally stores back.
 */
static inline int test_and_change_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *word = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long orig;
	unsigned long t;

	__asm__ __volatile__(
		"1:	l.lwa	%0,0(%2)	\n"
		"	l.xor	%1,%0,%3	\n"
		"	l.swa	0(%2),%1	\n"
		"	l.bnf	1b		\n"
		"	l.nop			\n"
		: "=&r"(orig), "=&r"(t)
		: "r"(word), "r"(mask)
		: "cc", "memory");

	return (orig & mask) != 0;
}

#endif /* __ASM_OPENRISC_BITOPS_ATOMIC_H */