/* SPDX-License-Identifier: GPL-2.0 */

#ifndef __ASM_CSKY_SPINLOCK_H
#define __ASM_CSKY_SPINLOCK_H

#include <linux/spinlock_types.h>
#include <asm/barrier.h>

#ifdef CONFIG_QUEUED_RWLOCKS

/*
 * Ticket-based spin-locking.
 */
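/*
 * The 32-bit lock word holds two 16-bit tickets: judging by the
 * "1 << TICKET_NEXT" increment and the "rotli 16" compare below, the
 * 'next' ticket sits TICKET_NEXT bits up and 'owner' in the other half.
 * A locker atomically takes the current 'next' ticket and spins until
 * 'owner' reaches it.
 */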
static inline void arch_spin_lock(arch_spinlock_t *lock)
{
	arch_spinlock_t lockval;
	u32 ticket_next = 1 << TICKET_NEXT;
	u32 *p = &lock->lock;
	u32 tmp;

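	/*
	 * Atomically take a ticket: bump the 'next' field. stex.w leaves
	 * 0 in its register when the reservation is lost, in which case
	 * we retry from the ldex.
	 */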
	asm volatile (
		"1:	ldex.w		%0, (%2) \n"
		"	mov		%1, %0	 \n"
		"	add		%0, %3	 \n"
		"	stex.w		%0, (%2) \n"
		"	bez		%0, 1b   \n"
		: "=&r" (tmp), "=&r" (lockval)
		: "r"(p), "r"(ticket_next)
		: "cc");

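	/* Wait for the owner ticket to catch up with the one we took. */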
	while (lockval.tickets.next != lockval.tickets.owner)
		lockval.tickets.owner = READ_ONCE(lock->tickets.owner);

	smp_mb();
}

static inline int arch_spin_trylock(arch_spinlock_t *lock)
{
	u32 tmp, contended, res;
	u32 ticket_next = 1 << TICKET_NEXT;
	u32 *p = &lock->lock;

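	/*
	 * The lock is free when both 16-bit halves are equal: rotating
	 * the word by 16 and comparing it against itself checks
	 * owner == next. Only then take a ticket; otherwise report
	 * contention.
	 */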
	do {
		asm volatile (
		"	ldex.w		%0, (%3)   \n"
		"	movi		%2, 1	   \n"
		"	rotli		%1, %0, 16 \n"
		"	cmpne		%1, %0	   \n"
		"	bt		1f	   \n"
		"	movi		%2, 0	   \n"
		"	add		%0, %0, %4 \n"
		"	stex.w		%0, (%3)   \n"
		"1:				   \n"
		: "=&r" (res), "=&r" (tmp), "=&r" (contended)
		: "r"(p), "r"(ticket_next)
		: "cc");
	} while (!res);

	if (!contended)
		smp_mb();

	return !contended;
}

static inline void arch_spin_unlock(arch_spinlock_t *lock)
{
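	/*
	 * Only the lock holder updates 'owner', so a plain store after
	 * the barrier is enough to hand the lock to the next ticket.
	 */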
	smp_mb();
	WRITE_ONCE(lock->tickets.owner, lock->tickets.owner + 1);
}

static inline int arch_spin_value_unlocked(arch_spinlock_t lock)
{
	return lock.tickets.owner == lock.tickets.next;
}

static inline int arch_spin_is_locked(arch_spinlock_t *lock)
{
	return !arch_spin_value_unlocked(READ_ONCE(*lock));
}

static inline int arch_spin_is_contended(arch_spinlock_t *lock)
{
	struct __raw_tickets tickets = READ_ONCE(lock->tickets);

	return (tickets.next - tickets.owner) > 1;
}
#define arch_spin_is_contended	arch_spin_is_contended

#include <asm/qrwlock.h>

/* See include/linux/spinlock.h */
#define smp_mb__after_spinlock()	smp_mb()

#else /* CONFIG_QUEUED_RWLOCKS */

/*
 * Test-and-set spin-locking.
 */
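/* The lock word is 0 when free; a locker atomically stores 1 via ldex/stex. */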
static inline void arch_spin_lock(arch_spinlock_t *lock)
{
	u32 *p = &lock->lock;
	u32 tmp;

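	/* Spin while the word is non-zero, then claim it; retry if stex fails. */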
	asm volatile (
		"1:	ldex.w		%0, (%1) \n"
		"	bnez		%0, 1b   \n"
		"	movi		%0, 1    \n"
		"	stex.w		%0, (%1) \n"
		"	bez		%0, 1b   \n"
		: "=&r" (tmp)
		: "r"(p)
		: "cc");
	smp_mb();
}

static inline void arch_spin_unlock(arch_spinlock_t *lock)
{
	smp_mb();
	WRITE_ONCE(lock->lock, 0);
}

static inline int arch_spin_trylock(arch_spinlock_t *lock)
{
	u32 *p = &lock->lock;
	u32 tmp;

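	/*
	 * One attempt only: tmp ends up 0 when we took the lock, and
	 * stays non-zero when someone else already holds it.
	 */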
	asm volatile (
		"1:	ldex.w		%0, (%1) \n"
		"	bnez		%0, 2f   \n"
		"	movi		%0, 1    \n"
		"	stex.w		%0, (%1) \n"
		"	bez		%0, 1b   \n"
		"	movi		%0, 0    \n"
		"2:				 \n"
		: "=&r" (tmp)
		: "r"(p)
		: "cc");

	if (!tmp)
		smp_mb();

	return !tmp;
}

#define arch_spin_is_locked(x)	(READ_ONCE((x)->lock) != 0)

/*
 * read lock/unlock/trylock
 */
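/*
 * The rwlock word acts as a signed counter: 0 means free, a positive
 * value counts the active readers, and a writer drives it negative.
 */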
static inline void arch_read_lock(arch_rwlock_t *lock)
{
	u32 *p = &lock->lock;
	u32 tmp;

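	/* Wait while a writer holds the lock (counter negative), then add a reader. */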
	asm volatile (
		"1:	ldex.w		%0, (%1) \n"
		"	blz		%0, 1b   \n"
		"	addi		%0, 1    \n"
		"	stex.w		%0, (%1) \n"
		"	bez		%0, 1b   \n"
		: "=&r" (tmp)
		: "r"(p)
		: "cc");
	smp_mb();
}

static inline void arch_read_unlock(arch_rwlock_t *lock)
{
	u32 *p = &lock->lock;
	u32 tmp;

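	/* Order the critical section, then atomically drop our reader count. */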
	smp_mb();
	asm volatile (
		"1:	ldex.w		%0, (%1) \n"
		"	subi		%0, 1    \n"
		"	stex.w		%0, (%1) \n"
		"	bez		%0, 1b   \n"
		: "=&r" (tmp)
		: "r"(p)
		: "cc");
}

static inline int arch_read_trylock(arch_rwlock_t *lock)
{
	u32 *p = &lock->lock;
	u32 tmp;

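	/* One attempt: tmp stays non-zero if a writer currently holds the lock. */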
	asm volatile (
		"1:	ldex.w		%0, (%1) \n"
		"	blz		%0, 2f   \n"
		"	addi		%0, 1    \n"
		"	stex.w		%0, (%1) \n"
		"	bez		%0, 1b   \n"
		"	movi		%0, 0    \n"
		"2:				 \n"
		: "=&r" (tmp)
		: "r"(p)
		: "cc");

	if (!tmp)
		smp_mb();

	return !tmp;
}

/*
 * write lock/unlock/trylock
 */
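/* A writer needs the counter at 0 (no readers, no writer) and makes it negative. */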
static inline void arch_write_lock(arch_rwlock_t *lock)
{
	u32 *p = &lock->lock;
	u32 tmp;

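	/* Wait until the counter reads 0, then take it to -1 via subi. */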
	asm volatile (
		"1:	ldex.w		%0, (%1) \n"
		"	bnez		%0, 1b   \n"
		"	subi		%0, 1    \n"
		"	stex.w		%0, (%1) \n"
		"	bez		%0, 1b   \n"
		: "=&r" (tmp)
		: "r"(p)
		: "cc");
	smp_mb();
}

static inline void arch_write_unlock(arch_rwlock_t *lock)
{
	smp_mb();
	WRITE_ONCE(lock->lock, 0);
}

static inline int arch_write_trylock(arch_rwlock_t *lock)
{
	u32 *p = &lock->lock;
	u32 tmp;

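	/* One attempt: bail out with tmp != 0 if the lock is busy. */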
	asm volatile (
		"1:	ldex.w		%0, (%1) \n"
		"	bnez		%0, 2f   \n"
		"	subi		%0, 1    \n"
		"	stex.w		%0, (%1) \n"
		"	bez		%0, 1b   \n"
		"	movi		%0, 0    \n"
		"2:				 \n"
		: "=&r" (tmp)
		: "r"(p)
		: "cc");

	if (!tmp)
		smp_mb();

	return !tmp;
}

#endif /* CONFIG_QUEUED_RWLOCKS */
#endif /* __ASM_CSKY_SPINLOCK_H */