/* SPDX-License-Identifier: GPL-2.0 */
/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007, 2012 David S. Miller (davem@davemloft.net)
 */

#include <linux/linkage.h>
#include <asm/asi.h>
#include <asm/backoff.h>
#include <asm/export.h>

	.text

	/* Three versions of the atomic routines: one that
	 * does not return a value and does not perform
	 * memory barriers, and two that return a value (the
	 * new and the old value, respectively) and do perform
	 * the barriers.
	 */

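	/* All of the routines below share the same compare-and-swap
	 * retry loop: load the current value into %g1, compute the
	 * new value into %g7, and attempt a cas/casx against [%o1].
	 * If another CPU changed the value in the meantime, the
	 * comparison fails and the loop retries; BACKOFF_SETUP(),
	 * BACKOFF_LABEL() and BACKOFF_SPIN() (from asm/backoff.h)
	 * route that retry through an exponential backoff path where
	 * the configuration provides one.
	 */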
#define ATOMIC_OP(op)							\
ENTRY(atomic_##op) /* %o0 = increment, %o1 = atomic_ptr */		\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 nop;								\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic_##op);							\
EXPORT_SYMBOL(atomic_##op);

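	/* Same loop as ATOMIC_OP(), but the new value is recomputed
	 * into %g1 in the branch delay slot and handed back to the
	 * caller sign-extended to 64 bits via "sra %g1, 0, %o0".
	 */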
#define ATOMIC_OP_RETURN(op)						\
ENTRY(atomic_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 op	%g1, %o0, %g1;						\
	retl;								\
	 sra	%g1, 0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic_##op##_return);						\
EXPORT_SYMBOL(atomic_##op##_return);

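	/* As above, but the value returned in %o0 is the old value
	 * loaded by the lduw, i.e. the value the counter held before
	 * the operation was applied, again sign-extended to 64 bits.
	 */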
#define ATOMIC_FETCH_OP(op)						\
ENTRY(atomic_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 sra	%g1, 0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic_fetch_##op);						\
EXPORT_SYMBOL(atomic_fetch_##op);

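	/* Instantiate the 32-bit entry points: atomic_add(),
	 * atomic_add_return(), atomic_fetch_add() and the
	 * corresponding sub/and/or/xor variants.  Note that the
	 * bitwise operations only get the plain and fetch forms;
	 * no _return variant is generated for them here.
	 */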
ATOMIC_OP(add)
ATOMIC_OP_RETURN(add)
ATOMIC_FETCH_OP(add)

ATOMIC_OP(sub)
ATOMIC_OP_RETURN(sub)
ATOMIC_FETCH_OP(sub)

ATOMIC_OP(and)
ATOMIC_FETCH_OP(and)

ATOMIC_OP(or)
ATOMIC_FETCH_OP(or)

ATOMIC_OP(xor)
ATOMIC_FETCH_OP(xor)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

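	/* The 64-bit routines mirror the 32-bit ones above, using
	 * ldx/casx instead of lduw/cas and testing the 64-bit
	 * condition codes (%xcc) instead of %icc.
	 */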
#define ATOMIC64_OP(op)							\
ENTRY(atomic64_##op) /* %o0 = increment, %o1 = atomic_ptr */		\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 nop;								\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic64_##op);							\
EXPORT_SYMBOL(atomic64_##op);

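	/* The new 64-bit value is recomputed directly into %o0 in
	 * the retl delay slot; no sign extension is needed.
	 */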
#define ATOMIC64_OP_RETURN(op)						\
ENTRY(atomic64_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 op	%g1, %o0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic64_##op##_return);					\
EXPORT_SYMBOL(atomic64_##op##_return);

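	/* The old 64-bit value read by the ldx is returned unchanged
	 * in %o0.
	 */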
#define ATOMIC64_FETCH_OP(op)						\
ENTRY(atomic64_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 mov	%g1, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic64_fetch_##op);						\
EXPORT_SYMBOL(atomic64_fetch_##op);

ATOMIC64_OP(add)
ATOMIC64_OP_RETURN(add)
ATOMIC64_FETCH_OP(add)

ATOMIC64_OP(sub)
ATOMIC64_OP_RETURN(sub)
ATOMIC64_FETCH_OP(sub)

ATOMIC64_OP(and)
ATOMIC64_FETCH_OP(and)

ATOMIC64_OP(or)
ATOMIC64_FETCH_OP(or)

ATOMIC64_OP(xor)
ATOMIC64_FETCH_OP(xor)

#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

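	/* Decrement the counter only if its current value is positive.
	 * If the loaded value is already <= 0, the store is skipped and
	 * the routine simply returns that value minus one, so a
	 * negative return value indicates that no decrement took place.
	 * Otherwise the casx loop retries until the decremented value
	 * is stored, and the new value is returned.
	 */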
ENTRY(atomic64_dec_if_positive) /* %o0 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o0], %g1
	brlez,pn %g1, 3f
	 sub	%g1, 1, %g7
	casx	[%o0], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
3:	retl
	 sub	%g1, 1, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_dec_if_positive)
EXPORT_SYMBOL(atomic64_dec_if_positive)