/* SPDX-License-Identifier: GPL-2.0 */
/* 32-bit atomic xchg() and cmpxchg() definitions.
 *
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 * Copyright (C) 2000 Anton Blanchard (anton@linuxcare.com.au)
 * Copyright (C) 2007 Kyle McMartin (kyle@parisc-linux.org)
 *
 * Additions by Keith M Wesolowski (wesolows@foobazco.org) based
 * on asm-parisc/atomic.h Copyright (C) 2000 Philipp Rumpf <prumpf@tux.org>.
 */

#ifndef __ARCH_SPARC_CMPXCHG__
#define __ARCH_SPARC_CMPXCHG__

unsigned long __xchg_u32(volatile u32 *m, u32 new);
void __xchg_called_with_bad_pointer(void);

static inline unsigned long __xchg(unsigned long x, __volatile__ void *ptr, int size)
{
	switch (size) {
	case 4:
		return __xchg_u32(ptr, x);
	}
	__xchg_called_with_bad_pointer();
	return x;
}

#define xchg(ptr, x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))))
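
/*
 * Illustrative only: xchg() atomically stores a new value and returns
 * the previous one, which makes a trivial test-and-set. The flag name
 * below is hypothetical, not part of this header.
 */
#if 0	/* usage sketch, not built */
static u32 example_flag;

static int example_test_and_set(void)
{
	/* Returns the old value: 0 means we were first to set it. */
	return xchg(&example_flag, 1);
}
#endif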

/* Emulate cmpxchg() the same way we emulate atomics,
 * by hashing the object address and indexing into an array
 * of spinlocks to get a bit of performance...
 *
 * See arch/sparc/lib/atomic32.c for the implementation;
 * a sketch of the scheme follows below.
 *
 * Cribbed from <asm-parisc/atomic.h>
 */

/* bug catcher for when unsupported size is used - won't link */
void __cmpxchg_called_with_bad_pointer(void);
/* we only need to support cmpxchg of a u32 on sparc */
unsigned long __cmpxchg_u32(volatile u32 *m, u32 old, u32 new_);
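
/*
 * Illustrative only: a minimal sketch of the hashed-spinlock scheme
 * described above, modeled on arch/sparc/lib/atomic32.c. The hash
 * width, shift, and lock-table initialization here are assumptions
 * for illustration; the real table lives in that file, not here.
 */
#if 0	/* sketch, not built */
#include <linux/spinlock.h>

#define ATOMIC_HASH_SIZE	4
#define ATOMIC_HASH(a)	\
	(&__atomic_hash[(((unsigned long)a) >> 8) & (ATOMIC_HASH_SIZE - 1)])

/* One lock per hash bucket; initialization elided for brevity. */
static spinlock_t __atomic_hash[ATOMIC_HASH_SIZE];

unsigned long __cmpxchg_u32(volatile u32 *ptr, u32 old, u32 new_)
{
	unsigned long flags;
	u32 prev;

	/* Every CPU touching this word hashes to the same lock, so
	 * the read-compare-write sequence below behaves atomically. */
	spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
	prev = *ptr;
	if (prev == old)
		*ptr = new_;
	spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);

	return (unsigned long)prev;
}
#endif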

/* don't worry...optimizer will get rid of most of this */
static inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new_, int size)
{
	switch (size) {
	case 4:
		return __cmpxchg_u32((u32 *)ptr, (u32)old, (u32)new_);
	default:
		__cmpxchg_called_with_bad_pointer();
		break;
	}
	return old;
}

#define cmpxchg(ptr, o, n)						\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,	\
			(unsigned long)_n_, sizeof(*(ptr)));		\
})
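
/*
 * Illustrative only: the usual retry-loop pattern built on cmpxchg(),
 * here doing a lock-free increment. cmpxchg() returns the value it
 * found, so a mismatch with the expected old value means another CPU
 * got in first and we retry. The counter name is hypothetical.
 */
#if 0	/* usage sketch, not built */
static u32 example_counter;

static void example_atomic_inc(void)
{
	u32 old, new_;

	do {
		old = example_counter;
		new_ = old + 1;
	} while (cmpxchg(&example_counter, old, new_) != old);
}
#endif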

u64 __cmpxchg_u64(u64 *ptr, u64 old, u64 new);
#define cmpxchg64(ptr, old, new) __cmpxchg_u64(ptr, old, new)

#include <asm-generic/cmpxchg-local.h>

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)					       \
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
			(unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

#endif /* __ARCH_SPARC_CMPXCHG__ */