/* SPDX-License-Identifier: GPL-2.0 WITH Linux-syscall-note */
#ifndef _ASM_IA64_CMPXCHG_H
#define _ASM_IA64_CMPXCHG_H

/*
 * Compare/Exchange, forked from asm/intrinsics.h
 * which was:
 *
 *	Copyright (C) 2002-2003 Hewlett-Packard Co
 *	David Mosberger-Tang <davidm@hpl.hp.com>
 */

#ifndef __ASSEMBLY__

#include <linux/types.h>
/* include compiler specific intrinsics */
#include <asm/ia64regs.h>
#ifdef __INTEL_COMPILER
# include <asm/intel_intrin.h>
#else
# include <asm/gcc_intrin.h>
#endif

/*
 * This function doesn't exist, so you'll get a linker error if
 * something tries to do an invalid xchg().
 */
extern void ia64_xchg_called_with_bad_pointer(void);

#define __xchg(x, ptr, size)						\
({									\
	unsigned long __xchg_result;					\
									\
	switch (size) {							\
	case 1:								\
		__xchg_result = ia64_xchg1((__u8 *)ptr, x);		\
		break;							\
									\
	case 2:								\
		__xchg_result = ia64_xchg2((__u16 *)ptr, x);		\
		break;							\
									\
	case 4:								\
		__xchg_result = ia64_xchg4((__u32 *)ptr, x);		\
		break;							\
									\
	case 8:								\
		__xchg_result = ia64_xchg8((__u64 *)ptr, x);		\
		break;							\
	default:							\
		ia64_xchg_called_with_bad_pointer();			\
	}								\
	__xchg_result;							\
})

#define xchg(ptr, x)							\
((__typeof__(*(ptr))) __xchg((unsigned long) (x), (ptr), sizeof(*(ptr))))
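
/*
 * Illustrative usage sketch only, not part of this header: xchg()
 * atomically stores a new value and hands back the previous contents of
 * the word.  A call on an unsupported size falls into the default case
 * above and references ia64_xchg_called_with_bad_pointer(), which is
 * never defined, so the mistake surfaces at link time.  The names
 * set_state/old_state below are made up for the example.
 */
#if 0	/* example only -- never compiled */
static int set_state(int *state, int new_state)
{
	/* Returns the value *state held immediately before the swap. */
	int old_state = xchg(state, new_state);

	return old_state;
}
#endif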

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */

/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid cmpxchg().
 */
extern long ia64_cmpxchg_called_with_bad_pointer(void);

#define ia64_cmpxchg(sem, ptr, old, new, size)				\
({									\
	__u64 _o_, _r_;							\
									\
	switch (size) {							\
	case 1:								\
		_o_ = (__u8) (long) (old);				\
		break;							\
	case 2:								\
		_o_ = (__u16) (long) (old);				\
		break;							\
	case 4:								\
		_o_ = (__u32) (long) (old);				\
		break;							\
	case 8:								\
		_o_ = (__u64) (long) (old);				\
		break;							\
	default:							\
		break;							\
	}								\
	switch (size) {							\
	case 1:								\
		_r_ = ia64_cmpxchg1_##sem((__u8 *) ptr, new, _o_);	\
		break;							\
									\
	case 2:								\
		_r_ = ia64_cmpxchg2_##sem((__u16 *) ptr, new, _o_);	\
		break;							\
									\
	case 4:								\
		_r_ = ia64_cmpxchg4_##sem((__u32 *) ptr, new, _o_);	\
		break;							\
									\
	case 8:								\
		_r_ = ia64_cmpxchg8_##sem((__u64 *) ptr, new, _o_);	\
		break;							\
									\
	default:							\
		_r_ = ia64_cmpxchg_called_with_bad_pointer();		\
		break;							\
	}								\
	(__typeof__(old)) _r_;						\
})

#define cmpxchg_acq(ptr, o, n)	\
	ia64_cmpxchg(acq, (ptr), (o), (n), sizeof(*(ptr)))
#define cmpxchg_rel(ptr, o, n)	\
	ia64_cmpxchg(rel, (ptr), (o), (n), sizeof(*(ptr)))
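
/*
 * Illustrative usage sketch only, not part of this header: the usual
 * compare-and-swap retry loop built on cmpxchg_acq().  Per the comment
 * above, success is detected by comparing the returned value with the
 * expected "old" value; on failure the return value is simply the
 * current contents of the word, which becomes the next guess.  The
 * function name atomic_add_sketch is made up for the example.
 */
#if 0	/* example only -- never compiled */
static __u32 atomic_add_sketch(__u32 *v, __u32 delta)
{
	__u32 old, new;

	do {
		old = *v;		/* current guess */
		new = old + delta;	/* value we want to install */
		/* succeeds only if *v still equals old */
	} while (cmpxchg_acq(v, old, new) != old);

	return new;
}
#endif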

/*
 * Worse still - early processor implementations actually just ignored
 * the acquire/release and did a full fence all the time.  Unfortunately
 * this meant a lot of badly written code that used .acq when they really
 * wanted .rel became legacy out in the wild - so when we made a cpu
 * that strictly did the .acq or .rel ... all that code started breaking - so
 * we had to back-pedal and keep the "legacy" behavior of a full fence :-(
 */
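
/*
 * Illustrative sketch only, not part of this header: the intended
 * pairing is .acq when taking ownership of something (later accesses
 * may not move above the cmpxchg) and .rel when giving it up (earlier
 * accesses may not move below it).  The trivial lock below is made up
 * for the example and assumes 0 == unlocked, 1 == locked.
 */
#if 0	/* example only -- never compiled */
static void sketch_lock(__u32 *lock)
{
	/* spin until we observe 0 and manage to install 1 */
	while (cmpxchg_acq(lock, 0, 1) != 0)
		;
}

static void sketch_unlock(__u32 *lock)
{
	/* hand the lock back with release semantics */
	cmpxchg_rel(lock, 1, 0);
}
#endif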

/* for compatibility with other platforms: */
#define cmpxchg(ptr, o, n)	cmpxchg_acq((ptr), (o), (n))
#define cmpxchg64(ptr, o, n)	cmpxchg_acq((ptr), (o), (n))

#define cmpxchg_local		cmpxchg
#define cmpxchg64_local		cmpxchg64

#ifdef CONFIG_IA64_DEBUG_CMPXCHG
# define CMPXCHG_BUGCHECK_DECL	int _cmpxchg_bugcheck_count = 128;
# define CMPXCHG_BUGCHECK(v)						\
do {									\
	if (_cmpxchg_bugcheck_count-- <= 0) {				\
		void *ip;						\
		extern int printk(const char *fmt, ...);		\
		ip = (void *) ia64_getreg(_IA64_REG_IP);		\
		printk("CMPXCHG_BUGCHECK: stuck at %p on word %p\n", ip, (v));\
		break;							\
	}								\
} while (0)
#else /* !CONFIG_IA64_DEBUG_CMPXCHG */
# define CMPXCHG_BUGCHECK_DECL
# define CMPXCHG_BUGCHECK(v)
#endif /* !CONFIG_IA64_DEBUG_CMPXCHG */
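
/*
 * Illustrative sketch only, not part of this header: the debug macros
 * above are meant to wrap a cmpxchg retry loop.  CMPXCHG_BUGCHECK_DECL
 * declares a per-loop countdown (it already supplies its own semicolon
 * when CONFIG_IA64_DEBUG_CMPXCHG is set), and CMPXCHG_BUGCHECK() prints
 * a diagnostic once the loop has spun 128 times on the same word, which
 * usually means the compare value never matches.  The function name
 * below is made up for the example.
 */
#if 0	/* example only -- never compiled */
static __u64 sketch_fetch_and_or(__u64 *m, __u64 bits)
{
	__u64 old, new;
	CMPXCHG_BUGCHECK_DECL

	do {
		CMPXCHG_BUGCHECK(m);
		old = *m;
		new = old | bits;
	} while (cmpxchg_acq(m, old, new) != old);

	return old;
}
#endif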

#endif /* !__ASSEMBLY__ */

#endif /* _ASM_IA64_CMPXCHG_H */