xref: /OK3568_Linux_fs/kernel/arch/hexagon/include/asm/cmpxchg.h (revision 4882a59341e53eb6f0b4789bf948001014eff981)
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * xchg/cmpxchg operations for the Hexagon architecture
 *
 * Copyright (c) 2010-2011, The Linux Foundation. All rights reserved.
 */

#ifndef _ASM_CMPXCHG_H
#define _ASM_CMPXCHG_H

/*
 * __xchg - atomically exchange a register and a memory location
 * @x: value to swap
 * @ptr: pointer to memory
 * @size: size of the value
 *
 * Only 4 bytes supported currently.
 *
 * Note: there was an erratum on V2 regarding .new predicates and
 * memw_locked.
 */
static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
				   int size)
{
	unsigned long retval;

	/*  Can't seem to use printk or panic here, so just stop  */
	if (size != 4) do { asm volatile("brkpt;\n"); } while (1);

	__asm__ __volatile__ (
	"1:	%0 = memw_locked(%1);\n"    /*  load into retval */
	"	memw_locked(%1,P0) = %2;\n" /*  store into memory */
	"	if (!P0) jump 1b;\n"        /*  retry if store failed */
	: "=&r" (retval)
	: "r" (ptr), "r" (x)
	: "memory", "p0"
	);
	return retval;
}

/*
 * Atomically swap the contents of a register with memory.  Should be atomic
 * between multiple CPUs and within interrupts on the same CPU.
 */
#define xchg(ptr, v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v), (ptr), \
	sizeof(*(ptr))))

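/*
 * Usage sketch (illustrative only, not part of the original header): a
 * caller hands xchg() a pointer to a 32-bit object and gets back the
 * previous value.  The example_lock variable and function below are
 * hypothetical; a real lock would also need memory-ordering guarantees.
 */
#if 0
static unsigned int example_lock;

static void example_busy_acquire(void)
{
	/* Atomically store 1; keep spinning while the old value was 1. */
	while (xchg(&example_lock, 1) == 1)
		;	/* spin */
}
#endif
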
/*
 * See rt-mutex-design.txt; cmpxchg checks whether *ptr == old and, if so,
 * swaps in new.  On this arch it currently looks just like atomic_cmpxchg,
 * plus a bunch of variable casting.
 */

#define cmpxchg(ptr, old, new)					\
({								\
	__typeof__(ptr) __ptr = (ptr);				\
	__typeof__(*(ptr)) __old = (old);			\
	__typeof__(*(ptr)) __new = (new);			\
	__typeof__(*(ptr)) __oldval = 0;			\
								\
	asm volatile(						\
		"1:	%0 = memw_locked(%1);\n"		\
		"	{ P0 = cmp.eq(%0,%2);\n"		\
		"	  if (!P0.new) jump:nt 2f; }\n"		\
		"	memw_locked(%1,P0) = %3;\n"		\
		"	if (!P0) jump 1b;\n"			\
		"2:\n"						\
		: "=&r" (__oldval)				\
		: "r" (__ptr), "r" (__old), "r" (__new)		\
		: "memory", "p0"				\
	);							\
	__oldval;						\
})

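/*
 * Usage sketch (illustrative only, not part of the original header):
 * cmpxchg() returns the value it observed at *ptr, and the store happened
 * only if that return value equals old.  The example_count variable and
 * function below are hypothetical.
 */
#if 0
static int example_count;

/* Increment example_count unless it is zero; return nonzero on success. */
static int example_inc_unless_zero(void)
{
	int cur = example_count;

	while (cur != 0) {
		int seen = cmpxchg(&example_count, cur, cur + 1);

		if (seen == cur)
			return 1;	/* swap succeeded */
		cur = seen;	/* lost a race; retry with the observed value */
	}
	return 0;
}
#endif
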
#endif /* _ASM_CMPXCHG_H */