xref: /OK3568_Linux_fs/kernel/arch/sh/include/asm/cmpxchg-xchg.h (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun /* SPDX-License-Identifier: GPL-2.0 */
2*4882a593Smuzhiyun #ifndef __ASM_SH_CMPXCHG_XCHG_H
3*4882a593Smuzhiyun #define __ASM_SH_CMPXCHG_XCHG_H
4*4882a593Smuzhiyun 
5*4882a593Smuzhiyun /*
6*4882a593Smuzhiyun  * Copyright (C) 2016 Red Hat, Inc.
7*4882a593Smuzhiyun  * Author: Michael S. Tsirkin <mst@redhat.com>
8*4882a593Smuzhiyun  */
9*4882a593Smuzhiyun #include <linux/bits.h>
10*4882a593Smuzhiyun #include <linux/compiler.h>
11*4882a593Smuzhiyun #include <asm/byteorder.h>
12*4882a593Smuzhiyun 
13*4882a593Smuzhiyun /*
14*4882a593Smuzhiyun  * Portable implementations of 1 and 2 byte xchg using a 4 byte cmpxchg.
15*4882a593Smuzhiyun  * Note: this header isn't self-contained: before including it, __cmpxchg_u32
16*4882a593Smuzhiyun  * must be defined first.
17*4882a593Smuzhiyun  */
/*
 * Exchange a 1- or 2-byte quantity by performing a read-modify-write
 * cycle on the naturally aligned 32-bit word that contains it, using
 * the externally supplied __cmpxchg_u32 primitive.
 *
 * @ptr:  address of the sub-word value (need not be 4-byte aligned)
 * @x:    new value to store (only the low size * 8 bits are meaningful)
 * @size: width of the access in bytes (1 or 2)
 *
 * Returns the previous value that was held at @ptr.
 */
static inline u32 __xchg_cmpxchg(volatile void *ptr, u32 x, int size)
{
	int byte_off = (unsigned long)ptr % sizeof(u32);
	volatile u32 *word = ptr - byte_off;
#ifdef __BIG_ENDIAN
	/* Big-endian: the lowest address holds the most significant byte. */
	int shift = (sizeof(u32) - size - byte_off) * BITS_PER_BYTE;
#else
	int shift = byte_off * BITS_PER_BYTE;
#endif
	u32 mask = ((0x1 << size * BITS_PER_BYTE) - 1) << shift;
	u32 old_word, new_word, prev;

	/* Retry until the containing word is swapped without interference. */
	do {
		old_word = READ_ONCE(*word);
		prev = (old_word & mask) >> shift;
		new_word = (old_word & ~mask) | (x << shift);
	} while (__cmpxchg_u32(word, old_word, new_word) != old_word);

	return prev;
}
39*4882a593Smuzhiyun 
/* 16-bit exchange, emulated via the 32-bit cmpxchg-based helper. */
static inline unsigned long xchg_u16(volatile u16 *m, unsigned long val)
{
	return __xchg_cmpxchg(m, val, sizeof(*m));
}
44*4882a593Smuzhiyun 
/* 8-bit exchange, emulated via the 32-bit cmpxchg-based helper. */
static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
{
	return __xchg_cmpxchg(m, val, sizeof(*m));
}
49*4882a593Smuzhiyun 
50*4882a593Smuzhiyun #endif /* __ASM_SH_CMPXCHG_XCHG_H */
51