/* SPDX-License-Identifier: GPL-2.0 */
#ifndef TOOLS_ASM_X86_CMPXCHG_H
#define TOOLS_ASM_X86_CMPXCHG_H

#include <linux/compiler.h>

/*
 * Non-existent function to indicate usage errors at link time
 * (or compile-time if the compiler implements __compiletime_error()).
 * It is referenced only from the unreachable default: arm of the size
 * switch in __raw_cmpxchg() below, so a call with an unsupported
 * operand size fails the build instead of silently miscompiling.
 */
extern void __cmpxchg_wrong_size(void)
	__compiletime_error("Bad argument size for cmpxchg");

/*
 * Constants for operation sizes. On 32-bit, the 64-bit size is set to
 * -1 because sizeof will never return -1, thereby making those switch
 * case statements guaranteed dead code which the compiler will
 * eliminate, and allowing the "missing symbol in the default case" to
 * indicate a usage error.
 */
#define __X86_CASE_B	1
#define __X86_CASE_W	2
#define __X86_CASE_L	4
#ifdef __x86_64__
#define __X86_CASE_Q	8
#else
#define	__X86_CASE_Q	-1		/* sizeof will never return -1 */
#endif

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 *
 * Each case pins the comparand in the accumulator register via the
 * "=a"/"0" constraint pair, as the CMPXCHG instruction requires; the
 * byte case uses "q" (byte-addressable register) for the new value,
 * the wider cases use "r".  The "memory" clobber orders the operation
 * against surrounding memory accesses at the compiler level.
 *
 * NOTE(review): the u8/u16/u32/u64 types and the `lock` prefix string
 * (LOCK_PREFIX at the call site below) are not defined in this header
 * — presumably supplied by linux/compiler.h or a sibling tools/ header;
 * confirm against the including build.
 */
#define __raw_cmpxchg(ptr, old, new, size, lock)		\
({								\
	__typeof__(*(ptr)) __ret;				\
	__typeof__(*(ptr)) __old = (old);			\
	__typeof__(*(ptr)) __new = (new);			\
	switch (size) {						\
	case __X86_CASE_B:					\
	{							\
		volatile u8 *__ptr = (volatile u8 *)(ptr);	\
		asm volatile(lock "cmpxchgb %2,%1"		\
			     : "=a" (__ret), "+m" (*__ptr)	\
			     : "q" (__new), "0" (__old)		\
			     : "memory");			\
		break;						\
	}							\
	case __X86_CASE_W:					\
	{							\
		volatile u16 *__ptr = (volatile u16 *)(ptr);	\
		asm volatile(lock "cmpxchgw %2,%1"		\
			     : "=a" (__ret), "+m" (*__ptr)	\
			     : "r" (__new), "0" (__old)		\
			     : "memory");			\
		break;						\
	}							\
	case __X86_CASE_L:					\
	{							\
		volatile u32 *__ptr = (volatile u32 *)(ptr);	\
		asm volatile(lock "cmpxchgl %2,%1"		\
			     : "=a" (__ret), "+m" (*__ptr)	\
			     : "r" (__new), "0" (__old)		\
			     : "memory");			\
		break;						\
	}							\
	case __X86_CASE_Q:					\
	{							\
		volatile u64 *__ptr = (volatile u64 *)(ptr);	\
		asm volatile(lock "cmpxchgq %2,%1"		\
			     : "=a" (__ret), "+m" (*__ptr)	\
			     : "r" (__new), "0" (__old)		\
			     : "memory");			\
		break;						\
	}							\
	default:						\
		__cmpxchg_wrong_size();				\
	}							\
	__ret;							\
})

/* Locked (SMP-safe) variant: dispatch on operand size with LOCK_PREFIX. */
#define __cmpxchg(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)

/*
 * cmpxchg(ptr, old, new) - public entry point; operand width is derived
 * from sizeof(*ptr), so an unsupported size fails at build time via
 * __cmpxchg_wrong_size().
 */
#define cmpxchg(ptr, old, new)					\
	__cmpxchg(ptr, old, new, sizeof(*(ptr)))


#endif /* TOOLS_ASM_X86_CMPXCHG_H */