/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _TOOLS_LINUX_ASM_AARCH64_BARRIER_H
#define _TOOLS_LINUX_ASM_AARCH64_BARRIER_H

/*
 * Userspace memory barriers for arm64 (aarch64).
 *
 * From tools/perf/perf-sys.h, last modified in:
 * f428ebd184c82a7914b2aa7e9f868918aaf7ea78 perf tools: Fix AAAAARGH64 memory barriers
 *
 * XXX: arch/arm64/include/asm/barrier.h in the kernel sources use dsb, is this
 * a case like for arm32 where we do things differently in userspace?
 */

/* Full / store / load barriers using dmb with inner-shareable scope. */
#define mb()		asm volatile("dmb ish" ::: "memory")
#define wmb()		asm volatile("dmb ishst" ::: "memory")
#define rmb()		asm volatile("dmb ishld" ::: "memory")

/*
 * Kernel uses dmb variants on arm64 for smp_*() barriers. Pretty much the same
 * implementation as above mb()/wmb()/rmb(), though for the latter kernel uses
 * dsb. In any case, should above mb()/wmb()/rmb() change, make sure the below
 * smp_*() don't.
 */
#define smp_mb()	asm volatile("dmb ish" ::: "memory")
#define smp_wmb()	asm volatile("dmb ishst" ::: "memory")
#define smp_rmb()	asm volatile("dmb ishld" ::: "memory")

/*
 * Store to *p with release semantics, via the size-matched stlr{b,h,}
 * instruction. The union bounces the value through a char array so the
 * size-specific alias-type cast does not fall foul of strict aliasing.
 * Sizes other than 1/2/4/8 fall back to a plain store preceded by mb().
 */
#define smp_store_release(p, v)						\
do {									\
	union { typeof(*p) __val; char __c[1]; } __u =			\
		{ .__val = (v) };					\
									\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("stlrb %w1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u8_alias_t *)__u.__c)	\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile ("stlrh %w1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u16_alias_t *)__u.__c)	\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile ("stlr %w1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u32_alias_t *)__u.__c)	\
				: "memory");				\
		break;							\
	case 8:								\
		asm volatile ("stlr %1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u64_alias_t *)__u.__c)	\
				: "memory");				\
		break;							\
	default:							\
		/* Only to shut up gcc ... */				\
		mb();							\
		break;							\
	}								\
} while (0)

/*
 * Load from *p with acquire semantics, via the size-matched ldar{b,h,}
 * instruction; evaluates to the loaded value. Same union bounce and
 * size-dispatch scheme as smp_store_release() above.
 */
#define smp_load_acquire(p)						\
({									\
	union { typeof(*p) __val; char __c[1]; } __u =			\
		{ .__c = { 0 } };					\
									\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("ldarb %w0, %1"				\
			: "=r" (*(__u8_alias_t *)__u.__c)		\
			: "Q" (*p) : "memory");				\
		break;							\
	case 2:								\
		asm volatile ("ldarh %w0, %1"				\
			: "=r" (*(__u16_alias_t *)__u.__c)		\
			: "Q" (*p) : "memory");				\
		break;							\
	case 4:								\
		asm volatile ("ldar %w0, %1"				\
			: "=r" (*(__u32_alias_t *)__u.__c)		\
			: "Q" (*p) : "memory");				\
		break;							\
	case 8:								\
		asm volatile ("ldar %0, %1"				\
			: "=r" (*(__u64_alias_t *)__u.__c)		\
			: "Q" (*p) : "memory");				\
		break;							\
	default:							\
		/* Only to shut up gcc ... */				\
		mb();							\
		break;							\
	}								\
	__u.__val;							\
})

#endif /* _TOOLS_LINUX_ASM_AARCH64_BARRIER_H */