/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2019 Google LLC.
 */
#ifndef __ASM_RWONCE_H
#define __ASM_RWONCE_H

/*
 * The barrier below is only provided for SMP builds; on UP kernels the
 * plain generic READ_ONCE() from <asm-generic/rwonce.h> is used instead.
 */
#ifdef CONFIG_SMP

#include <asm/barrier.h>	/* for mb() */

/*
 * Alpha is apparently daft enough to reorder address-dependent loads
 * on some CPU implementations. Knock some common sense into it with
 * a memory barrier in READ_ONCE().
 *
 * For the curious, more information about this unusual reordering is
 * available in chapter 15 of the "perfbook":
 *
 *  https://kernel.org/pub/linux/kernel/people/paulmck/perfbook/perfbook.html
 *
 */
/*
 * Override of the generic __READ_ONCE(): perform the access through a
 * volatile pointer (so the compiler emits exactly one load), then issue
 * a full memory barrier so later loads that depend on the returned
 * address cannot be reordered before it. __unqual_scalar_typeof() keeps
 * the temporary non-volatile/non-const; the final cast restores the
 * expression's original type for the caller.
 */
#define __READ_ONCE(x)							\
({									\
	__unqual_scalar_typeof(x) __x =					\
		(*(volatile typeof(__x) *)(&(x)));			\
	mb();								\
	(typeof(x))__x;							\
})

#endif /* CONFIG_SMP */

/*
 * Pull in the generic READ_ONCE()/WRITE_ONCE() machinery; it honours the
 * __READ_ONCE() override defined above when CONFIG_SMP is set.
 */
#include <asm-generic/rwonce.h>

#endif /* __ASM_RWONCE_H */