/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020 Google LLC.
 */
#ifndef __ASM_RWONCE_H
#define __ASM_RWONCE_H

#if defined(CONFIG_LTO) && !defined(__ASSEMBLY__)

#include <linux/compiler_types.h>
#include <asm/alternative-macros.h>

#ifndef BUILD_VDSO

#ifdef CONFIG_AS_HAS_LDAPR
#define __LOAD_RCPC(sfx, regs...)					\
	ALTERNATIVE(							\
		"ldar"	#sfx "\t" #regs,				\
		".arch_extension rcpc\n"				\
		"ldapr"	#sfx "\t" #regs,				\
	ARM64_HAS_LDAPR)
#else
#define __LOAD_RCPC(sfx, regs...)	"ldar" #sfx "\t" #regs
#endif /* CONFIG_AS_HAS_LDAPR */
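
/*
 * Illustrative expansion (editor's sketch, not part of the original
 * source): with CONFIG_AS_HAS_LDAPR=y, __LOAD_RCPC() stringizes its
 * arguments, so __LOAD_RCPC(b, %w0, %1) becomes
 *
 *	ALTERNATIVE("ldarb\t%w0, %1",
 *		    ".arch_extension rcpc\n"
 *		    "ldaprb\t%w0, %1",
 *		    ARM64_HAS_LDAPR)
 *
 * i.e. an LDARB (RCsc) acquire load that the alternatives framework
 * patches to the weaker RCpc LDAPRB on CPUs advertising LDAPR support.
 */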

/*
 * When building with LTO, there is an increased risk of the compiler
 * converting an address dependency headed by a READ_ONCE() invocation
 * into a control dependency and consequently allowing for harmful
 * reordering by the CPU.
 *
 * Ensure that such transformations are harmless by overriding the generic
 * READ_ONCE() definition with one that provides RCpc acquire semantics
 * when building with LTO.
 */
#define __READ_ONCE(x)							\
({									\
	typeof(&(x)) __x = &(x);					\
	int atomic = 1;							\
	union { __unqual_scalar_typeof(*__x) __val; char __c[1]; } __u;	\
	switch (sizeof(x)) {						\
	case 1:								\
		asm volatile(__LOAD_RCPC(b, %w0, %1)			\
			: "=r" (*(__u8 *)__u.__c)			\
			: "Q" (*__x) : "memory");			\
		break;							\
	case 2:								\
		asm volatile(__LOAD_RCPC(h, %w0, %1)			\
			: "=r" (*(__u16 *)__u.__c)			\
			: "Q" (*__x) : "memory");			\
		break;							\
	case 4:								\
		asm volatile(__LOAD_RCPC(, %w0, %1)			\
			: "=r" (*(__u32 *)__u.__c)			\
			: "Q" (*__x) : "memory");			\
		break;							\
	case 8:								\
		asm volatile(__LOAD_RCPC(, %0, %1)			\
			: "=r" (*(__u64 *)__u.__c)			\
			: "Q" (*__x) : "memory");			\
		break;							\
	default:							\
		atomic = 0;						\
	}								\
	atomic ? (typeof(*__x))__u.__val : (*(volatile typeof(__x))__x);\
})
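
/*
 * Minimal usage sketch (editor's illustration; `gp', `struct foo' and
 * `val' are hypothetical names, not kernel symbols):
 *
 *	struct foo *p = READ_ONCE(gp);
 *	int v = p->val;
 *
 * With the definition above, the pointer load carries at least RCpc
 * acquire semantics, so the dependent dereference cannot be observed
 * ahead of it even if LTO has rewritten the address dependency into a
 * control dependency. Accesses whose size is not 1, 2, 4 or 8 bytes
 * take the default case (atomic = 0) and fall back to a plain volatile
 * read via the final conditional expression.
 */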

#endif	/* !BUILD_VDSO */
#endif	/* CONFIG_LTO && !__ASSEMBLY__ */

#include <asm-generic/rwonce.h>

#endif	/* __ASM_RWONCE_H */