xref: /OK3568_Linux_fs/kernel/arch/m68k/include/asm/uaccess.h (revision 4882a59341e53eb6f0b4789bf948001014eff981)
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __M68K_UACCESS_H
#define __M68K_UACCESS_H

#ifdef CONFIG_MMU

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/segment.h>
#include <asm/extable.h>

/* We let the MMU do all checking */
static inline int access_ok(const void __user *addr,
			    unsigned long size)
{
	return 1;
}
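
/*
 * Illustration only (hypothetical caller, not part of this header): since
 * access_ok() always returns 1 here, the usual range check compiles away
 * and a bad user pointer is instead caught by the exception-table fixups
 * in the accessors below.
 *
 *	int __user *uptr;
 *	int val;
 *
 *	if (!access_ok(uptr, sizeof(*uptr)))
 *		return -EFAULT;
 *	if (get_user(val, uptr))
 *		return -EFAULT;
 */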

/*
 * Not all variants of the 68k family support the notion of address spaces.
 * The traditional 680x0 parts do, and they use the sfc/dfc registers and
 * the "moves" instruction to access user space from kernel space. Other
 * family members like ColdFire don't support this, and only have a single
 * address space, and use the usual "move" instruction for user space access.
 *
 * Outside of this difference the user space access functions are the same.
 * So let's keep the code simple and just define what we need to use.
 */
#ifdef CONFIG_CPU_HAS_ADDRESS_SPACES
#define	MOVES	"moves"
#else
#define	MOVES	"move"
#endif
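
/*
 * Illustration only (assumed expansion, not emitted by this header): with
 * CONFIG_CPU_HAS_ADDRESS_SPACES the asm templates below produce e.g.
 *
 *	moves.l	%d1,(%a0)
 *
 * for a user-space store (via the sfc/dfc address spaces), while on
 * ColdFire the same template becomes a plain
 *
 *	move.l	%d1,(%a0)
 */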

extern int __put_user_bad(void);
extern int __get_user_bad(void);

#define __put_user_asm(res, x, ptr, bwl, reg, err)	\
asm volatile ("\n"					\
	"1:	"MOVES"."#bwl"	%2,%1\n"		\
	"2:\n"						\
	"	.section .fixup,\"ax\"\n"		\
	"	.even\n"				\
	"10:	moveq.l	%3,%0\n"			\
	"	jra 2b\n"				\
	"	.previous\n"				\
	"\n"						\
	"	.section __ex_table,\"a\"\n"		\
	"	.align	4\n"				\
	"	.long	1b,10b\n"			\
	"	.long	2b,10b\n"			\
	"	.previous"				\
	: "+d" (res), "=m" (*(ptr))			\
	: #reg (x), "i" (err))

/*
 * These are the main single-value transfer routines.  They automatically
 * use the right size if we just have the right pointer type.
 */

#define __put_user(x, ptr)						\
({									\
	typeof(*(ptr)) __pu_val = (x);					\
	int __pu_err = 0;						\
	__chk_user_ptr(ptr);						\
	switch (sizeof (*(ptr))) {					\
	case 1:								\
		__put_user_asm(__pu_err, __pu_val, ptr, b, d, -EFAULT);	\
		break;							\
	case 2:								\
		__put_user_asm(__pu_err, __pu_val, ptr, w, r, -EFAULT);	\
		break;							\
	case 4:								\
		__put_user_asm(__pu_err, __pu_val, ptr, l, r, -EFAULT);	\
		break;							\
	case 8:								\
	    {								\
		const void __user *__pu_ptr = (ptr);			\
		asm volatile ("\n"					\
			"1:	"MOVES".l	%2,(%1)+\n"		\
			"2:	"MOVES".l	%R2,(%1)\n"		\
			"3:\n"						\
			"	.section .fixup,\"ax\"\n"		\
			"	.even\n"				\
			"10:	movel %3,%0\n"				\
			"	jra 3b\n"				\
			"	.previous\n"				\
			"\n"						\
			"	.section __ex_table,\"a\"\n"		\
			"	.align 4\n"				\
			"	.long 1b,10b\n"				\
			"	.long 2b,10b\n"				\
			"	.long 3b,10b\n"				\
			"	.previous"				\
			: "+d" (__pu_err), "+a" (__pu_ptr)		\
			: "r" (__pu_val), "i" (-EFAULT)			\
			: "memory");					\
		break;							\
	    }								\
	default:							\
		__pu_err = __put_user_bad();				\
		break;							\
	}								\
	__pu_err;							\
})
#define put_user(x, ptr)	__put_user(x, ptr)
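
/*
 * Usage sketch (illustrative only; 'uval' and 'uptr' are hypothetical
 * names): put_user() picks the correctly sized store from the pointer type
 * and evaluates to 0 on success or -EFAULT if the store faults.
 *
 *	u32 uval = 42;
 *	u32 __user *uptr;
 *
 *	if (put_user(uval, uptr))
 *		return -EFAULT;
 */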


#define __get_user_asm(res, x, ptr, type, bwl, reg, err) ({		\
	type __gu_val;							\
	asm volatile ("\n"						\
		"1:	"MOVES"."#bwl"	%2,%1\n"			\
		"2:\n"							\
		"	.section .fixup,\"ax\"\n"			\
		"	.even\n"					\
		"10:	move.l	%3,%0\n"				\
		"	sub.l	%1,%1\n"				\
		"	jra	2b\n"					\
		"	.previous\n"					\
		"\n"							\
		"	.section __ex_table,\"a\"\n"			\
		"	.align	4\n"					\
		"	.long	1b,10b\n"				\
		"	.previous"					\
		: "+d" (res), "=&" #reg (__gu_val)			\
		: "m" (*(ptr)), "i" (err));				\
	(x) = (__force typeof(*(ptr)))(__force unsigned long)__gu_val;	\
})

#define __get_user(x, ptr)						\
({									\
	int __gu_err = 0;						\
	__chk_user_ptr(ptr);						\
	switch (sizeof(*(ptr))) {					\
	case 1:								\
		__get_user_asm(__gu_err, x, ptr, u8, b, d, -EFAULT);	\
		break;							\
	case 2:								\
		__get_user_asm(__gu_err, x, ptr, u16, w, r, -EFAULT);	\
		break;							\
	case 4:								\
		__get_user_asm(__gu_err, x, ptr, u32, l, r, -EFAULT);	\
		break;							\
	case 8: {							\
		const void __user *__gu_ptr = (ptr);			\
		union {							\
			u64 l;						\
			__typeof__(*(ptr)) t;				\
		} __gu_val;						\
		asm volatile ("\n"					\
			"1:	"MOVES".l	(%2)+,%1\n"		\
			"2:	"MOVES".l	(%2),%R1\n"		\
			"3:\n"						\
			"	.section .fixup,\"ax\"\n"		\
			"	.even\n"				\
			"10:	move.l	%3,%0\n"			\
			"	sub.l	%1,%1\n"			\
			"	sub.l	%R1,%R1\n"			\
			"	jra	3b\n"				\
			"	.previous\n"				\
			"\n"						\
			"	.section __ex_table,\"a\"\n"		\
			"	.align	4\n"				\
			"	.long	1b,10b\n"			\
			"	.long	2b,10b\n"			\
			"	.previous"				\
			: "+d" (__gu_err), "=&r" (__gu_val.l),		\
			  "+a" (__gu_ptr)				\
			: "i" (-EFAULT)					\
			: "memory");					\
		(x) = __gu_val.t;					\
		break;							\
	}								\
	default:							\
		__gu_err = __get_user_bad();				\
		break;							\
	}								\
	__gu_err;							\
})
#define get_user(x, ptr) __get_user(x, ptr)
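
/*
 * Usage sketch (illustrative only; 'uptr' is a hypothetical user pointer):
 * get_user() reads a value of the pointed-to type; on a fault it returns
 * -EFAULT and, in this implementation, zeroes the destination.
 *
 *	u16 val;
 *	u16 __user *uptr;
 *
 *	if (get_user(val, uptr))
 *		return -EFAULT;
 */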

unsigned long __generic_copy_from_user(void *to, const void __user *from, unsigned long n);
unsigned long __generic_copy_to_user(void __user *to, const void *from, unsigned long n);

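/*
 * Map a constant chunk size in bytes to the assembler operand-size suffix
 * used by the constant copy helpers below: 1 -> b, 2 -> w, 4 -> l, and 0
 * expands to nothing, which makes the .ifnc checks in the asm templates
 * skip that move entirely.
 */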
#define __suffix0
#define __suffix1 b
#define __suffix2 w
#define __suffix4 l

#define ____constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)\
	asm volatile ("\n"						\
		"1:	"MOVES"."#s1"	(%2)+,%3\n"			\
		"	move."#s1"	%3,(%1)+\n"			\
		"	.ifnc	\""#s2"\",\"\"\n"			\
		"2:	"MOVES"."#s2"	(%2)+,%3\n"			\
		"	move."#s2"	%3,(%1)+\n"			\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"3:	"MOVES"."#s3"	(%2)+,%3\n"			\
		"	move."#s3"	%3,(%1)+\n"			\
		"	.endif\n"					\
		"	.endif\n"					\
		"4:\n"							\
		"	.section __ex_table,\"a\"\n"			\
		"	.align	4\n"					\
		"	.long	1b,10f\n"				\
		"	.ifnc	\""#s2"\",\"\"\n"			\
		"	.long	2b,20f\n"				\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"	.long	3b,30f\n"				\
		"	.endif\n"					\
		"	.endif\n"					\
		"	.previous\n"					\
		"\n"							\
		"	.section .fixup,\"ax\"\n"			\
		"	.even\n"					\
		"10:	addq.l #"#n1",%0\n"				\
		"	.ifnc	\""#s2"\",\"\"\n"			\
		"20:	addq.l #"#n2",%0\n"				\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"30:	addq.l #"#n3",%0\n"				\
		"	.endif\n"					\
		"	.endif\n"					\
		"	jra	4b\n"					\
		"	.previous\n"					\
		: "+d" (res), "+&a" (to), "+a" (from), "=&d" (tmp)	\
		: : "memory")

#define ___constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)\
	____constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)
#define __constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3)	\
	___constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3,  \
					__suffix##n1, __suffix##n2, __suffix##n3)

static __always_inline unsigned long
__constant_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	unsigned long res = 0, tmp;

	switch (n) {
	case 1:
		__constant_copy_from_user_asm(res, to, from, tmp, 1, 0, 0);
		break;
	case 2:
		__constant_copy_from_user_asm(res, to, from, tmp, 2, 0, 0);
		break;
	case 3:
		__constant_copy_from_user_asm(res, to, from, tmp, 2, 1, 0);
		break;
	case 4:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 0, 0);
		break;
	case 5:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 1, 0);
		break;
	case 6:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 2, 0);
		break;
	case 7:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 2, 1);
		break;
	case 8:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 0);
		break;
	case 9:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 1);
		break;
	case 10:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 2);
		break;
	case 12:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 4);
		break;
	default:
		/* we limit the inlined version to 3 moves */
		return __generic_copy_from_user(to, from, n);
	}

	return res;
}
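
/*
 * For illustration only (assumed register allocation, not code generated by
 * this header): a constant 7-byte copy uses the 4/2/1 chunking above and
 * expands to roughly
 *
 *	moves.l	(%a1)+,%d1
 *	move.l	%d1,(%a0)+
 *	moves.w	(%a1)+,%d1
 *	move.w	%d1,(%a0)+
 *	moves.b	(%a1)+,%d1
 *	move.b	%d1,(%a0)+
 *
 * The fixup code adds the size of the faulting chunk plus every later chunk
 * to the result, so the value returned is the number of bytes not copied.
 */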

#define __constant_copy_to_user_asm(res, to, from, tmp, n, s1, s2, s3)	\
	asm volatile ("\n"						\
		"	move."#s1"	(%2)+,%3\n"			\
		"11:	"MOVES"."#s1"	%3,(%1)+\n"			\
		"12:	move."#s2"	(%2)+,%3\n"			\
		"21:	"MOVES"."#s2"	%3,(%1)+\n"			\
		"22:\n"							\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"	move."#s3"	(%2)+,%3\n"			\
		"31:	"MOVES"."#s3"	%3,(%1)+\n"			\
		"32:\n"							\
		"	.endif\n"					\
		"4:\n"							\
		"\n"							\
		"	.section __ex_table,\"a\"\n"			\
		"	.align	4\n"					\
		"	.long	11b,5f\n"				\
		"	.long	12b,5f\n"				\
		"	.long	21b,5f\n"				\
		"	.long	22b,5f\n"				\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"	.long	31b,5f\n"				\
		"	.long	32b,5f\n"				\
		"	.endif\n"					\
		"	.previous\n"					\
		"\n"							\
		"	.section .fixup,\"ax\"\n"			\
		"	.even\n"					\
		"5:	moveq.l	#"#n",%0\n"				\
		"	jra	4b\n"					\
		"	.previous\n"					\
		: "+d" (res), "+a" (to), "+a" (from), "=&d" (tmp)	\
		: : "memory")

static __always_inline unsigned long
__constant_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	unsigned long res = 0, tmp;

	switch (n) {
	case 1:
		__put_user_asm(res, *(u8 *)from, (u8 __user *)to, b, d, 1);
		break;
	case 2:
		__put_user_asm(res, *(u16 *)from, (u16 __user *)to, w, r, 2);
		break;
	case 3:
		__constant_copy_to_user_asm(res, to, from, tmp, 3, w, b,);
		break;
	case 4:
		__put_user_asm(res, *(u32 *)from, (u32 __user *)to, l, r, 4);
		break;
	case 5:
		__constant_copy_to_user_asm(res, to, from, tmp, 5, l, b,);
		break;
	case 6:
		__constant_copy_to_user_asm(res, to, from, tmp, 6, l, w,);
		break;
	case 7:
		__constant_copy_to_user_asm(res, to, from, tmp, 7, l, w, b);
		break;
	case 8:
		__constant_copy_to_user_asm(res, to, from, tmp, 8, l, l,);
		break;
	case 9:
		__constant_copy_to_user_asm(res, to, from, tmp, 9, l, l, b);
		break;
	case 10:
		__constant_copy_to_user_asm(res, to, from, tmp, 10, l, l, w);
		break;
	case 12:
		__constant_copy_to_user_asm(res, to, from, tmp, 12, l, l, l);
		break;
	default:
		/* limit the inlined version to 3 moves */
		return __generic_copy_to_user(to, from, n);
	}

	return res;
}

static inline unsigned long
raw_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	if (__builtin_constant_p(n))
		return __constant_copy_from_user(to, from, n);
	return __generic_copy_from_user(to, from, n);
}

static inline unsigned long
raw_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	if (__builtin_constant_p(n))
		return __constant_copy_to_user(to, from, n);
	return __generic_copy_to_user(to, from, n);
}
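
/*
 * Usage note (illustrative, hypothetical caller): both raw helpers return
 * the number of bytes that could not be copied, so 0 means full success.
 * The generic copy_{from,to}_user() wrappers in linux/uaccess.h build on
 * these.
 *
 *	char buf[64];
 *	unsigned long left = raw_copy_from_user(buf, uptr, sizeof(buf));
 *
 *	if (left)
 *		return -EFAULT;
 */
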
#define INLINE_COPY_FROM_USER
#define INLINE_COPY_TO_USER

#define user_addr_max() \
	(uaccess_kernel() ? ~0UL : TASK_SIZE)

extern long strncpy_from_user(char *dst, const char __user *src, long count);
extern __must_check long strnlen_user(const char __user *str, long n);

unsigned long __clear_user(void __user *to, unsigned long n);

#define clear_user	__clear_user

#else /* !CONFIG_MMU */
#include <asm-generic/uaccess.h>
#endif

#endif /* __M68K_UACCESS_H */