xref: /OK3568_Linux_fs/kernel/arch/arc/include/asm/uaccess.h (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun /* SPDX-License-Identifier: GPL-2.0-only */
2*4882a593Smuzhiyun /*
3*4882a593Smuzhiyun  * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
4*4882a593Smuzhiyun  *
5*4882a593Smuzhiyun  * vineetg: June 2010
6*4882a593Smuzhiyun  *    -__clear_user( ) called multiple times during elf load was byte loop
7*4882a593Smuzhiyun  *    converted to do as much word clear as possible.
8*4882a593Smuzhiyun  *
9*4882a593Smuzhiyun  * vineetg: Dec 2009
10*4882a593Smuzhiyun  *    -Hand crafted constant propagation for "constant" copy sizes
11*4882a593Smuzhiyun  *    -stock kernel shrunk by 33K at -O3
12*4882a593Smuzhiyun  *
13*4882a593Smuzhiyun  * vineetg: Sept 2009
14*4882a593Smuzhiyun  *    -Added option to (UN)inline copy_(to|from)_user to reduce code sz
15*4882a593Smuzhiyun  *    -kernel shrunk by 200K even at -O3 (gcc 4.2.1)
16*4882a593Smuzhiyun  *    -Enabled when doing -Os
17*4882a593Smuzhiyun  *
18*4882a593Smuzhiyun  * Amit Bhor, Sameer Dhavale: Codito Technologies 2004
19*4882a593Smuzhiyun  */
20*4882a593Smuzhiyun 
21*4882a593Smuzhiyun #ifndef _ASM_ARC_UACCESS_H
22*4882a593Smuzhiyun #define _ASM_ARC_UACCESS_H
23*4882a593Smuzhiyun 
24*4882a593Smuzhiyun #include <linux/string.h>	/* for generic string functions */
25*4882a593Smuzhiyun 
26*4882a593Smuzhiyun 
/* True when addr_limit is KERNEL_DS, i.e. any address is permitted */
#define __kernel_ok		(uaccess_kernel())

/*
 * Algorithmically, for __user_ok() we want to do:
 * 	(start < TASK_SIZE) && (start+len < TASK_SIZE)
 * where TASK_SIZE could either be retrieved from thread_info->addr_limit or
 * emitted directly in code.
 *
 * This can however be rewritten as follows:
 *	(len <= TASK_SIZE) && (start+len < TASK_SIZE)
 *
 * Because it essentially checks if buffer end is within limit and @len is
 * non-negative, which implies that buffer start will be within limit too.
 *
 * The reason for rewriting being, for majority of cases, @len is generally
 * compile time constant, causing first sub-expression to be compile time
 * subsumed.
 *
 * The second part would generate weird large LIMMs e.g. (0x6000_0000 - 0x10),
 * so we check for TASK_SIZE using get_fs() since the addr_limit load from mem
 * would already have been done at this call site for __kernel_ok()
 *
 */
#define __user_ok(addr, sz)	(((sz) <= TASK_SIZE) && \
				 ((addr) <= (get_fs() - (sz))))
/* Kernel-mode access short-circuits the range check entirely */
#define __access_ok(addr, sz)	(unlikely(__kernel_ok) || \
				 likely(__user_ok((addr), (sz))))
54*4882a593Smuzhiyun 
55*4882a593Smuzhiyun /*********** Single byte/hword/word copies ******************/
56*4882a593Smuzhiyun 
/*
 * Size-dispatched single user-space load: fetch @sz bytes from user
 * pointer @u into the kernel lvalue *(k).
 * Evaluates to 0 on success, -EFAULT on a faulting access (set by the
 * fixup code in the helpers below).  A @sz other than 1/2/4/8 falls
 * through the switch and yields 0 with *(k) untouched.
 */
#define __get_user_fn(sz, u, k)					\
({								\
	long __ret = 0;	/* success by default */	\
	switch (sz) {						\
	case 1: __arc_get_user_one(*(k), u, "ldb", __ret); break;	\
	case 2: __arc_get_user_one(*(k), u, "ldw", __ret); break;	\
	case 4: __arc_get_user_one(*(k), u, "ld", __ret);  break;	\
	case 8: __arc_get_user_one_64(*(k), u, __ret);     break;	\
	}							\
	__ret;							\
})
68*4882a593Smuzhiyun 
/*
 * Returns 0 on success, -EFAULT if not.
 * @ret already contains 0 - given that errors will be less likely
 * (hence +r asm constraint below).
 * In case of error, fixup code will make it -EFAULT
 *
 * Mechanics: label 1 is the faultable user load ("op" is the ld variant
 * chosen by __get_user_fn).  The __ex_table entry pairs 1b with fixup 3b;
 * on a fault the handler jumps to 3, which sets @ret to -EFAULT, zeroes
 * @dst (so callers never see stale kernel data), and resumes at label 2.
 */
#define __arc_get_user_one(dst, src, op, ret)	\
	__asm__ __volatile__(                   \
	"1:	"op"    %1,[%2]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	# return -EFAULT\n"		\
	"	mov %0, %3\n"			\
	"	# zero out dst ptr\n"		\
	"	mov %1,  0\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret), "=r" (dst)		\
	: "r" (src), "ir" (-EFAULT))
94*4882a593Smuzhiyun 
/*
 * 64-bit variant of __arc_get_user_one: two word loads into a register
 * pair (%1 low word, %R1 high word at offset 4).  Either load (labels 1
 * and 4) may fault; both __ex_table entries route to the same fixup,
 * which returns -EFAULT and zeroes both halves of @dst.
 */
#define __arc_get_user_one_64(dst, src, ret)	\
	__asm__ __volatile__(                   \
	"1:	ld   %1,[%2]\n"			\
	"4:	ld  %R1,[%2, 4]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	# return -EFAULT\n"		\
	"	mov %0, %3\n"			\
	"	# zero out dst ptr\n"		\
	"	mov %1,  0\n"			\
	"	mov %R1, 0\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.word 4b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret), "=r" (dst)		\
	: "r" (src), "ir" (-EFAULT))
117*4882a593Smuzhiyun 
/*
 * Size-dispatched single user-space store: write @sz bytes from the
 * kernel lvalue *(k) to user pointer @u.
 * Evaluates to 0 on success, -EFAULT on a faulting store.  A @sz other
 * than 1/2/4/8 falls through the switch and yields 0 with nothing
 * written.
 */
#define __put_user_fn(sz, u, k)					\
({								\
	long __ret = 0;	/* success by default */	\
	switch (sz) {						\
	case 1: __arc_put_user_one(*(k), u, "stb", __ret); break;	\
	case 2: __arc_put_user_one(*(k), u, "stw", __ret); break;	\
	case 4: __arc_put_user_one(*(k), u, "st", __ret);  break;	\
	case 8: __arc_put_user_one_64(*(k), u, __ret);     break;	\
	}							\
	__ret;							\
})
129*4882a593Smuzhiyun 
/*
 * Store one byte/halfword/word to user space ("op" selects the st
 * variant).  @ret stays 0 on success; on a fault at label 1 the
 * __ex_table entry sends control to fixup 3, which sets @ret to -EFAULT
 * and resumes at label 2.  Unlike the get_user case there is no
 * destination to zero.
 */
#define __arc_put_user_one(src, dst, op, ret)	\
	__asm__ __volatile__(                   \
	"1:	"op"    %1,[%2]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	mov %0, %3\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret)				\
	: "r" (src), "r" (dst), "ir" (-EFAULT))
146*4882a593Smuzhiyun 
/*
 * 64-bit variant of __arc_put_user_one: two word stores from a register
 * pair (%1 low word, %R1 high word at offset 4).  Either store (labels
 * 1 and 4) may fault; both __ex_table entries route to the same fixup,
 * which sets @ret to -EFAULT and resumes at label 2.
 */
#define __arc_put_user_one_64(src, dst, ret)	\
	__asm__ __volatile__(                   \
	"1:	st   %1,[%2]\n"			\
	"4:	st  %R1,[%2, 4]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	mov %0, %3\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.word 4b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret)				\
	: "r" (src), "r" (dst), "ir" (-EFAULT))
165*4882a593Smuzhiyun 
166*4882a593Smuzhiyun 
167*4882a593Smuzhiyun static inline unsigned long
raw_copy_from_user(void * to,const void __user * from,unsigned long n)168*4882a593Smuzhiyun raw_copy_from_user(void *to, const void __user *from, unsigned long n)
169*4882a593Smuzhiyun {
170*4882a593Smuzhiyun 	long res = 0;
171*4882a593Smuzhiyun 	char val;
172*4882a593Smuzhiyun 	unsigned long tmp1, tmp2, tmp3, tmp4;
173*4882a593Smuzhiyun 	unsigned long orig_n = n;
174*4882a593Smuzhiyun 
175*4882a593Smuzhiyun 	if (n == 0)
176*4882a593Smuzhiyun 		return 0;
177*4882a593Smuzhiyun 
178*4882a593Smuzhiyun 	/* unaligned */
179*4882a593Smuzhiyun 	if (((unsigned long)to & 0x3) || ((unsigned long)from & 0x3)) {
180*4882a593Smuzhiyun 
181*4882a593Smuzhiyun 		unsigned char tmp;
182*4882a593Smuzhiyun 
183*4882a593Smuzhiyun 		__asm__ __volatile__ (
184*4882a593Smuzhiyun 		"	mov.f   lp_count, %0		\n"
185*4882a593Smuzhiyun 		"	lpnz 2f				\n"
186*4882a593Smuzhiyun 		"1:	ldb.ab  %1, [%3, 1]		\n"
187*4882a593Smuzhiyun 		"	stb.ab  %1, [%2, 1]		\n"
188*4882a593Smuzhiyun 		"	sub     %0,%0,1			\n"
189*4882a593Smuzhiyun 		"2:	;nop				\n"
190*4882a593Smuzhiyun 		"	.section .fixup, \"ax\"		\n"
191*4882a593Smuzhiyun 		"	.align 4			\n"
192*4882a593Smuzhiyun 		"3:	j   2b				\n"
193*4882a593Smuzhiyun 		"	.previous			\n"
194*4882a593Smuzhiyun 		"	.section __ex_table, \"a\"	\n"
195*4882a593Smuzhiyun 		"	.align 4			\n"
196*4882a593Smuzhiyun 		"	.word   1b, 3b			\n"
197*4882a593Smuzhiyun 		"	.previous			\n"
198*4882a593Smuzhiyun 
199*4882a593Smuzhiyun 		: "+r" (n),
200*4882a593Smuzhiyun 		/*
201*4882a593Smuzhiyun 		 * Note as an '&' earlyclobber operand to make sure the
202*4882a593Smuzhiyun 		 * temporary register inside the loop is not the same as
203*4882a593Smuzhiyun 		 *  FROM or TO.
204*4882a593Smuzhiyun 		*/
205*4882a593Smuzhiyun 		  "=&r" (tmp), "+r" (to), "+r" (from)
206*4882a593Smuzhiyun 		:
207*4882a593Smuzhiyun 		: "lp_count", "memory");
208*4882a593Smuzhiyun 
209*4882a593Smuzhiyun 		return n;
210*4882a593Smuzhiyun 	}
211*4882a593Smuzhiyun 
212*4882a593Smuzhiyun 	/*
213*4882a593Smuzhiyun 	 * Hand-crafted constant propagation to reduce code sz of the
214*4882a593Smuzhiyun 	 * laddered copy 16x,8,4,2,1
215*4882a593Smuzhiyun 	 */
216*4882a593Smuzhiyun 	if (__builtin_constant_p(orig_n)) {
217*4882a593Smuzhiyun 		res = orig_n;
218*4882a593Smuzhiyun 
219*4882a593Smuzhiyun 		if (orig_n / 16) {
220*4882a593Smuzhiyun 			orig_n = orig_n % 16;
221*4882a593Smuzhiyun 
222*4882a593Smuzhiyun 			__asm__ __volatile__(
223*4882a593Smuzhiyun 			"	lsr   lp_count, %7,4		\n"
224*4882a593Smuzhiyun 			"	lp    3f			\n"
225*4882a593Smuzhiyun 			"1:	ld.ab   %3, [%2, 4]		\n"
226*4882a593Smuzhiyun 			"11:	ld.ab   %4, [%2, 4]		\n"
227*4882a593Smuzhiyun 			"12:	ld.ab   %5, [%2, 4]		\n"
228*4882a593Smuzhiyun 			"13:	ld.ab   %6, [%2, 4]		\n"
229*4882a593Smuzhiyun 			"	st.ab   %3, [%1, 4]		\n"
230*4882a593Smuzhiyun 			"	st.ab   %4, [%1, 4]		\n"
231*4882a593Smuzhiyun 			"	st.ab   %5, [%1, 4]		\n"
232*4882a593Smuzhiyun 			"	st.ab   %6, [%1, 4]		\n"
233*4882a593Smuzhiyun 			"	sub     %0,%0,16		\n"
234*4882a593Smuzhiyun 			"3:	;nop				\n"
235*4882a593Smuzhiyun 			"	.section .fixup, \"ax\"		\n"
236*4882a593Smuzhiyun 			"	.align 4			\n"
237*4882a593Smuzhiyun 			"4:	j   3b				\n"
238*4882a593Smuzhiyun 			"	.previous			\n"
239*4882a593Smuzhiyun 			"	.section __ex_table, \"a\"	\n"
240*4882a593Smuzhiyun 			"	.align 4			\n"
241*4882a593Smuzhiyun 			"	.word   1b, 4b			\n"
242*4882a593Smuzhiyun 			"	.word   11b,4b			\n"
243*4882a593Smuzhiyun 			"	.word   12b,4b			\n"
244*4882a593Smuzhiyun 			"	.word   13b,4b			\n"
245*4882a593Smuzhiyun 			"	.previous			\n"
246*4882a593Smuzhiyun 			: "+r" (res), "+r"(to), "+r"(from),
247*4882a593Smuzhiyun 			  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
248*4882a593Smuzhiyun 			: "ir"(n)
249*4882a593Smuzhiyun 			: "lp_count", "memory");
250*4882a593Smuzhiyun 		}
251*4882a593Smuzhiyun 		if (orig_n / 8) {
252*4882a593Smuzhiyun 			orig_n = orig_n % 8;
253*4882a593Smuzhiyun 
254*4882a593Smuzhiyun 			__asm__ __volatile__(
255*4882a593Smuzhiyun 			"14:	ld.ab   %3, [%2,4]		\n"
256*4882a593Smuzhiyun 			"15:	ld.ab   %4, [%2,4]		\n"
257*4882a593Smuzhiyun 			"	st.ab   %3, [%1,4]		\n"
258*4882a593Smuzhiyun 			"	st.ab   %4, [%1,4]		\n"
259*4882a593Smuzhiyun 			"	sub     %0,%0,8			\n"
260*4882a593Smuzhiyun 			"31:	;nop				\n"
261*4882a593Smuzhiyun 			"	.section .fixup, \"ax\"		\n"
262*4882a593Smuzhiyun 			"	.align 4			\n"
263*4882a593Smuzhiyun 			"4:	j   31b				\n"
264*4882a593Smuzhiyun 			"	.previous			\n"
265*4882a593Smuzhiyun 			"	.section __ex_table, \"a\"	\n"
266*4882a593Smuzhiyun 			"	.align 4			\n"
267*4882a593Smuzhiyun 			"	.word   14b,4b			\n"
268*4882a593Smuzhiyun 			"	.word   15b,4b			\n"
269*4882a593Smuzhiyun 			"	.previous			\n"
270*4882a593Smuzhiyun 			: "+r" (res), "+r"(to), "+r"(from),
271*4882a593Smuzhiyun 			  "=r"(tmp1), "=r"(tmp2)
272*4882a593Smuzhiyun 			:
273*4882a593Smuzhiyun 			: "memory");
274*4882a593Smuzhiyun 		}
275*4882a593Smuzhiyun 		if (orig_n / 4) {
276*4882a593Smuzhiyun 			orig_n = orig_n % 4;
277*4882a593Smuzhiyun 
278*4882a593Smuzhiyun 			__asm__ __volatile__(
279*4882a593Smuzhiyun 			"16:	ld.ab   %3, [%2,4]		\n"
280*4882a593Smuzhiyun 			"	st.ab   %3, [%1,4]		\n"
281*4882a593Smuzhiyun 			"	sub     %0,%0,4			\n"
282*4882a593Smuzhiyun 			"32:	;nop				\n"
283*4882a593Smuzhiyun 			"	.section .fixup, \"ax\"		\n"
284*4882a593Smuzhiyun 			"	.align 4			\n"
285*4882a593Smuzhiyun 			"4:	j   32b				\n"
286*4882a593Smuzhiyun 			"	.previous			\n"
287*4882a593Smuzhiyun 			"	.section __ex_table, \"a\"	\n"
288*4882a593Smuzhiyun 			"	.align 4			\n"
289*4882a593Smuzhiyun 			"	.word   16b,4b			\n"
290*4882a593Smuzhiyun 			"	.previous			\n"
291*4882a593Smuzhiyun 			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
292*4882a593Smuzhiyun 			:
293*4882a593Smuzhiyun 			: "memory");
294*4882a593Smuzhiyun 		}
295*4882a593Smuzhiyun 		if (orig_n / 2) {
296*4882a593Smuzhiyun 			orig_n = orig_n % 2;
297*4882a593Smuzhiyun 
298*4882a593Smuzhiyun 			__asm__ __volatile__(
299*4882a593Smuzhiyun 			"17:	ldw.ab   %3, [%2,2]		\n"
300*4882a593Smuzhiyun 			"	stw.ab   %3, [%1,2]		\n"
301*4882a593Smuzhiyun 			"	sub      %0,%0,2		\n"
302*4882a593Smuzhiyun 			"33:	;nop				\n"
303*4882a593Smuzhiyun 			"	.section .fixup, \"ax\"		\n"
304*4882a593Smuzhiyun 			"	.align 4			\n"
305*4882a593Smuzhiyun 			"4:	j   33b				\n"
306*4882a593Smuzhiyun 			"	.previous			\n"
307*4882a593Smuzhiyun 			"	.section __ex_table, \"a\"	\n"
308*4882a593Smuzhiyun 			"	.align 4			\n"
309*4882a593Smuzhiyun 			"	.word   17b,4b			\n"
310*4882a593Smuzhiyun 			"	.previous			\n"
311*4882a593Smuzhiyun 			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
312*4882a593Smuzhiyun 			:
313*4882a593Smuzhiyun 			: "memory");
314*4882a593Smuzhiyun 		}
315*4882a593Smuzhiyun 		if (orig_n & 1) {
316*4882a593Smuzhiyun 			__asm__ __volatile__(
317*4882a593Smuzhiyun 			"18:	ldb.ab   %3, [%2,2]		\n"
318*4882a593Smuzhiyun 			"	stb.ab   %3, [%1,2]		\n"
319*4882a593Smuzhiyun 			"	sub      %0,%0,1		\n"
320*4882a593Smuzhiyun 			"34:	; nop				\n"
321*4882a593Smuzhiyun 			"	.section .fixup, \"ax\"		\n"
322*4882a593Smuzhiyun 			"	.align 4			\n"
323*4882a593Smuzhiyun 			"4:	j   34b				\n"
324*4882a593Smuzhiyun 			"	.previous			\n"
325*4882a593Smuzhiyun 			"	.section __ex_table, \"a\"	\n"
326*4882a593Smuzhiyun 			"	.align 4			\n"
327*4882a593Smuzhiyun 			"	.word   18b,4b			\n"
328*4882a593Smuzhiyun 			"	.previous			\n"
329*4882a593Smuzhiyun 			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
330*4882a593Smuzhiyun 			:
331*4882a593Smuzhiyun 			: "memory");
332*4882a593Smuzhiyun 		}
333*4882a593Smuzhiyun 	} else {  /* n is NOT constant, so laddered copy of 16x,8,4,2,1  */
334*4882a593Smuzhiyun 
335*4882a593Smuzhiyun 		__asm__ __volatile__(
336*4882a593Smuzhiyun 		"	mov %0,%3			\n"
337*4882a593Smuzhiyun 		"	lsr.f   lp_count, %3,4		\n"  /* 16x bytes */
338*4882a593Smuzhiyun 		"	lpnz    3f			\n"
339*4882a593Smuzhiyun 		"1:	ld.ab   %5, [%2, 4]		\n"
340*4882a593Smuzhiyun 		"11:	ld.ab   %6, [%2, 4]		\n"
341*4882a593Smuzhiyun 		"12:	ld.ab   %7, [%2, 4]		\n"
342*4882a593Smuzhiyun 		"13:	ld.ab   %8, [%2, 4]		\n"
343*4882a593Smuzhiyun 		"	st.ab   %5, [%1, 4]		\n"
344*4882a593Smuzhiyun 		"	st.ab   %6, [%1, 4]		\n"
345*4882a593Smuzhiyun 		"	st.ab   %7, [%1, 4]		\n"
346*4882a593Smuzhiyun 		"	st.ab   %8, [%1, 4]		\n"
347*4882a593Smuzhiyun 		"	sub     %0,%0,16		\n"
348*4882a593Smuzhiyun 		"3:	and.f   %3,%3,0xf		\n"  /* stragglers */
349*4882a593Smuzhiyun 		"	bz      34f			\n"
350*4882a593Smuzhiyun 		"	bbit0   %3,3,31f		\n"  /* 8 bytes left */
351*4882a593Smuzhiyun 		"14:	ld.ab   %5, [%2,4]		\n"
352*4882a593Smuzhiyun 		"15:	ld.ab   %6, [%2,4]		\n"
353*4882a593Smuzhiyun 		"	st.ab   %5, [%1,4]		\n"
354*4882a593Smuzhiyun 		"	st.ab   %6, [%1,4]		\n"
355*4882a593Smuzhiyun 		"	sub.f   %0,%0,8			\n"
356*4882a593Smuzhiyun 		"31:	bbit0   %3,2,32f		\n"  /* 4 bytes left */
357*4882a593Smuzhiyun 		"16:	ld.ab   %5, [%2,4]		\n"
358*4882a593Smuzhiyun 		"	st.ab   %5, [%1,4]		\n"
359*4882a593Smuzhiyun 		"	sub.f   %0,%0,4			\n"
360*4882a593Smuzhiyun 		"32:	bbit0   %3,1,33f		\n"  /* 2 bytes left */
361*4882a593Smuzhiyun 		"17:	ldw.ab  %5, [%2,2]		\n"
362*4882a593Smuzhiyun 		"	stw.ab  %5, [%1,2]		\n"
363*4882a593Smuzhiyun 		"	sub.f   %0,%0,2			\n"
364*4882a593Smuzhiyun 		"33:	bbit0   %3,0,34f		\n"
365*4882a593Smuzhiyun 		"18:	ldb.ab  %5, [%2,1]		\n"  /* 1 byte left */
366*4882a593Smuzhiyun 		"	stb.ab  %5, [%1,1]		\n"
367*4882a593Smuzhiyun 		"	sub.f   %0,%0,1			\n"
368*4882a593Smuzhiyun 		"34:	;nop				\n"
369*4882a593Smuzhiyun 		"	.section .fixup, \"ax\"		\n"
370*4882a593Smuzhiyun 		"	.align 4			\n"
371*4882a593Smuzhiyun 		"4:	j   34b				\n"
372*4882a593Smuzhiyun 		"	.previous			\n"
373*4882a593Smuzhiyun 		"	.section __ex_table, \"a\"	\n"
374*4882a593Smuzhiyun 		"	.align 4			\n"
375*4882a593Smuzhiyun 		"	.word   1b, 4b			\n"
376*4882a593Smuzhiyun 		"	.word   11b,4b			\n"
377*4882a593Smuzhiyun 		"	.word   12b,4b			\n"
378*4882a593Smuzhiyun 		"	.word   13b,4b			\n"
379*4882a593Smuzhiyun 		"	.word   14b,4b			\n"
380*4882a593Smuzhiyun 		"	.word   15b,4b			\n"
381*4882a593Smuzhiyun 		"	.word   16b,4b			\n"
382*4882a593Smuzhiyun 		"	.word   17b,4b			\n"
383*4882a593Smuzhiyun 		"	.word   18b,4b			\n"
384*4882a593Smuzhiyun 		"	.previous			\n"
385*4882a593Smuzhiyun 		: "=r" (res), "+r"(to), "+r"(from), "+r"(n), "=r"(val),
386*4882a593Smuzhiyun 		  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
387*4882a593Smuzhiyun 		:
388*4882a593Smuzhiyun 		: "lp_count", "memory");
389*4882a593Smuzhiyun 	}
390*4882a593Smuzhiyun 
391*4882a593Smuzhiyun 	return res;
392*4882a593Smuzhiyun }
393*4882a593Smuzhiyun 
394*4882a593Smuzhiyun static inline unsigned long
raw_copy_to_user(void __user * to,const void * from,unsigned long n)395*4882a593Smuzhiyun raw_copy_to_user(void __user *to, const void *from, unsigned long n)
396*4882a593Smuzhiyun {
397*4882a593Smuzhiyun 	long res = 0;
398*4882a593Smuzhiyun 	char val;
399*4882a593Smuzhiyun 	unsigned long tmp1, tmp2, tmp3, tmp4;
400*4882a593Smuzhiyun 	unsigned long orig_n = n;
401*4882a593Smuzhiyun 
402*4882a593Smuzhiyun 	if (n == 0)
403*4882a593Smuzhiyun 		return 0;
404*4882a593Smuzhiyun 
405*4882a593Smuzhiyun 	/* unaligned */
406*4882a593Smuzhiyun 	if (((unsigned long)to & 0x3) || ((unsigned long)from & 0x3)) {
407*4882a593Smuzhiyun 
408*4882a593Smuzhiyun 		unsigned char tmp;
409*4882a593Smuzhiyun 
410*4882a593Smuzhiyun 		__asm__ __volatile__(
411*4882a593Smuzhiyun 		"	mov.f   lp_count, %0		\n"
412*4882a593Smuzhiyun 		"	lpnz 3f				\n"
413*4882a593Smuzhiyun 		"	ldb.ab  %1, [%3, 1]		\n"
414*4882a593Smuzhiyun 		"1:	stb.ab  %1, [%2, 1]		\n"
415*4882a593Smuzhiyun 		"	sub     %0, %0, 1		\n"
416*4882a593Smuzhiyun 		"3:	;nop				\n"
417*4882a593Smuzhiyun 		"	.section .fixup, \"ax\"		\n"
418*4882a593Smuzhiyun 		"	.align 4			\n"
419*4882a593Smuzhiyun 		"4:	j   3b				\n"
420*4882a593Smuzhiyun 		"	.previous			\n"
421*4882a593Smuzhiyun 		"	.section __ex_table, \"a\"	\n"
422*4882a593Smuzhiyun 		"	.align 4			\n"
423*4882a593Smuzhiyun 		"	.word   1b, 4b			\n"
424*4882a593Smuzhiyun 		"	.previous			\n"
425*4882a593Smuzhiyun 
426*4882a593Smuzhiyun 		: "+r" (n),
427*4882a593Smuzhiyun 		/* Note as an '&' earlyclobber operand to make sure the
428*4882a593Smuzhiyun 		 * temporary register inside the loop is not the same as
429*4882a593Smuzhiyun 		 * FROM or TO.
430*4882a593Smuzhiyun 		 */
431*4882a593Smuzhiyun 		  "=&r" (tmp), "+r" (to), "+r" (from)
432*4882a593Smuzhiyun 		:
433*4882a593Smuzhiyun 		: "lp_count", "memory");
434*4882a593Smuzhiyun 
435*4882a593Smuzhiyun 		return n;
436*4882a593Smuzhiyun 	}
437*4882a593Smuzhiyun 
438*4882a593Smuzhiyun 	if (__builtin_constant_p(orig_n)) {
439*4882a593Smuzhiyun 		res = orig_n;
440*4882a593Smuzhiyun 
441*4882a593Smuzhiyun 		if (orig_n / 16) {
442*4882a593Smuzhiyun 			orig_n = orig_n % 16;
443*4882a593Smuzhiyun 
444*4882a593Smuzhiyun 			__asm__ __volatile__(
445*4882a593Smuzhiyun 			"	lsr lp_count, %7,4		\n"
446*4882a593Smuzhiyun 			"	lp  3f				\n"
447*4882a593Smuzhiyun 			"	ld.ab %3, [%2, 4]		\n"
448*4882a593Smuzhiyun 			"	ld.ab %4, [%2, 4]		\n"
449*4882a593Smuzhiyun 			"	ld.ab %5, [%2, 4]		\n"
450*4882a593Smuzhiyun 			"	ld.ab %6, [%2, 4]		\n"
451*4882a593Smuzhiyun 			"1:	st.ab %3, [%1, 4]		\n"
452*4882a593Smuzhiyun 			"11:	st.ab %4, [%1, 4]		\n"
453*4882a593Smuzhiyun 			"12:	st.ab %5, [%1, 4]		\n"
454*4882a593Smuzhiyun 			"13:	st.ab %6, [%1, 4]		\n"
455*4882a593Smuzhiyun 			"	sub   %0, %0, 16		\n"
456*4882a593Smuzhiyun 			"3:;nop					\n"
457*4882a593Smuzhiyun 			"	.section .fixup, \"ax\"		\n"
458*4882a593Smuzhiyun 			"	.align 4			\n"
459*4882a593Smuzhiyun 			"4:	j   3b				\n"
460*4882a593Smuzhiyun 			"	.previous			\n"
461*4882a593Smuzhiyun 			"	.section __ex_table, \"a\"	\n"
462*4882a593Smuzhiyun 			"	.align 4			\n"
463*4882a593Smuzhiyun 			"	.word   1b, 4b			\n"
464*4882a593Smuzhiyun 			"	.word   11b,4b			\n"
465*4882a593Smuzhiyun 			"	.word   12b,4b			\n"
466*4882a593Smuzhiyun 			"	.word   13b,4b			\n"
467*4882a593Smuzhiyun 			"	.previous			\n"
468*4882a593Smuzhiyun 			: "+r" (res), "+r"(to), "+r"(from),
469*4882a593Smuzhiyun 			  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
470*4882a593Smuzhiyun 			: "ir"(n)
471*4882a593Smuzhiyun 			: "lp_count", "memory");
472*4882a593Smuzhiyun 		}
473*4882a593Smuzhiyun 		if (orig_n / 8) {
474*4882a593Smuzhiyun 			orig_n = orig_n % 8;
475*4882a593Smuzhiyun 
476*4882a593Smuzhiyun 			__asm__ __volatile__(
477*4882a593Smuzhiyun 			"	ld.ab   %3, [%2,4]		\n"
478*4882a593Smuzhiyun 			"	ld.ab   %4, [%2,4]		\n"
479*4882a593Smuzhiyun 			"14:	st.ab   %3, [%1,4]		\n"
480*4882a593Smuzhiyun 			"15:	st.ab   %4, [%1,4]		\n"
481*4882a593Smuzhiyun 			"	sub     %0, %0, 8		\n"
482*4882a593Smuzhiyun 			"31:;nop				\n"
483*4882a593Smuzhiyun 			"	.section .fixup, \"ax\"		\n"
484*4882a593Smuzhiyun 			"	.align 4			\n"
485*4882a593Smuzhiyun 			"4:	j   31b				\n"
486*4882a593Smuzhiyun 			"	.previous			\n"
487*4882a593Smuzhiyun 			"	.section __ex_table, \"a\"	\n"
488*4882a593Smuzhiyun 			"	.align 4			\n"
489*4882a593Smuzhiyun 			"	.word   14b,4b			\n"
490*4882a593Smuzhiyun 			"	.word   15b,4b			\n"
491*4882a593Smuzhiyun 			"	.previous			\n"
492*4882a593Smuzhiyun 			: "+r" (res), "+r"(to), "+r"(from),
493*4882a593Smuzhiyun 			  "=r"(tmp1), "=r"(tmp2)
494*4882a593Smuzhiyun 			:
495*4882a593Smuzhiyun 			: "memory");
496*4882a593Smuzhiyun 		}
497*4882a593Smuzhiyun 		if (orig_n / 4) {
498*4882a593Smuzhiyun 			orig_n = orig_n % 4;
499*4882a593Smuzhiyun 
500*4882a593Smuzhiyun 			__asm__ __volatile__(
501*4882a593Smuzhiyun 			"	ld.ab   %3, [%2,4]		\n"
502*4882a593Smuzhiyun 			"16:	st.ab   %3, [%1,4]		\n"
503*4882a593Smuzhiyun 			"	sub     %0, %0, 4		\n"
504*4882a593Smuzhiyun 			"32:;nop				\n"
505*4882a593Smuzhiyun 			"	.section .fixup, \"ax\"		\n"
506*4882a593Smuzhiyun 			"	.align 4			\n"
507*4882a593Smuzhiyun 			"4:	j   32b				\n"
508*4882a593Smuzhiyun 			"	.previous			\n"
509*4882a593Smuzhiyun 			"	.section __ex_table, \"a\"	\n"
510*4882a593Smuzhiyun 			"	.align 4			\n"
511*4882a593Smuzhiyun 			"	.word   16b,4b			\n"
512*4882a593Smuzhiyun 			"	.previous			\n"
513*4882a593Smuzhiyun 			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
514*4882a593Smuzhiyun 			:
515*4882a593Smuzhiyun 			: "memory");
516*4882a593Smuzhiyun 		}
517*4882a593Smuzhiyun 		if (orig_n / 2) {
518*4882a593Smuzhiyun 			orig_n = orig_n % 2;
519*4882a593Smuzhiyun 
520*4882a593Smuzhiyun 			__asm__ __volatile__(
521*4882a593Smuzhiyun 			"	ldw.ab    %3, [%2,2]		\n"
522*4882a593Smuzhiyun 			"17:	stw.ab    %3, [%1,2]		\n"
523*4882a593Smuzhiyun 			"	sub       %0, %0, 2		\n"
524*4882a593Smuzhiyun 			"33:;nop				\n"
525*4882a593Smuzhiyun 			"	.section .fixup, \"ax\"		\n"
526*4882a593Smuzhiyun 			"	.align 4			\n"
527*4882a593Smuzhiyun 			"4:	j   33b				\n"
528*4882a593Smuzhiyun 			"	.previous			\n"
529*4882a593Smuzhiyun 			"	.section __ex_table, \"a\"	\n"
530*4882a593Smuzhiyun 			"	.align 4			\n"
531*4882a593Smuzhiyun 			"	.word   17b,4b			\n"
532*4882a593Smuzhiyun 			"	.previous			\n"
533*4882a593Smuzhiyun 			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
534*4882a593Smuzhiyun 			:
535*4882a593Smuzhiyun 			: "memory");
536*4882a593Smuzhiyun 		}
537*4882a593Smuzhiyun 		if (orig_n & 1) {
538*4882a593Smuzhiyun 			__asm__ __volatile__(
539*4882a593Smuzhiyun 			"	ldb.ab  %3, [%2,1]		\n"
540*4882a593Smuzhiyun 			"18:	stb.ab  %3, [%1,1]		\n"
541*4882a593Smuzhiyun 			"	sub     %0, %0, 1		\n"
542*4882a593Smuzhiyun 			"34:	;nop				\n"
543*4882a593Smuzhiyun 			"	.section .fixup, \"ax\"		\n"
544*4882a593Smuzhiyun 			"	.align 4			\n"
545*4882a593Smuzhiyun 			"4:	j   34b				\n"
546*4882a593Smuzhiyun 			"	.previous			\n"
547*4882a593Smuzhiyun 			"	.section __ex_table, \"a\"	\n"
548*4882a593Smuzhiyun 			"	.align 4			\n"
549*4882a593Smuzhiyun 			"	.word   18b,4b			\n"
550*4882a593Smuzhiyun 			"	.previous			\n"
551*4882a593Smuzhiyun 			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
552*4882a593Smuzhiyun 			:
553*4882a593Smuzhiyun 			: "memory");
554*4882a593Smuzhiyun 		}
555*4882a593Smuzhiyun 	} else {  /* n is NOT constant, so laddered copy of 16x,8,4,2,1  */
556*4882a593Smuzhiyun 
557*4882a593Smuzhiyun 		__asm__ __volatile__(
558*4882a593Smuzhiyun 		"	mov   %0,%3			\n"
559*4882a593Smuzhiyun 		"	lsr.f lp_count, %3,4		\n"  /* 16x bytes */
560*4882a593Smuzhiyun 		"	lpnz  3f			\n"
561*4882a593Smuzhiyun 		"	ld.ab %5, [%2, 4]		\n"
562*4882a593Smuzhiyun 		"	ld.ab %6, [%2, 4]		\n"
563*4882a593Smuzhiyun 		"	ld.ab %7, [%2, 4]		\n"
564*4882a593Smuzhiyun 		"	ld.ab %8, [%2, 4]		\n"
565*4882a593Smuzhiyun 		"1:	st.ab %5, [%1, 4]		\n"
566*4882a593Smuzhiyun 		"11:	st.ab %6, [%1, 4]		\n"
567*4882a593Smuzhiyun 		"12:	st.ab %7, [%1, 4]		\n"
568*4882a593Smuzhiyun 		"13:	st.ab %8, [%1, 4]		\n"
569*4882a593Smuzhiyun 		"	sub   %0, %0, 16		\n"
570*4882a593Smuzhiyun 		"3:	and.f %3,%3,0xf			\n" /* stragglers */
571*4882a593Smuzhiyun 		"	bz 34f				\n"
572*4882a593Smuzhiyun 		"	bbit0   %3,3,31f		\n" /* 8 bytes left */
573*4882a593Smuzhiyun 		"	ld.ab   %5, [%2,4]		\n"
574*4882a593Smuzhiyun 		"	ld.ab   %6, [%2,4]		\n"
575*4882a593Smuzhiyun 		"14:	st.ab   %5, [%1,4]		\n"
576*4882a593Smuzhiyun 		"15:	st.ab   %6, [%1,4]		\n"
577*4882a593Smuzhiyun 		"	sub.f   %0, %0, 8		\n"
578*4882a593Smuzhiyun 		"31:	bbit0   %3,2,32f		\n"  /* 4 bytes left */
579*4882a593Smuzhiyun 		"	ld.ab   %5, [%2,4]		\n"
580*4882a593Smuzhiyun 		"16:	st.ab   %5, [%1,4]		\n"
581*4882a593Smuzhiyun 		"	sub.f   %0, %0, 4		\n"
582*4882a593Smuzhiyun 		"32:	bbit0 %3,1,33f			\n"  /* 2 bytes left */
583*4882a593Smuzhiyun 		"	ldw.ab    %5, [%2,2]		\n"
584*4882a593Smuzhiyun 		"17:	stw.ab    %5, [%1,2]		\n"
585*4882a593Smuzhiyun 		"	sub.f %0, %0, 2			\n"
586*4882a593Smuzhiyun 		"33:	bbit0 %3,0,34f			\n"
587*4882a593Smuzhiyun 		"	ldb.ab    %5, [%2,1]		\n"  /* 1 byte left */
588*4882a593Smuzhiyun 		"18:	stb.ab  %5, [%1,1]		\n"
589*4882a593Smuzhiyun 		"	sub.f %0, %0, 1			\n"
590*4882a593Smuzhiyun 		"34:	;nop				\n"
591*4882a593Smuzhiyun 		"	.section .fixup, \"ax\"		\n"
592*4882a593Smuzhiyun 		"	.align 4			\n"
593*4882a593Smuzhiyun 		"4:	j   34b				\n"
594*4882a593Smuzhiyun 		"	.previous			\n"
595*4882a593Smuzhiyun 		"	.section __ex_table, \"a\"	\n"
596*4882a593Smuzhiyun 		"	.align 4			\n"
597*4882a593Smuzhiyun 		"	.word   1b, 4b			\n"
598*4882a593Smuzhiyun 		"	.word   11b,4b			\n"
599*4882a593Smuzhiyun 		"	.word   12b,4b			\n"
600*4882a593Smuzhiyun 		"	.word   13b,4b			\n"
601*4882a593Smuzhiyun 		"	.word   14b,4b			\n"
602*4882a593Smuzhiyun 		"	.word   15b,4b			\n"
603*4882a593Smuzhiyun 		"	.word   16b,4b			\n"
604*4882a593Smuzhiyun 		"	.word   17b,4b			\n"
605*4882a593Smuzhiyun 		"	.word   18b,4b			\n"
606*4882a593Smuzhiyun 		"	.previous			\n"
607*4882a593Smuzhiyun 		: "=r" (res), "+r"(to), "+r"(from), "+r"(n), "=r"(val),
608*4882a593Smuzhiyun 		  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
609*4882a593Smuzhiyun 		:
610*4882a593Smuzhiyun 		: "lp_count", "memory");
611*4882a593Smuzhiyun 	}
612*4882a593Smuzhiyun 
613*4882a593Smuzhiyun 	return res;
614*4882a593Smuzhiyun }
615*4882a593Smuzhiyun 
/*
 * __arc_clear_user - zero @n bytes of user memory at @to.
 *
 * Returns the number of bytes NOT cleared (0 on full success).
 * Head-aligns @to with a byte then a halfword store, bulk-zeroes full
 * words in a zero-overhead loop, then clears the 2/1-byte tail.  Each
 * faultable store (labels 75..79) subtracts from @res only AFTER it
 * succeeds, so a fault (fixup just bails to label 5) leaves the
 * residual count in @res.  The "3:" fixup label intentionally reuses a
 * GAS numeric local label; references resolve within each section.
 */
static inline unsigned long __arc_clear_user(void __user *to, unsigned long n)
{
	long res = n;
	unsigned char *d_char = to;

	__asm__ __volatile__(
	"	bbit0   %0, 0, 1f		\n"
	"75:	stb.ab  %2, [%0,1]		\n"
	"	sub %1, %1, 1			\n"
	"1:	bbit0   %0, 1, 2f		\n"
	"76:	stw.ab  %2, [%0,2]		\n"
	"	sub %1, %1, 2			\n"
	"2:	asr.f   lp_count, %1, 2		\n"
	"	lpnz    3f			\n"
	"77:	st.ab   %2, [%0,4]		\n"
	"	sub %1, %1, 4			\n"
	"3:	bbit0   %1, 1, 4f		\n"
	"78:	stw.ab  %2, [%0,2]		\n"
	"	sub %1, %1, 2			\n"
	"4:	bbit0   %1, 0, 5f		\n"
	"79:	stb.ab  %2, [%0,1]		\n"
	"	sub %1, %1, 1			\n"
	"5:					\n"
	"	.section .fixup, \"ax\"		\n"
	"	.align 4			\n"
	"3:	j   5b				\n"
	"	.previous			\n"
	"	.section __ex_table, \"a\"	\n"
	"	.align 4			\n"
	"	.word   75b, 3b			\n"
	"	.word   76b, 3b			\n"
	"	.word   77b, 3b			\n"
	"	.word   78b, 3b			\n"
	"	.word   79b, 3b			\n"
	"	.previous			\n"
	: "+r"(d_char), "+r"(res)
	: "i"(0)
	: "lp_count", "memory");

	return res;
}
657*4882a593Smuzhiyun 
658*4882a593Smuzhiyun static inline long
__arc_strncpy_from_user(char * dst,const char __user * src,long count)659*4882a593Smuzhiyun __arc_strncpy_from_user(char *dst, const char __user *src, long count)
660*4882a593Smuzhiyun {
661*4882a593Smuzhiyun 	long res = 0;
662*4882a593Smuzhiyun 	char val;
663*4882a593Smuzhiyun 
664*4882a593Smuzhiyun 	if (count == 0)
665*4882a593Smuzhiyun 		return 0;
666*4882a593Smuzhiyun 
667*4882a593Smuzhiyun 	__asm__ __volatile__(
668*4882a593Smuzhiyun 	"	mov	lp_count, %5		\n"
669*4882a593Smuzhiyun 	"	lp	3f			\n"
670*4882a593Smuzhiyun 	"1:	ldb.ab  %3, [%2, 1]		\n"
671*4882a593Smuzhiyun 	"	breq.d	%3, 0, 3f               \n"
672*4882a593Smuzhiyun 	"	stb.ab  %3, [%1, 1]		\n"
673*4882a593Smuzhiyun 	"	add	%0, %0, 1	# Num of NON NULL bytes copied	\n"
674*4882a593Smuzhiyun 	"3:								\n"
675*4882a593Smuzhiyun 	"	.section .fixup, \"ax\"		\n"
676*4882a593Smuzhiyun 	"	.align 4			\n"
677*4882a593Smuzhiyun 	"4:	mov %0, %4		# sets @res as -EFAULT	\n"
678*4882a593Smuzhiyun 	"	j   3b				\n"
679*4882a593Smuzhiyun 	"	.previous			\n"
680*4882a593Smuzhiyun 	"	.section __ex_table, \"a\"	\n"
681*4882a593Smuzhiyun 	"	.align 4			\n"
682*4882a593Smuzhiyun 	"	.word   1b, 4b			\n"
683*4882a593Smuzhiyun 	"	.previous			\n"
684*4882a593Smuzhiyun 	: "+r"(res), "+r"(dst), "+r"(src), "=r"(val)
685*4882a593Smuzhiyun 	: "g"(-EFAULT), "r"(count)
686*4882a593Smuzhiyun 	: "lp_count", "memory");
687*4882a593Smuzhiyun 
688*4882a593Smuzhiyun 	return res;
689*4882a593Smuzhiyun }
690*4882a593Smuzhiyun 
/*
 * __arc_strnlen_user - get length of a user-space string, bounded by @n.
 *
 * Returns the string length INCLUDING the terminating NUL (i.e.
 * strlen+1) when a NUL is found within @n bytes; a value greater than
 * @n when no NUL is found (legacy strnlen_user convention); 0 if the
 * user load faults.  The breq.d delay slot executes the sub.f even on
 * the taken branch, which is what makes the n - cnt arithmetic come out
 * as strlen+1.
 */
static inline long __arc_strnlen_user(const char __user *s, long n)
{
	long res, tmp1, cnt;
	char val;

	__asm__ __volatile__(
	"	mov %2, %1			\n"
	"1:	ldb.ab  %3, [%0, 1]		\n"
	"	breq.d  %3, 0, 2f		\n"
	"	sub.f   %2, %2, 1		\n"
	"	bnz 1b				\n"
	"	sub %2, %2, 1			\n"
	"2:	sub %0, %1, %2			\n"
	"3:	;nop				\n"
	"	.section .fixup, \"ax\"		\n"
	"	.align 4			\n"
	"4:	mov %0, 0			\n"
	"	j   3b				\n"
	"	.previous			\n"
	"	.section __ex_table, \"a\"	\n"
	"	.align 4			\n"
	"	.word 1b, 4b			\n"
	"	.previous			\n"
	: "=r"(res), "=r"(tmp1), "=r"(cnt), "=r"(val)
	: "0"(s), "1"(n)
	: "memory");

	return res;
}
720*4882a593Smuzhiyun 
/*
 * Inline vs out-of-line helper selection (see file header notes):
 * when NOT optimizing for size, keep copy/clear/strncpy/strnlen inline
 * for speed; under -Os (CONFIG_CC_OPTIMIZE_FOR_SIZE) route them to the
 * single out-of-line *_noinline instances to save code space.
 */
#ifndef CONFIG_CC_OPTIMIZE_FOR_SIZE

#define INLINE_COPY_TO_USER
#define INLINE_COPY_FROM_USER

#define __clear_user(d, n)		__arc_clear_user(d, n)
#define __strncpy_from_user(d, s, n)	__arc_strncpy_from_user(d, s, n)
#define __strnlen_user(s, n)		__arc_strnlen_user(s, n)
#else
extern unsigned long arc_clear_user_noinline(void __user *to,
		unsigned long n);
extern long arc_strncpy_from_user_noinline (char *dst, const char __user *src,
		long count);
extern long arc_strnlen_user_noinline(const char __user *src, long n);

#define __clear_user(d, n)		arc_clear_user_noinline(d, n)
#define __strncpy_from_user(d, s, n)	arc_strncpy_from_user_noinline(d, s, n)
#define __strnlen_user(s, n)		arc_strnlen_user_noinline(s, n)

#endif
741*4882a593Smuzhiyun 
742*4882a593Smuzhiyun #include <asm/segment.h>
743*4882a593Smuzhiyun #include <asm-generic/uaccess.h>
744*4882a593Smuzhiyun 
745*4882a593Smuzhiyun #endif
746