xref: /OK3568_Linux_fs/kernel/arch/sparc/lib/memset.S (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun/* SPDX-License-Identifier: GPL-2.0 */
2*4882a593Smuzhiyun/* linux/arch/sparc/lib/memset.S: Sparc optimized memset, bzero and clear_user code
3*4882a593Smuzhiyun * Copyright (C) 1991,1996 Free Software Foundation
4*4882a593Smuzhiyun * Copyright (C) 1996,1997 Jakub Jelinek (jj@sunsite.mff.cuni.cz)
5*4882a593Smuzhiyun * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
6*4882a593Smuzhiyun *
7*4882a593Smuzhiyun * Calls to memset return the initial %o0. Calls to bzero return 0 on success,
8*4882a593Smuzhiyun * or the number of bytes not yet set if an exception occurs and we were
9*4882a593Smuzhiyun * called as clear_user.
10*4882a593Smuzhiyun */
11*4882a593Smuzhiyun
12*4882a593Smuzhiyun#include <asm/ptrace.h>
13*4882a593Smuzhiyun#include <asm/export.h>
14*4882a593Smuzhiyun
15*4882a593Smuzhiyun/* Work around cpp -rob */
16*4882a593Smuzhiyun#define ALLOC #alloc
17*4882a593Smuzhiyun#define EXECINSTR #execinstr
18*4882a593Smuzhiyun#define EX(x,y,a,b) 				\
19*4882a593Smuzhiyun98: 	x,y;					\
20*4882a593Smuzhiyun	.section .fixup,ALLOC,EXECINSTR;	\
21*4882a593Smuzhiyun	.align	4;				\
22*4882a593Smuzhiyun99:	ba 30f;					\
23*4882a593Smuzhiyun	 a, b, %o0;				\
24*4882a593Smuzhiyun	.section __ex_table,ALLOC;		\
25*4882a593Smuzhiyun	.align	4;				\
26*4882a593Smuzhiyun	.word	98b, 99b;			\
27*4882a593Smuzhiyun	.text;					\
28*4882a593Smuzhiyun	.align	4
29*4882a593Smuzhiyun
30*4882a593Smuzhiyun#define EXT(start,end,handler) 			\
31*4882a593Smuzhiyun	.section __ex_table,ALLOC;		\
32*4882a593Smuzhiyun	.align	4;				\
33*4882a593Smuzhiyun	.word	start, 0, end, handler;		\
34*4882a593Smuzhiyun	.text;					\
35*4882a593Smuzhiyun	.align	4
36*4882a593Smuzhiyun
37*4882a593Smuzhiyun/* Please don't change these macros, unless you change the logic
38*4882a593Smuzhiyun * in the .fixup section below as well.
39*4882a593Smuzhiyun * Store 64 bytes at (BASE + OFFSET) using value SOURCE. */
40*4882a593Smuzhiyun#define ZERO_BIG_BLOCK(base, offset, source)    \
41*4882a593Smuzhiyun	std	source, [base + offset + 0x00]; \
42*4882a593Smuzhiyun	std	source, [base + offset + 0x08]; \
43*4882a593Smuzhiyun	std	source, [base + offset + 0x10]; \
44*4882a593Smuzhiyun	std	source, [base + offset + 0x18]; \
45*4882a593Smuzhiyun	std	source, [base + offset + 0x20]; \
46*4882a593Smuzhiyun	std	source, [base + offset + 0x28]; \
47*4882a593Smuzhiyun	std	source, [base + offset + 0x30]; \
48*4882a593Smuzhiyun	std	source, [base + offset + 0x38];
49*4882a593Smuzhiyun
50*4882a593Smuzhiyun#define ZERO_LAST_BLOCKS(base, offset, source)	\
51*4882a593Smuzhiyun	std	source, [base - offset - 0x38]; \
52*4882a593Smuzhiyun	std	source, [base - offset - 0x30]; \
53*4882a593Smuzhiyun	std	source, [base - offset - 0x28]; \
54*4882a593Smuzhiyun	std	source, [base - offset - 0x20]; \
55*4882a593Smuzhiyun	std	source, [base - offset - 0x18]; \
56*4882a593Smuzhiyun	std	source, [base - offset - 0x10]; \
57*4882a593Smuzhiyun	std	source, [base - offset - 0x08]; \
58*4882a593Smuzhiyun	std	source, [base - offset - 0x00];
59*4882a593Smuzhiyun
60*4882a593Smuzhiyun	.text
61*4882a593Smuzhiyun	.align 4
62*4882a593Smuzhiyun
63*4882a593Smuzhiyun        .globl  __bzero_begin
64*4882a593Smuzhiyun__bzero_begin:
65*4882a593Smuzhiyun
66*4882a593Smuzhiyun	.globl	__bzero
67*4882a593Smuzhiyun	.type	__bzero,#function
68*4882a593Smuzhiyun	.globl	memset
69*4882a593Smuzhiyun	EXPORT_SYMBOL(__bzero)
70*4882a593Smuzhiyun	EXPORT_SYMBOL(memset)
71*4882a593Smuzhiyun	.globl	__memset_start, __memset_end
72*4882a593Smuzhiyun__memset_start:
73*4882a593Smuzhiyunmemset:
74*4882a593Smuzhiyun	mov	%o0, %g1
75*4882a593Smuzhiyun	mov	1, %g4
76*4882a593Smuzhiyun	and	%o1, 0xff, %g3
77*4882a593Smuzhiyun	sll	%g3, 8, %g2
78*4882a593Smuzhiyun	or	%g3, %g2, %g3
79*4882a593Smuzhiyun	sll	%g3, 16, %g2
80*4882a593Smuzhiyun	or	%g3, %g2, %g3
81*4882a593Smuzhiyun	b	1f
82*4882a593Smuzhiyun	 mov	%o2, %o1
83*4882a593Smuzhiyun3:
84*4882a593Smuzhiyun	cmp	%o2, 3
85*4882a593Smuzhiyun	be	2f
86*4882a593Smuzhiyun	 EX(stb	%g3, [%o0], sub %o1, 0)
87*4882a593Smuzhiyun
88*4882a593Smuzhiyun	cmp	%o2, 2
89*4882a593Smuzhiyun	be	2f
90*4882a593Smuzhiyun	 EX(stb	%g3, [%o0 + 0x01], sub %o1, 1)
91*4882a593Smuzhiyun
92*4882a593Smuzhiyun	EX(stb	%g3, [%o0 + 0x02], sub %o1, 2)
93*4882a593Smuzhiyun2:
94*4882a593Smuzhiyun	sub	%o2, 4, %o2
95*4882a593Smuzhiyun	add	%o1, %o2, %o1
96*4882a593Smuzhiyun	b	4f
97*4882a593Smuzhiyun	 sub	%o0, %o2, %o0
98*4882a593Smuzhiyun
99*4882a593Smuzhiyun__bzero:
100*4882a593Smuzhiyun	clr	%g4
101*4882a593Smuzhiyun	mov	%g0, %g3
102*4882a593Smuzhiyun1:
103*4882a593Smuzhiyun	cmp	%o1, 7
104*4882a593Smuzhiyun	bleu	7f
105*4882a593Smuzhiyun	 andcc	%o0, 3, %o2
106*4882a593Smuzhiyun
107*4882a593Smuzhiyun	bne	3b
108*4882a593Smuzhiyun4:
109*4882a593Smuzhiyun	 andcc	%o0, 4, %g0
110*4882a593Smuzhiyun
111*4882a593Smuzhiyun	be	2f
112*4882a593Smuzhiyun	 mov	%g3, %g2
113*4882a593Smuzhiyun
114*4882a593Smuzhiyun	EX(st	%g3, [%o0], sub %o1, 0)
115*4882a593Smuzhiyun	sub	%o1, 4, %o1
116*4882a593Smuzhiyun	add	%o0, 4, %o0
117*4882a593Smuzhiyun2:
118*4882a593Smuzhiyun	andcc	%o1, 0xffffff80, %o3	! Now everything is 8 aligned and o1 is len to run
119*4882a593Smuzhiyun	be	9f
120*4882a593Smuzhiyun	 andcc	%o1, 0x78, %o2
121*4882a593Smuzhiyun10:
122*4882a593Smuzhiyun	ZERO_BIG_BLOCK(%o0, 0x00, %g2)
123*4882a593Smuzhiyun	subcc	%o3, 128, %o3
124*4882a593Smuzhiyun	ZERO_BIG_BLOCK(%o0, 0x40, %g2)
125*4882a593Smuzhiyun11:
126*4882a593Smuzhiyun	EXT(10b, 11b, 20f)
127*4882a593Smuzhiyun	bne	10b
128*4882a593Smuzhiyun	 add	%o0, 128, %o0
129*4882a593Smuzhiyun
130*4882a593Smuzhiyun	orcc	%o2, %g0, %g0
131*4882a593Smuzhiyun9:
132*4882a593Smuzhiyun	be	13f
133*4882a593Smuzhiyun	 andcc	%o1, 7, %o1
134*4882a593Smuzhiyun
135*4882a593Smuzhiyun	srl	%o2, 1, %o3
136*4882a593Smuzhiyun	set	13f, %o4
137*4882a593Smuzhiyun	sub	%o4, %o3, %o4
138*4882a593Smuzhiyun	jmp	%o4
139*4882a593Smuzhiyun	 add	%o0, %o2, %o0
140*4882a593Smuzhiyun
141*4882a593Smuzhiyun12:
142*4882a593Smuzhiyun	ZERO_LAST_BLOCKS(%o0, 0x48, %g2)
143*4882a593Smuzhiyun	ZERO_LAST_BLOCKS(%o0, 0x08, %g2)
144*4882a593Smuzhiyun13:
145*4882a593Smuzhiyun	EXT(12b, 13b, 21f)
146*4882a593Smuzhiyun	be	8f
147*4882a593Smuzhiyun	 andcc	%o1, 4, %g0
148*4882a593Smuzhiyun
149*4882a593Smuzhiyun	be	1f
150*4882a593Smuzhiyun	 andcc	%o1, 2, %g0
151*4882a593Smuzhiyun
152*4882a593Smuzhiyun	EX(st	%g3, [%o0], and %o1, 7)
153*4882a593Smuzhiyun	add	%o0, 4, %o0
154*4882a593Smuzhiyun1:
155*4882a593Smuzhiyun	be	1f
156*4882a593Smuzhiyun	 andcc	%o1, 1, %g0
157*4882a593Smuzhiyun
158*4882a593Smuzhiyun	EX(sth	%g3, [%o0], and %o1, 3)
159*4882a593Smuzhiyun	add	%o0, 2, %o0
160*4882a593Smuzhiyun1:
161*4882a593Smuzhiyun	bne,a	8f
162*4882a593Smuzhiyun	 EX(stb	%g3, [%o0], and %o1, 1)
163*4882a593Smuzhiyun8:
164*4882a593Smuzhiyun	b	0f
165*4882a593Smuzhiyun	 nop
166*4882a593Smuzhiyun7:
167*4882a593Smuzhiyun	be	13b
168*4882a593Smuzhiyun	 orcc	%o1, 0, %g0
169*4882a593Smuzhiyun
170*4882a593Smuzhiyun	be	0f
171*4882a593Smuzhiyun8:
172*4882a593Smuzhiyun	 add	%o0, 1, %o0
173*4882a593Smuzhiyun	subcc	%o1, 1, %o1
174*4882a593Smuzhiyun	bne	8b
175*4882a593Smuzhiyun	 EX(stb	%g3, [%o0 - 1], add %o1, 1)
176*4882a593Smuzhiyun0:
177*4882a593Smuzhiyun	andcc	%g4, 1, %g0
178*4882a593Smuzhiyun	be	5f
179*4882a593Smuzhiyun	 nop
180*4882a593Smuzhiyun	retl
181*4882a593Smuzhiyun	 mov	%g1, %o0
182*4882a593Smuzhiyun5:
183*4882a593Smuzhiyun	retl
184*4882a593Smuzhiyun	 clr	%o0
185*4882a593Smuzhiyun__memset_end:
186*4882a593Smuzhiyun
187*4882a593Smuzhiyun	.section .fixup,#alloc,#execinstr
188*4882a593Smuzhiyun	.align	4
189*4882a593Smuzhiyun20:
190*4882a593Smuzhiyun	cmp	%g2, 8
191*4882a593Smuzhiyun	bleu	1f
192*4882a593Smuzhiyun	 and	%o1, 0x7f, %o1
193*4882a593Smuzhiyun	sub	%g2, 9, %g2
194*4882a593Smuzhiyun	add	%o3, 64, %o3
195*4882a593Smuzhiyun1:
196*4882a593Smuzhiyun	sll	%g2, 3, %g2
197*4882a593Smuzhiyun	add	%o3, %o1, %o0
198*4882a593Smuzhiyun	b 30f
199*4882a593Smuzhiyun	 sub	%o0, %g2, %o0
200*4882a593Smuzhiyun21:
201*4882a593Smuzhiyun	mov	8, %o0
202*4882a593Smuzhiyun	and	%o1, 7, %o1
203*4882a593Smuzhiyun	sub	%o0, %g2, %o0
204*4882a593Smuzhiyun	sll	%o0, 3, %o0
205*4882a593Smuzhiyun	b 30f
206*4882a593Smuzhiyun	 add	%o0, %o1, %o0
207*4882a593Smuzhiyun30:
208*4882a593Smuzhiyun/* %o4 is faulting address, %o5 is %pc where fault occurred */
209*4882a593Smuzhiyun	save	%sp, -104, %sp
210*4882a593Smuzhiyun	mov	%i5, %o0
211*4882a593Smuzhiyun	mov	%i7, %o1
212*4882a593Smuzhiyun	call	lookup_fault
213*4882a593Smuzhiyun	 mov	%i4, %o2
214*4882a593Smuzhiyun	ret
215*4882a593Smuzhiyun	 restore
216*4882a593Smuzhiyun
217*4882a593Smuzhiyun	.globl __bzero_end
218*4882a593Smuzhiyun__bzero_end:
219