/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_STRING_32_H
#define _ASM_X86_STRING_32_H

#ifdef __KERNEL__

/* Let gcc decide whether to inline or use the out-of-line functions */

#define __HAVE_ARCH_STRCPY
extern char *strcpy(char *dest, const char *src);

#define __HAVE_ARCH_STRNCPY
extern char *strncpy(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCAT
extern char *strcat(char *dest, const char *src);

#define __HAVE_ARCH_STRNCAT
extern char *strncat(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCMP
extern int strcmp(const char *cs, const char *ct);

#define __HAVE_ARCH_STRNCMP
extern int strncmp(const char *cs, const char *ct, size_t count);

#define __HAVE_ARCH_STRCHR
extern char *strchr(const char *s, int c);

#define __HAVE_ARCH_STRLEN
extern size_t strlen(const char *s);

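/*
 * Note: each __HAVE_ARCH_* define above tells the generic string code
 * in <linux/string.h> / lib/string.c that this architecture supplies
 * its own implementation, so the generic fallback is compiled out.
 */
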
static __always_inline void *__memcpy(void *to, const void *from, size_t n)
{
	int d0, d1, d2;
	asm volatile("rep ; movsl\n\t"
		     "movl %4,%%ecx\n\t"
		     "andl $3,%%ecx\n\t"
		     "jz 1f\n\t"
		     "rep ; movsb\n\t"
		     "1:"
		     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
		     : "0" (n / 4), "g" (n), "1" ((long)to), "2" ((long)from)
		     : "memory");
	return to;
}
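
/*
 * How the asm above works: the "0" (n / 4) input seeds %ecx with the
 * dword count and "1"/"2" seed %edi/%esi, so "rep ; movsl" copies
 * n / 4 dwords; the leftover n & 3 bytes are then copied with
 * "rep ; movsb".  Roughly equivalent C (illustrative sketch only):
 *
 *	for (i = 0; i < n / 4; i++)
 *		((int *)to)[i] = ((const int *)from)[i];
 *	for (i = n & ~3; i < n; i++)
 *		((char *)to)[i] = ((const char *)from)[i];
 */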

/*
 * This looks ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static __always_inline void *__constant_memcpy(void *to, const void *from,
					       size_t n)
{
	long esi, edi;
	if (!n)
		return to;

	switch (n) {
	case 1:
		*(char *)to = *(char *)from;
		return to;
	case 2:
		*(short *)to = *(short *)from;
		return to;
	case 4:
		*(int *)to = *(int *)from;
		return to;
	case 3:
		*(short *)to = *(short *)from;
		*((char *)to + 2) = *((char *)from + 2);
		return to;
	case 5:
		*(int *)to = *(int *)from;
		*((char *)to + 4) = *((char *)from + 4);
		return to;
	case 6:
		*(int *)to = *(int *)from;
		*((short *)to + 2) = *((short *)from + 2);
		return to;
	case 8:
		*(int *)to = *(int *)from;
		*((int *)to + 1) = *((int *)from + 1);
		return to;
	}

	esi = (long)from;
	edi = (long)to;
	if (n >= 5 * 4) {
		/* large block: use rep prefix */
		int ecx;
		asm volatile("rep ; movsl"
			     : "=&c" (ecx), "=&D" (edi), "=&S" (esi)
			     : "0" (n / 4), "1" (edi), "2" (esi)
			     : "memory"
		);
	} else {
		/* small block: avoid clobbering ecx and keep the code smaller */
		if (n >= 4 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 3 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 2 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 1 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
	}
	switch (n % 4) {
		/* tail */
	case 0:
		return to;
	case 1:
		asm volatile("movsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	case 2:
		asm volatile("movsw"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	default:
		asm volatile("movsw\n\tmovsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	}
}
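
/*
 * Because n is a compile-time constant at every caller of
 * __constant_memcpy(), the compiler folds the switch and the size
 * checks away.  For example (an illustrative expectation; exact code
 * generation is up to gcc), __constant_memcpy(to, from, 6) should
 * reduce to one 32-bit move plus one 16-bit move, with no loop and
 * no branches.
 */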

#define __HAVE_ARCH_MEMCPY
extern void *memcpy(void *, const void *, size_t);

#ifndef CONFIG_FORTIFY_SOURCE
#ifdef CONFIG_X86_USE_3DNOW

#include <asm/mmx.h>

/*
 * This CPU favours 3DNow! strongly (e.g. the AMD Athlon)
 */

static inline void *__constant_memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __constant_memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

static inline void *__memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

#define memcpy(t, f, n)				\
	(__builtin_constant_p((n))		\
	 ? __constant_memcpy3d((t), (f), (n))	\
	 : __memcpy3d((t), (f), (n)))

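/*
 * A note on the 512-byte cutoff above: _mmx_memcpy() has to save and
 * restore the FPU/MMX state around the copy, an overhead that only
 * pays off for larger blocks, so short copies stay on the plain
 * integer paths.
 */
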
#else

/*
 * No 3DNow!
 */

#define memcpy(t, f, n) __builtin_memcpy(t, f, n)

#endif
#endif /* !CONFIG_FORTIFY_SOURCE */

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t n);

extern int memcmp(const void *, const void *, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#define memcmp __builtin_memcmp
#endif

#define __HAVE_ARCH_MEMCHR
extern void *memchr(const void *cs, int c, size_t count);

static inline void *__memset_generic(void *s, char c, size_t count)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosb"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "1" (s), "0" (count)
		     : "memory");
	return s;
}

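/*
 * In the asm above, "a" (c) puts the fill byte in %al, "0" (count)
 * puts the byte count in %ecx and "1" (s) the destination in %edi,
 * so "rep stosb" stores count copies of the byte.  memset16() and
 * memset32() below use the same pattern with stosw/stosl.
 */
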
/* we might want to write optimized versions of these later */
#define __constant_count_memset(s, c, count) __memset_generic((s), (c), (count))

/* Added by Gertjan van Wingerde to make the minix and sysv modules work */
#define __HAVE_ARCH_STRNLEN
extern size_t strnlen(const char *s, size_t count);
/* end of additional stuff */

#define __HAVE_ARCH_STRSTR
extern char *strstr(const char *cs, const char *ct);

#define __memset(s, c, count)				\
	(__builtin_constant_p(count)			\
	 ? __constant_count_memset((s), (c), (count))	\
	 : __memset_generic((s), (c), (count)))

#define __HAVE_ARCH_MEMSET
extern void *memset(void *, int, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#define memset(s, c, count) __builtin_memset(s, c, count)
#endif /* !CONFIG_FORTIFY_SOURCE */

#define __HAVE_ARCH_MEMSET16
static inline void *memset16(uint16_t *s, uint16_t v, size_t n)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosw"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}

#define __HAVE_ARCH_MEMSET32
static inline void *memset32(uint32_t *s, uint32_t v, size_t n)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosl"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}

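/*
 * Note that memset16()/memset32() take a count of 16-/32-bit cells,
 * not bytes.  Illustrative use (hypothetical buffer): painting a
 * 640-pixel RGB565 scanline solid red:
 *
 *	u16 line[640];
 *	memset16(line, 0xf800, ARRAY_SIZE(line));
 */
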
/*
 * find the first occurrence of byte 'c', or one byte past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
extern void *memscan(void *addr, int c, size_t size);

#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_32_H */