/*
 * include/asm-xtensa/string.h
 *
 * These trivial string functions are considered part of the public domain.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2001 - 2005 Tensilica Inc.
 */

/* We should optimize these. See arch/xtensa/lib/strncpy_user.S */

#ifndef _XTENSA_STRING_H
#define _XTENSA_STRING_H

#define __HAVE_ARCH_STRCPY
/*
 * strcpy - copy the NUL-terminated string @__src into @__dest.
 * @__dest: destination buffer, must be large enough for @__src incl. NUL
 * @__src:  source string
 *
 * Returns the original value of @__dest.
 */
static inline char *strcpy(char *__dest, const char *__src)
{
	register char *__xdest = __dest;	/* saved for the return value */
	unsigned long __dummy;			/* scratch: byte currently in flight */

	/*
	 * Copy one byte per iteration: load from src, store to dest,
	 * advance both pointers, and loop while the byte just copied was
	 * non-zero.  The bnez test runs after the store, so the
	 * terminating NUL itself is written to dest before the loop exits.
	 */
	__asm__ __volatile__("1:\n\t"
		"l8ui %2, %1, 0\n\t"
		"s8i %2, %0, 0\n\t"
		"addi %1, %1, 1\n\t"
		"addi %0, %0, 1\n\t"
		"bnez %2, 1b\n\t"
		: "=r" (__dest), "=r" (__src), "=&r" (__dummy)
		: "0" (__dest), "1" (__src)
		: "memory");

	return __xdest;
}

#define __HAVE_ARCH_STRNCPY
/*
 * strncpy - copy at most @__n bytes of a string.
 * @__dest: destination buffer of at least @__n bytes
 * @__src:  source string
 * @__n:    maximum number of bytes to copy
 *
 * Returns the original value of @__dest.
 *
 * NOTE(review): unlike ISO C strncpy(), this stops right after the
 * terminating NUL has been stored and does NOT pad the remainder of
 * @__dest with NULs (there is no padding loop below).  If no NUL occurs
 * within @__n source bytes, the result is not NUL-terminated.
 */
static inline char *strncpy(char *__dest, const char *__src, size_t __n)
{
	register char *__xdest = __dest;	/* saved for the return value */
	unsigned long __dummy;			/* scratch: byte currently in flight */

	/* Nothing to do for a zero-length copy. */
	if (__n == 0)
		return __xdest;

	/*
	 * Copy bytes until either the NUL has been stored (beqz) or the
	 * source pointer reaches the precomputed limit __src + __n (bne
	 * against operand %5).
	 */
	__asm__ __volatile__(
		"1:\n\t"
		"l8ui %2, %1, 0\n\t"
		"s8i %2, %0, 0\n\t"
		"addi %1, %1, 1\n\t"
		"addi %0, %0, 1\n\t"
		"beqz %2, 2f\n\t"
		"bne %1, %5, 1b\n"
		"2:"
		: "=r" (__dest), "=r" (__src), "=&r" (__dummy)
		: "0" (__dest), "1" (__src), "r" ((uintptr_t)__src+__n)
		: "memory");

	return __xdest;
}

#define __HAVE_ARCH_STRCMP
/*
 * strcmp - compare two NUL-terminated strings.
 * @__cs: first string
 * @__ct: second string
 *
 * Returns 0 if the strings are equal, otherwise the difference between
 * the first pair of bytes that differ.  Bytes are loaded with l8ui
 * (zero-extending), i.e. they are compared as unsigned 8-bit values
 * before the final subtraction.
 */
static inline int strcmp(const char *__cs, const char *__ct)
{
	register int __res;		/* byte from __cs, then the difference */
	unsigned long __dummy;		/* byte from __ct */

	/*
	 * Walk both strings in lock-step; leave the loop when the __cs
	 * byte is NUL (beqz) or the two bytes differ (fall through the
	 * beq), then return their difference.
	 */
	__asm__ __volatile__(
		"1:\n\t"
		"l8ui %3, %1, 0\n\t"
		"addi %1, %1, 1\n\t"
		"l8ui %2, %0, 0\n\t"
		"addi %0, %0, 1\n\t"
		"beqz %2, 2f\n\t"
		"beq %2, %3, 1b\n"
		"2:\n\t"
		"sub %2, %2, %3"
		: "=r" (__cs), "=r" (__ct), "=&r" (__res), "=&r" (__dummy)
		: "0" (__cs), "1" (__ct));

	return __res;
}

#define __HAVE_ARCH_STRNCMP
/*
 * strncmp - compare at most @__n bytes of two strings.
 * @__cs: first string
 * @__ct: second string
 * @__n:  maximum number of bytes to compare
 *
 * Returns 0 if the strings are equal within the first @__n bytes (or
 * both terminate earlier), otherwise the difference between the first
 * differing pair of bytes (compared as unsigned 8-bit values, see l8ui).
 */
static inline int strncmp(const char *__cs, const char *__ct, size_t __n)
{
	register int __res;		/* byte from __cs, then the difference */
	unsigned long __dummy;		/* byte from __ct */

	/*
	 * The initial "mov %2, %3" pre-sets __res equal to the scratch
	 * register so that the final "sub %2, %2, %3" yields exactly 0
	 * when the length limit __cs + __n (operand %6) is reached before
	 * any byte has been compared.  The loop also exits as soon as
	 * either string ends (beqz) or the bytes differ.
	 */
	__asm__ __volatile__(
		"mov %2, %3\n"
		"1:\n\t"
		"beq %0, %6, 2f\n\t"
		"l8ui %3, %1, 0\n\t"
		"addi %1, %1, 1\n\t"
		"l8ui %2, %0, 0\n\t"
		"addi %0, %0, 1\n\t"
		"beqz %2, 2f\n\t"
		"beqz %3, 2f\n\t"
		"beq %2, %3, 1b\n"
		"2:\n\t"
		"sub %2, %2, %3"
		: "=r" (__cs), "=r" (__ct), "=&r" (__res), "=&r" (__dummy)
		: "0" (__cs), "1" (__ct), "r" ((uintptr_t)__cs+__n));

	return __res;
}

#define __HAVE_ARCH_MEMSET
/* Optimized assembly implementations are provided elsewhere in the arch. */
extern void *memset(void *__s, int __c, size_t __count);
extern void *__memset(void *__s, int __c, size_t __count);

#define __HAVE_ARCH_MEMCPY
extern void *memcpy(void *__to, __const__ void *__from, size_t __n);
extern void *__memcpy(void *__to, __const__ void *__from, size_t __n);

#define __HAVE_ARCH_MEMMOVE
extern void *memmove(void *__dest, __const__ void *__src, size_t __n);
extern void *__memmove(void *__dest, __const__ void *__src, size_t __n);

/* Don't build bcopy at all ... */
#define __HAVE_ARCH_BCOPY

#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)

/*
 * For files that are not instrumented (e.g. mm/slub.c) we
 * should use the non-instrumented versions of the mem* functions.
 */

#define memcpy(dst, src, len) __memcpy(dst, src, len)
#define memmove(dst, src, len) __memmove(dst, src, len)
#define memset(s, c, n) __memset(s, c, n)

#ifndef __NO_FORTIFY
#define __NO_FORTIFY /* FORTIFY_SOURCE uses __builtin_memcpy, etc. */
#endif
#endif

#endif /* _XTENSA_STRING_H */