/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_STRING_64_H
#define _ASM_X86_STRING_64_H

#ifdef __KERNEL__
#include <linux/jump_label.h>

/* Written 2002 by Andi Kleen */
/* Even with __builtin_ the compiler may decide to use the
   out-of-line function. */

#define __HAVE_ARCH_MEMCPY 1
extern void *memcpy(void *to, const void *from, size_t len);
extern void *__memcpy(void *to, const void *from, size_t len);

#define __HAVE_ARCH_MEMSET
void *memset(void *s, int c, size_t n);
void *__memset(void *s, int c, size_t n);

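/*
 * memset16/32/64() store @n copies of the pattern @v with a single
 * REP STOS{W,L,Q}.  Note that @n counts elements of the given width,
 * not bytes.  The dummy outputs d0/d1 exist only to tell the compiler
 * that RCX (the count) and RDI (the destination) are clobbered.
 */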
#define __HAVE_ARCH_MEMSET16
static inline void *memset16(uint16_t *s, uint16_t v, size_t n)
{
	long d0, d1;
	asm volatile("rep\n\t"
		     "stosw"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}

#define __HAVE_ARCH_MEMSET32
static inline void *memset32(uint32_t *s, uint32_t v, size_t n)
{
	long d0, d1;
	asm volatile("rep\n\t"
		     "stosl"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}

#define __HAVE_ARCH_MEMSET64
static inline void *memset64(uint64_t *s, uint64_t v, size_t n)
{
	long d0, d1;
	asm volatile("rep\n\t"
		     "stosq"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}
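/*
 * Example (hypothetical caller): fill one scanline of an RGB565
 * framebuffer with a single pixel value; the count is in pixels
 * (elements), not bytes:
 *
 *	memset16(line, 0xf800, width);
 */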

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t count);
void *__memmove(void *dest, const void *src, size_t count);

int memcmp(const void *cs, const void *ct, size_t count);
size_t strlen(const char *s);
char *strcpy(char *dest, const char *src);
char *strcat(char *dest, const char *src);
int strcmp(const char *cs, const char *ct);

#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)

/*
 * For files that are not instrumented (e.g. mm/slub.c) we
 * should use the non-instrumented versions of the mem* functions.
 */

#undef memcpy
#define memcpy(dst, src, len) __memcpy(dst, src, len)
#define memmove(dst, src, len) __memmove(dst, src, len)
#define memset(s, c, n) __memset(s, c, n)

#ifndef __NO_FORTIFY
#define __NO_FORTIFY /* FORTIFY_SOURCE uses __builtin_memcpy, etc. */
#endif

#endif

#ifdef CONFIG_ARCH_HAS_UACCESS_FLUSHCACHE
#define __HAVE_ARCH_MEMCPY_FLUSHCACHE 1
void __memcpy_flushcache(void *dst, const void *src, size_t cnt);
static __always_inline void memcpy_flushcache(void *dst, const void *src, size_t cnt)
{
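	/*
	 * For small compile-time-constant sizes, open-code the copy with
	 * MOVNTI non-temporal stores so the data goes straight to memory
	 * without polluting the cache; everything else takes the
	 * out-of-line __memcpy_flushcache() path below.
	 */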
	if (__builtin_constant_p(cnt)) {
		switch (cnt) {
		case 4:
			asm ("movntil %1, %0" : "=m"(*(u32 *)dst) : "r"(*(u32 *)src));
			return;
		case 8:
			asm ("movntiq %1, %0" : "=m"(*(u64 *)dst) : "r"(*(u64 *)src));
			return;
		case 16:
			asm ("movntiq %1, %0" : "=m"(*(u64 *)dst) : "r"(*(u64 *)src));
			asm ("movntiq %1, %0" : "=m"(*(u64 *)(dst + 8)) : "r"(*(u64 *)(src + 8)));
			return;
		}
	}
	__memcpy_flushcache(dst, src, cnt);
}
#endif

#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_64_H */