/* SPDX-License-Identifier: GPL-2.0 */
/*
 *  S390 version
 *    Copyright IBM Corp. 1999
 *    Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com),
 */

#ifndef _S390_STRING_H_
#define _S390_STRING_H_

#ifndef _LINUX_TYPES_H
#include <linux/types.h>
#endif

#define __HAVE_ARCH_MEMCPY	/* gcc builtin & arch function */
#define __HAVE_ARCH_MEMMOVE	/* gcc builtin & arch function */
#define __HAVE_ARCH_MEMSET	/* gcc builtin & arch function */
#define __HAVE_ARCH_MEMSET16	/* arch function */
#define __HAVE_ARCH_MEMSET32	/* arch function */
#define __HAVE_ARCH_MEMSET64	/* arch function */

void *memcpy(void *dest, const void *src, size_t n);
void *memset(void *s, int c, size_t n);
void *memmove(void *dest, const void *src, size_t n);

#ifndef CONFIG_KASAN
#define __HAVE_ARCH_MEMCHR	/* inline & arch function */
#define __HAVE_ARCH_MEMCMP	/* arch function */
#define __HAVE_ARCH_MEMSCAN	/* inline & arch function */
#define __HAVE_ARCH_STRCAT	/* inline & arch function */
#define __HAVE_ARCH_STRCMP	/* arch function */
#define __HAVE_ARCH_STRCPY	/* inline & arch function */
#define __HAVE_ARCH_STRLCAT	/* arch function */
#define __HAVE_ARCH_STRLEN	/* inline & arch function */
#define __HAVE_ARCH_STRNCAT	/* arch function */
#define __HAVE_ARCH_STRNCPY	/* arch function */
#define __HAVE_ARCH_STRNLEN	/* inline & arch function */
#define __HAVE_ARCH_STRSTR	/* arch function */

/* Prototypes for the non-inlined arch string functions. */
int memcmp(const void *s1, const void *s2, size_t n);
int strcmp(const char *s1, const char *s2);
size_t strlcat(char *dest, const char *src, size_t n);
char *strncat(char *dest, const char *src, size_t n);
char *strncpy(char *dest, const char *src, size_t n);
char *strstr(const char *s1, const char *s2);
#endif /* !CONFIG_KASAN */

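/*
 * No arch-optimized variants exist for these; leaving __HAVE_ARCH_* undefined
 * makes the kernel fall back to the generic implementations in lib/string.c.
 */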
#undef __HAVE_ARCH_STRCHR
#undef __HAVE_ARCH_STRNCHR
#undef __HAVE_ARCH_STRNCMP
#undef __HAVE_ARCH_STRPBRK
#undef __HAVE_ARCH_STRSEP
#undef __HAVE_ARCH_STRSPN

#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)

extern void *__memcpy(void *dest, const void *src, size_t n);
extern void *__memset(void *s, int c, size_t n);
extern void *__memmove(void *dest, const void *src, size_t n);

/*
 * Files that are not instrumented (e.g. mm/slub.c) should use the
 * non-instrumented versions of the mem* functions.
 */

#define memcpy(dst, src, len) __memcpy(dst, src, len)
#define memmove(dst, src, len) __memmove(dst, src, len)
#define memset(s, c, n) __memset(s, c, n)
#define strlen(s) __strlen(s)

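/*
 * Prefix a string function name with "__" so that non-instrumented files
 * pick up the uninstrumented variant (see the strlen() definition below).
 */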
#define __no_sanitize_prefix_strfunc(x) __##x

#ifndef __NO_FORTIFY
#define __NO_FORTIFY /* FORTIFY_SOURCE uses __builtin_memcpy, etc. */
#endif

#else
#define __no_sanitize_prefix_strfunc(x) x
#endif /* defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__) */

void *__memset16(uint16_t *s, uint16_t v, size_t count);
void *__memset32(uint32_t *s, uint32_t v, size_t count);
void *__memset64(uint64_t *s, uint64_t v, size_t count);

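/*
 * The memset{16,32,64}() wrappers take the count in elements and convert
 * it to the byte length expected by the arch __memset{16,32,64}() helpers.
 */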
static inline void *memset16(uint16_t *s, uint16_t v, size_t count)
{
	return __memset16(s, v, count * sizeof(v));
}

static inline void *memset32(uint32_t *s, uint32_t v, size_t count)
{
	return __memset32(s, v, count * sizeof(v));
}

static inline void *memset64(uint64_t *s, uint64_t v, size_t count)
{
	return __memset64(s, v, count * sizeof(v));
}

#if !defined(IN_ARCH_STRING_C) && (!defined(CONFIG_FORTIFY_SOURCE) || defined(__NO_FORTIFY))

#ifdef __HAVE_ARCH_MEMCHR
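/*
 * SRST scans the bytes from %[s] up to (but not including) the end address
 * in %[ret] for the character in general register 0.  Condition code 3 means
 * the scan was interrupted and must be resumed, condition code 1 ("low")
 * means the character was found and %[ret] holds its address; otherwise
 * NULL is returned.
 */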
static inline void *memchr(const void *s, int c, size_t n)
{
	const void *ret = s + n;

	asm volatile(
		"	lgr	0,%[c]\n"
		"0:	srst	%[ret],%[s]\n"
		"	jo	0b\n"
		"	jl	1f\n"
		"	la	%[ret],0\n"
		"1:"
		: [ret] "+&a" (ret), [s] "+&a" (s)
		: [c] "d" (c)
		: "cc", "memory", "0");
	return (void *) ret;
}
#endif

#ifdef __HAVE_ARCH_MEMSCAN
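/*
 * Like memchr(), but if the character is not found the address of the
 * first byte past the area (s + n) is returned instead of NULL.
 */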
static inline void *memscan(void *s, int c, size_t n)
{
	const void *ret = s + n;

	asm volatile(
		"	lgr	0,%[c]\n"
		"0:	srst	%[ret],%[s]\n"
		"	jo	0b\n"
		: [ret] "+&a" (ret), [s] "+&a" (s)
		: [c] "d" (c)
		: "cc", "memory", "0");
	return (void *) ret;
}
#endif

#ifdef __HAVE_ARCH_STRCAT
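/*
 * SRST (with general register 0 set to the NUL terminator) locates the end
 * of the destination string, then MVST appends the source string, including
 * its terminating NUL, at that position.
 */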
static inline char *strcat(char *dst, const char *src)
{
	unsigned long dummy = 0;
	char *ret = dst;

	asm volatile(
		"	lghi	0,0\n"
		"0:	srst	%[dummy],%[dst]\n"
		"	jo	0b\n"
		"1:	mvst	%[dummy],%[src]\n"
		"	jo	1b"
		: [dummy] "+&a" (dummy), [dst] "+&a" (dst), [src] "+&a" (src)
		:
		: "cc", "memory", "0");
	return ret;
}
#endif

#ifdef __HAVE_ARCH_STRCPY
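/*
 * MVST copies the source string up to and including the terminating NUL
 * (the ending character in general register 0), looping whenever the CPU
 * stops the move early (condition code 3).
 */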
static inline char *strcpy(char *dst, const char *src)
{
	char *ret = dst;

	asm volatile(
		"	lghi	0,0\n"
		"0:	mvst	%[dst],%[src]\n"
		"	jo	0b"
		: [dst] "+&a" (dst), [src] "+&a" (src)
		:
		: "cc", "memory", "0");
	return ret;
}
#endif

#if defined(__HAVE_ARCH_STRLEN) || (defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__))
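/*
 * SRST searches for the terminating NUL; with the end address (operand 1)
 * starting at 0 the search is effectively unbounded.  On completion %[end]
 * holds the address of the NUL byte, so the length is end - s.
 */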
static inline size_t __no_sanitize_prefix_strfunc(strlen)(const char *s)
{
	unsigned long end = 0;
	const char *tmp = s;

	asm volatile(
		"	lghi	0,0\n"
		"0:	srst	%[end],%[tmp]\n"
		"	jo	0b"
		: [end] "+&a" (end), [tmp] "+&a" (tmp)
		:
		: "cc", "memory", "0");
	return end - (unsigned long)s;
}
#endif

#ifdef __HAVE_ARCH_STRNLEN
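/*
 * As strlen(), but the search is bounded by s + n: if no NUL byte is found
 * within the first n bytes, %[end] keeps its initial value and n is
 * returned.
 */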
static inline size_t strnlen(const char *s, size_t n)
{
	const char *tmp = s;
	const char *end = s + n;

	asm volatile(
		"	lghi	0,0\n"
		"0:	srst	%[end],%[tmp]\n"
		"	jo	0b"
		: [end] "+&a" (end), [tmp] "+&a" (tmp)
		:
		: "cc", "memory", "0");
	return end - s;
}
#endif
#else /* IN_ARCH_STRING_C */
void *memchr(const void *s, int c, size_t n);
void *memscan(void *s, int c, size_t n);
char *strcat(char *dst, const char *src);
char *strcpy(char *dst, const char *src);
size_t strlen(const char *s);
size_t strnlen(const char *s, size_t n);
#endif /* !IN_ARCH_STRING_C */

#endif /* _S390_STRING_H_ */