/* Enumerate available IFUNC implementations of a function.  x86-64 version.
   Copyright (C) 2012-2022 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <https://www.gnu.org/licenses/>.  */

#include <assert.h>
#include <string.h>
#include <wchar.h>
#include <ifunc-impl-list.h>
#include <sysdep.h>
#include "init-arch.h"

/* Fill ARRAY of MAX elements with IFUNC implementations for function
   NAME supported on target machine and return the number of valid
   entries.  Each set of implementations for a given function is sorted in
   descending order by ISA level.  */
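
/* Notes on the macros used below (see include/ifunc-impl-list.h and the
   x86 ISA-level headers for the exact definitions; this summary is only
   a rough guide):

   - IFUNC_IMPL (i, name, func, ...) emits the set of candidate
     implementations for FUNC when NAME matches "func".

   - X86_IFUNC_IMPL_ADD_Vn (array, i, func, test, impl) records IMPL as a
     candidate for FUNC, marked usable when TEST is true on the running
     CPU.  The Vn suffix names the x86-64 ISA level the implementation is
     built for; when glibc is configured with a higher minimum ISA level,
     entries below that baseline are compiled out.

   - Plain IFUNC_IMPL_ADD is used for implementations that are built at
     every ISA level.  */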

size_t
__libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
                        size_t max)
{
  size_t i = max;

  /* Support sysdeps/x86_64/multiarch/memcmpeq.c.  */
  IFUNC_IMPL (i, name, __memcmpeq,
              X86_IFUNC_IMPL_ADD_V4 (array, i, __memcmpeq,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __memcmpeq_evex)
              X86_IFUNC_IMPL_ADD_V3 (array, i, __memcmpeq,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __memcmpeq_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, __memcmpeq,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (BMI2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __memcmpeq_avx2_rtm)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, __memcmpeq,
                                     1,
                                     __memcmpeq_sse2))

  /* Support sysdeps/x86_64/multiarch/memchr.c.  */
  IFUNC_IMPL (i, name, memchr,
              X86_IFUNC_IMPL_ADD_V4 (array, i, memchr,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __memchr_evex)
              X86_IFUNC_IMPL_ADD_V4 (array, i, memchr,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __memchr_evex_rtm)
              X86_IFUNC_IMPL_ADD_V3 (array, i, memchr,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __memchr_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, memchr,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __memchr_avx2_rtm)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, memchr,
                                     1,
                                     __memchr_sse2))

  /* Support sysdeps/x86_64/multiarch/memcmp.c.  */
  IFUNC_IMPL (i, name, memcmp,
              /* NB: If any of these names change or if any new
                 implementations are added be sure to update
                 sysdeps/x86_64/memcmp-isa-default-impl.h.  */
              X86_IFUNC_IMPL_ADD_V4 (array, i, memcmp,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)
                                      && CPU_FEATURE_USABLE (MOVBE)),
                                     __memcmp_evex_movbe)
              X86_IFUNC_IMPL_ADD_V3 (array, i, memcmp,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (BMI2)
                                      && CPU_FEATURE_USABLE (MOVBE)),
                                     __memcmp_avx2_movbe)
              X86_IFUNC_IMPL_ADD_V3 (array, i, memcmp,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (BMI2)
                                      && CPU_FEATURE_USABLE (MOVBE)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __memcmp_avx2_movbe_rtm)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, memcmp,
                                     1,
                                     __memcmp_sse2))

#ifdef SHARED
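  /* Multiple versions of the __*_chk (fortify) routines are defined only
     in libc.so, so their candidate lists are likewise limited to SHARED
     builds.  */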
  /* Support sysdeps/x86_64/multiarch/memmove_chk.c.  */
  IFUNC_IMPL (i, name, __memmove_chk,
              IFUNC_IMPL_ADD (array, i, __memmove_chk, 1,
                              __memmove_chk_erms)
              X86_IFUNC_IMPL_ADD_V4 (array, i, __memmove_chk,
                                     CPU_FEATURE_USABLE (AVX512F),
                                     __memmove_chk_avx512_no_vzeroupper)
              X86_IFUNC_IMPL_ADD_V4 (array, i, __memmove_chk,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __memmove_chk_avx512_unaligned)
              X86_IFUNC_IMPL_ADD_V4 (array, i, __memmove_chk,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __memmove_chk_avx512_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V4 (array, i, __memmove_chk,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __memmove_chk_evex_unaligned)
              X86_IFUNC_IMPL_ADD_V4 (array, i, __memmove_chk,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __memmove_chk_evex_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V3 (array, i, __memmove_chk,
                                     CPU_FEATURE_USABLE (AVX),
                                     __memmove_chk_avx_unaligned)
              X86_IFUNC_IMPL_ADD_V3 (array, i, __memmove_chk,
                                     CPU_FEATURE_USABLE (AVX),
                                     __memmove_chk_avx_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V3 (array, i, __memmove_chk,
                                     (CPU_FEATURE_USABLE (AVX)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __memmove_chk_avx_unaligned_rtm)
              X86_IFUNC_IMPL_ADD_V3 (array, i, __memmove_chk,
                                     (CPU_FEATURE_USABLE (AVX)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __memmove_chk_avx_unaligned_erms_rtm)
              /* By V3 we assume fast aligned copy.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, __memmove_chk,
                                     CPU_FEATURE_USABLE (SSSE3),
                                     __memmove_chk_ssse3)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2 (SSSE3 is too
                 optimized around aligned copy to be better as general
                 purpose memmove).  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, __memmove_chk, 1,
                                     __memmove_chk_sse2_unaligned)
              X86_IFUNC_IMPL_ADD_V2 (array, i, __memmove_chk, 1,
                                     __memmove_chk_sse2_unaligned_erms))
#endif

  /* Support sysdeps/x86_64/multiarch/memmove.c.  */
  IFUNC_IMPL (i, name, memmove,
              IFUNC_IMPL_ADD (array, i, memmove, 1,
                              __memmove_erms)
              X86_IFUNC_IMPL_ADD_V4 (array, i, memmove,
                                     CPU_FEATURE_USABLE (AVX512F),
                                     __memmove_avx512_no_vzeroupper)
              X86_IFUNC_IMPL_ADD_V4 (array, i, memmove,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __memmove_avx512_unaligned)
              X86_IFUNC_IMPL_ADD_V4 (array, i, memmove,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __memmove_avx512_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V4 (array, i, memmove,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __memmove_evex_unaligned)
              X86_IFUNC_IMPL_ADD_V4 (array, i, memmove,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __memmove_evex_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V3 (array, i, memmove,
                                     CPU_FEATURE_USABLE (AVX),
                                     __memmove_avx_unaligned)
              X86_IFUNC_IMPL_ADD_V3 (array, i, memmove,
                                     CPU_FEATURE_USABLE (AVX),
                                     __memmove_avx_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V3 (array, i, memmove,
                                     (CPU_FEATURE_USABLE (AVX)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __memmove_avx_unaligned_rtm)
              X86_IFUNC_IMPL_ADD_V3 (array, i, memmove,
                                     (CPU_FEATURE_USABLE (AVX)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __memmove_avx_unaligned_erms_rtm)
              /* By V3 we assume fast aligned copy.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, memmove,
                                     CPU_FEATURE_USABLE (SSSE3),
                                     __memmove_ssse3)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2 (SSSE3 is too
                 optimized around aligned copy to be better as general
                 purpose memmove).  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, memmove, 1,
                                     __memmove_sse2_unaligned)
              X86_IFUNC_IMPL_ADD_V2 (array, i, memmove, 1,
                                     __memmove_sse2_unaligned_erms))

  /* Support sysdeps/x86_64/multiarch/memrchr.c.  */
  IFUNC_IMPL (i, name, memrchr,
              X86_IFUNC_IMPL_ADD_V4 (array, i, memrchr,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)),
                                     __memrchr_evex)
              X86_IFUNC_IMPL_ADD_V3 (array, i, memrchr,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __memrchr_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, memrchr,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __memrchr_avx2_rtm)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, memrchr,
                                     1,
                                     __memrchr_sse2))

#ifdef SHARED
  /* Support sysdeps/x86_64/multiarch/memset_chk.c.  */
  IFUNC_IMPL (i, name, __memset_chk,
              IFUNC_IMPL_ADD (array, i, __memset_chk, 1,
                              __memset_chk_erms)
              X86_IFUNC_IMPL_ADD_V4 (array, i, __memset_chk,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __memset_chk_avx512_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V4 (array, i, __memset_chk,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __memset_chk_avx512_unaligned)
              X86_IFUNC_IMPL_ADD_V4 (array, i, __memset_chk,
                                     CPU_FEATURE_USABLE (AVX512F),
                                     __memset_chk_avx512_no_vzeroupper)
              X86_IFUNC_IMPL_ADD_V4 (array, i, __memset_chk,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __memset_chk_evex_unaligned)
              X86_IFUNC_IMPL_ADD_V4 (array, i, __memset_chk,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __memset_chk_evex_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V3 (array, i, __memset_chk,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __memset_chk_avx2_unaligned)
              X86_IFUNC_IMPL_ADD_V3 (array, i, __memset_chk,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __memset_chk_avx2_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V3 (array, i, __memset_chk,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __memset_chk_avx2_unaligned_rtm)
              X86_IFUNC_IMPL_ADD_V3 (array, i, __memset_chk,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __memset_chk_avx2_unaligned_erms_rtm)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, __memset_chk, 1,
                                     __memset_chk_sse2_unaligned)
              X86_IFUNC_IMPL_ADD_V2 (array, i, __memset_chk, 1,
                                     __memset_chk_sse2_unaligned_erms)
              )
#endif

  /* Support sysdeps/x86_64/multiarch/memset.c.  */
  IFUNC_IMPL (i, name, memset,
              IFUNC_IMPL_ADD (array, i, memset, 1,
                              __memset_erms)
              X86_IFUNC_IMPL_ADD_V4 (array, i, memset,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __memset_avx512_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V4 (array, i, memset,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __memset_avx512_unaligned)
              X86_IFUNC_IMPL_ADD_V4 (array, i, memset,
                                     CPU_FEATURE_USABLE (AVX512F),
                                     __memset_avx512_no_vzeroupper)
              X86_IFUNC_IMPL_ADD_V4 (array, i, memset,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __memset_evex_unaligned)
              X86_IFUNC_IMPL_ADD_V4 (array, i, memset,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __memset_evex_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V3 (array, i, memset,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __memset_avx2_unaligned)
              X86_IFUNC_IMPL_ADD_V3 (array, i, memset,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __memset_avx2_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V3 (array, i, memset,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __memset_avx2_unaligned_rtm)
              X86_IFUNC_IMPL_ADD_V3 (array, i, memset,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __memset_avx2_unaligned_erms_rtm)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, memset, 1,
                                     __memset_sse2_unaligned)
              X86_IFUNC_IMPL_ADD_V2 (array, i, memset, 1,
                                     __memset_sse2_unaligned_erms)
             )

  /* Support sysdeps/x86_64/multiarch/rawmemchr.c.  */
  IFUNC_IMPL (i, name, rawmemchr,
              X86_IFUNC_IMPL_ADD_V4 (array, i, rawmemchr,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __rawmemchr_evex)
              X86_IFUNC_IMPL_ADD_V4 (array, i, rawmemchr,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __rawmemchr_evex_rtm)
              X86_IFUNC_IMPL_ADD_V3 (array, i, rawmemchr,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __rawmemchr_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, rawmemchr,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __rawmemchr_avx2_rtm)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, rawmemchr,
                                     1,
                                     __rawmemchr_sse2))

  /* Support sysdeps/x86_64/multiarch/strlen.c.  */
  IFUNC_IMPL (i, name, strlen,
              X86_IFUNC_IMPL_ADD_V4 (array, i, strlen,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __strlen_evex)
              X86_IFUNC_IMPL_ADD_V4 (array, i, strlen,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __strlen_evex512)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strlen,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __strlen_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strlen,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (BMI2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __strlen_avx2_rtm)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, strlen,
                                     1,
                                     __strlen_sse2))

  /* Support sysdeps/x86_64/multiarch/strnlen.c.  */
  IFUNC_IMPL (i, name, strnlen,
              X86_IFUNC_IMPL_ADD_V4 (array, i, strnlen,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __strnlen_evex)
              X86_IFUNC_IMPL_ADD_V4 (array, i, strnlen,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __strnlen_evex512)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strnlen,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __strnlen_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strnlen,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (BMI2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __strnlen_avx2_rtm)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, strnlen,
                                     1,
                                     __strnlen_sse2))

  /* Support sysdeps/x86_64/multiarch/stpncpy.c.  */
  IFUNC_IMPL (i, name, stpncpy,
              X86_IFUNC_IMPL_ADD_V4 (array, i, stpncpy,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)),
                                     __stpncpy_evex)
              X86_IFUNC_IMPL_ADD_V3 (array, i, stpncpy,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __stpncpy_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, stpncpy,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __stpncpy_avx2_rtm)
              /* ISA V2 wrapper for sse2_unaligned implementation because
                 the sse2_unaligned implementation is also used at ISA
                 level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, stpncpy,
                                     1,
                                     __stpncpy_sse2_unaligned))

  /* Support sysdeps/x86_64/multiarch/stpcpy.c.  */
  IFUNC_IMPL (i, name, stpcpy,
              X86_IFUNC_IMPL_ADD_V4 (array, i, stpcpy,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)),
                                     __stpcpy_evex)
              X86_IFUNC_IMPL_ADD_V3 (array, i, stpcpy,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __stpcpy_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, stpcpy,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __stpcpy_avx2_rtm)
              /* ISA V2 wrapper for sse2_unaligned implementation because
                 the sse2_unaligned implementation is also used at ISA
                 level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, stpcpy,
                                     1,
                                     __stpcpy_sse2_unaligned)
              X86_IFUNC_IMPL_ADD_V1 (array, i, stpcpy,
                                     1,
                                     __stpcpy_sse2))

  /* Support sysdeps/x86_64/multiarch/strcasecmp_l.c.  */
  IFUNC_IMPL (i, name, strcasecmp,
              X86_IFUNC_IMPL_ADD_V4 (array, i, strcasecmp,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)),
                                     __strcasecmp_evex)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strcasecmp,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __strcasecmp_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strcasecmp,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __strcasecmp_avx2_rtm)
              X86_IFUNC_IMPL_ADD_V2 (array, i, strcasecmp,
                                     CPU_FEATURE_USABLE (SSE4_2),
                                     __strcasecmp_sse42)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, strcasecmp,
                                     1,
                                     __strcasecmp_sse2))

  /* Support sysdeps/x86_64/multiarch/strcasecmp_l.c.  */
  IFUNC_IMPL (i, name, strcasecmp_l,
              X86_IFUNC_IMPL_ADD_V4 (array, i, strcasecmp,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)),
                                     __strcasecmp_l_evex)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strcasecmp,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __strcasecmp_l_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strcasecmp,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __strcasecmp_l_avx2_rtm)
              X86_IFUNC_IMPL_ADD_V2 (array, i, strcasecmp_l,
                                     CPU_FEATURE_USABLE (SSE4_2),
                                     __strcasecmp_l_sse42)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, strcasecmp_l,
                                     1,
                                     __strcasecmp_l_sse2))

  /* Support sysdeps/x86_64/multiarch/strcat.c.  */
  IFUNC_IMPL (i, name, strcat,
              X86_IFUNC_IMPL_ADD_V4 (array, i, strcat,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)),
                                     __strcat_evex)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strcat,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __strcat_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strcat,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __strcat_avx2_rtm)
              /* ISA V2 wrapper for sse2_unaligned implementation because
                 the sse2_unaligned implementation is also used at ISA
                 level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, strcat,
                                     1,
                                     __strcat_sse2_unaligned)
              X86_IFUNC_IMPL_ADD_V1 (array, i, strcat,
                                     1,
                                     __strcat_sse2))

  /* Support sysdeps/x86_64/multiarch/strchr.c.  */
  IFUNC_IMPL (i, name, strchr,
              X86_IFUNC_IMPL_ADD_V4 (array, i, strchr,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __strchr_evex)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strchr,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __strchr_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strchr,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (BMI2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __strchr_avx2_rtm)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, strchr,
                                     1,
                                     __strchr_sse2)
              X86_IFUNC_IMPL_ADD_V1 (array, i, strchr,
                                     1,
                                     __strchr_sse2_no_bsf))

  /* Support sysdeps/x86_64/multiarch/strchrnul.c.  */
  IFUNC_IMPL (i, name, strchrnul,
              X86_IFUNC_IMPL_ADD_V4 (array, i, strchrnul,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __strchrnul_evex)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strchrnul,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __strchrnul_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strchrnul,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (BMI2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __strchrnul_avx2_rtm)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, strchrnul,
                                     1,
                                     __strchrnul_sse2))

  /* Support sysdeps/x86_64/multiarch/strrchr.c.  */
  IFUNC_IMPL (i, name, strrchr,
              X86_IFUNC_IMPL_ADD_V4 (array, i, strrchr,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)),
                                     __strrchr_evex)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strrchr,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __strrchr_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strrchr,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __strrchr_avx2_rtm)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, strrchr,
                                     1,
                                     __strrchr_sse2))

  /* Support sysdeps/x86_64/multiarch/strcmp.c.  */
  IFUNC_IMPL (i, name, strcmp,
              X86_IFUNC_IMPL_ADD_V4 (array, i, strcmp,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __strcmp_evex)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strcmp,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __strcmp_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strcmp,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __strcmp_avx2_rtm)
              X86_IFUNC_IMPL_ADD_V2 (array, i, strcmp,
                                     CPU_FEATURE_USABLE (SSE4_2),
                                     __strcmp_sse42)
              /* ISA V2 wrapper for SSE2 implementations because the SSE2
                 implementations are also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, strcmp,
                                     1,
                                     __strcmp_sse2_unaligned)
              X86_IFUNC_IMPL_ADD_V2 (array, i, strcmp,
                                     1,
                                     __strcmp_sse2))

  /* Support sysdeps/x86_64/multiarch/strcpy.c.  */
  IFUNC_IMPL (i, name, strcpy,
              X86_IFUNC_IMPL_ADD_V4 (array, i, strcpy,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)),
                                     __strcpy_evex)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strcpy,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __strcpy_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strcpy,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __strcpy_avx2_rtm)
              /* ISA V2 wrapper for sse2_unaligned implementation because
                 the sse2_unaligned implementation is also used at ISA
                 level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, strcpy,
                                     1,
                                     __strcpy_sse2_unaligned)
              X86_IFUNC_IMPL_ADD_V1 (array, i, strcpy,
                                     1,
                                     __strcpy_sse2))

  /* Support sysdeps/x86_64/multiarch/strcspn.c.  */
  IFUNC_IMPL (i, name, strcspn,
              /* All implementations of strcspn are built at all ISA
                 levels.  */
              IFUNC_IMPL_ADD (array, i, strcspn, CPU_FEATURE_USABLE (SSE4_2),
                              __strcspn_sse42)
              IFUNC_IMPL_ADD (array, i, strcspn, 1, __strcspn_generic))

  /* Support sysdeps/x86_64/multiarch/strncase_l.c.  */
  IFUNC_IMPL (i, name, strncasecmp,
              X86_IFUNC_IMPL_ADD_V4 (array, i, strncasecmp,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)),
                                     __strncasecmp_evex)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strncasecmp,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __strncasecmp_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strncasecmp,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __strncasecmp_avx2_rtm)
              X86_IFUNC_IMPL_ADD_V2 (array, i, strncasecmp,
                                     CPU_FEATURE_USABLE (SSE4_2),
                                     __strncasecmp_sse42)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, strncasecmp,
                                     1,
                                     __strncasecmp_sse2))

  /* Support sysdeps/x86_64/multiarch/strncase_l.c.  */
  IFUNC_IMPL (i, name, strncasecmp_l,
              X86_IFUNC_IMPL_ADD_V4 (array, i, strncasecmp,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)),
                                     __strncasecmp_l_evex)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strncasecmp,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __strncasecmp_l_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strncasecmp,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __strncasecmp_l_avx2_rtm)
              X86_IFUNC_IMPL_ADD_V2 (array, i, strncasecmp_l,
                                     CPU_FEATURE_USABLE (SSE4_2),
                                     __strncasecmp_l_sse42)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, strncasecmp_l,
                                     1,
                                     __strncasecmp_l_sse2))

  /* Support sysdeps/x86_64/multiarch/strncat.c.  */
  IFUNC_IMPL (i, name, strncat,
              X86_IFUNC_IMPL_ADD_V4 (array, i, strncat,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)),
                                     __strncat_evex)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strncat,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __strncat_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strncat,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __strncat_avx2_rtm)
              /* ISA V2 wrapper for sse2_unaligned implementation because
                 the sse2_unaligned implementation is also used at ISA
                 level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, strncat,
                                     1,
                                     __strncat_sse2_unaligned))

  /* Support sysdeps/x86_64/multiarch/strncpy.c.  */
  IFUNC_IMPL (i, name, strncpy,
              X86_IFUNC_IMPL_ADD_V4 (array, i, strncpy,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)),
                                     __strncpy_evex)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strncpy,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __strncpy_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strncpy,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __strncpy_avx2_rtm)
              /* ISA V2 wrapper for sse2_unaligned implementation because
                 the sse2_unaligned implementation is also used at ISA
                 level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, strncpy,
                                     1,
                                     __strncpy_sse2_unaligned))

  /* Support sysdeps/x86_64/multiarch/strpbrk.c.  */
  IFUNC_IMPL (i, name, strpbrk,
              /* All implementations of strpbrk are built at all ISA
                 levels.  */
              IFUNC_IMPL_ADD (array, i, strpbrk, CPU_FEATURE_USABLE (SSE4_2),
                              __strpbrk_sse42)
              IFUNC_IMPL_ADD (array, i, strpbrk, 1, __strpbrk_generic))


  /* Support sysdeps/x86_64/multiarch/strspn.c.  */
  IFUNC_IMPL (i, name, strspn,
              /* All implementations of strspn are built at all ISA
                 levels.  */
              IFUNC_IMPL_ADD (array, i, strspn, CPU_FEATURE_USABLE (SSE4_2),
                              __strspn_sse42)
              IFUNC_IMPL_ADD (array, i, strspn, 1, __strspn_generic))

  /* Support sysdeps/x86_64/multiarch/strstr.c.  */
  IFUNC_IMPL (i, name, strstr,
              IFUNC_IMPL_ADD (array, i, strstr,
                              (CPU_FEATURE_USABLE (AVX512VL)
                               && CPU_FEATURE_USABLE (AVX512BW)
                               && CPU_FEATURE_USABLE (AVX512DQ)
                               && CPU_FEATURE_USABLE (BMI2)),
                              __strstr_avx512)
              IFUNC_IMPL_ADD (array, i, strstr, 1, __strstr_sse2_unaligned)
              IFUNC_IMPL_ADD (array, i, strstr, 1, __strstr_generic))

  /* Support sysdeps/x86_64/multiarch/wcschr.c.  */
  IFUNC_IMPL (i, name, wcschr,
              X86_IFUNC_IMPL_ADD_V4 (array, i, wcschr,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __wcschr_evex)
              X86_IFUNC_IMPL_ADD_V3 (array, i, wcschr,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __wcschr_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, wcschr,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (BMI2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __wcschr_avx2_rtm)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, wcschr,
                                     1,
                                     __wcschr_sse2))

  /* Support sysdeps/x86_64/multiarch/wcsrchr.c.  */
  IFUNC_IMPL (i, name, wcsrchr,
              X86_IFUNC_IMPL_ADD_V4 (array, i, wcsrchr,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __wcsrchr_evex)
              X86_IFUNC_IMPL_ADD_V3 (array, i, wcsrchr,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __wcsrchr_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, wcsrchr,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __wcsrchr_avx2_rtm)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, wcsrchr,
                                     1,
                                     __wcsrchr_sse2))

  /* Support sysdeps/x86_64/multiarch/wcscmp.c.  */
  IFUNC_IMPL (i, name, wcscmp,
              X86_IFUNC_IMPL_ADD_V4 (array, i, wcscmp,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __wcscmp_evex)
              X86_IFUNC_IMPL_ADD_V3 (array, i, wcscmp,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __wcscmp_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, wcscmp,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __wcscmp_avx2_rtm)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, wcscmp,
                                     1,
                                     __wcscmp_sse2))

  /* Support sysdeps/x86_64/multiarch/wcsncmp.c.  */
  IFUNC_IMPL (i, name, wcsncmp,
              X86_IFUNC_IMPL_ADD_V4 (array, i, wcsncmp,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __wcsncmp_evex)
              X86_IFUNC_IMPL_ADD_V3 (array, i, wcsncmp,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __wcsncmp_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, wcsncmp,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __wcsncmp_avx2_rtm)
              /* ISA V2 wrapper for GENERIC implementation because the
                 GENERIC implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, wcsncmp,
                                     1,
                                     __wcsncmp_generic))

  /* Support sysdeps/x86_64/multiarch/wcscpy.c.  */
  IFUNC_IMPL (i, name, wcscpy,
              /* ISA V4 wrapper for SSSE3 implementation because
                 the SSSE3 implementation is also used at ISA
                 level 3/4.  */
              X86_IFUNC_IMPL_ADD_V4 (array, i, wcscpy,
                                     CPU_FEATURE_USABLE (SSSE3),
                                     __wcscpy_ssse3)
              X86_IFUNC_IMPL_ADD_V1 (array, i, wcscpy,
                                     1,
                                     __wcscpy_generic))

  /* Support sysdeps/x86_64/multiarch/wcslen.c.  */
  IFUNC_IMPL (i, name, wcslen,
              X86_IFUNC_IMPL_ADD_V4 (array, i, wcslen,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __wcslen_evex)
              X86_IFUNC_IMPL_ADD_V4 (array, i, wcslen,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __wcslen_evex512)
              X86_IFUNC_IMPL_ADD_V3 (array, i, wcslen,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __wcslen_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, wcslen,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (BMI2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __wcslen_avx2_rtm)
              X86_IFUNC_IMPL_ADD_V2 (array, i, wcslen,
                                     CPU_FEATURE_USABLE (SSE4_1),
                                     __wcslen_sse4_1)
              X86_IFUNC_IMPL_ADD_V1 (array, i, wcslen,
                                     1,
                                     __wcslen_sse2))

  /* Support sysdeps/x86_64/multiarch/wcsnlen.c.  */
  IFUNC_IMPL (i, name, wcsnlen,
              X86_IFUNC_IMPL_ADD_V4 (array, i, wcsnlen,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __wcsnlen_evex)
              X86_IFUNC_IMPL_ADD_V4 (array, i, wcsnlen,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __wcsnlen_evex512)
              X86_IFUNC_IMPL_ADD_V3 (array, i, wcsnlen,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __wcsnlen_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, wcsnlen,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (BMI2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __wcsnlen_avx2_rtm)
              X86_IFUNC_IMPL_ADD_V2 (array, i, wcsnlen,
                                     CPU_FEATURE_USABLE (SSE4_1),
                                     __wcsnlen_sse4_1)
              X86_IFUNC_IMPL_ADD_V1 (array, i, wcsnlen,
                                     1,
                                     __wcsnlen_generic))

  /* Support sysdeps/x86_64/multiarch/wmemchr.c.  */
  IFUNC_IMPL (i, name, wmemchr,
              X86_IFUNC_IMPL_ADD_V4 (array, i, wmemchr,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __wmemchr_evex)
              X86_IFUNC_IMPL_ADD_V4 (array, i, wmemchr,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __wmemchr_evex_rtm)
              X86_IFUNC_IMPL_ADD_V3 (array, i, wmemchr,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __wmemchr_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, wmemchr,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __wmemchr_avx2_rtm)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, wmemchr,
                                     1,
                                     __wmemchr_sse2))

  /* Support sysdeps/x86_64/multiarch/wmemcmp.c.  */
  IFUNC_IMPL (i, name, wmemcmp,
              X86_IFUNC_IMPL_ADD_V4 (array, i, wmemcmp,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)
                                      && CPU_FEATURE_USABLE (MOVBE)),
                                     __wmemcmp_evex_movbe)
              X86_IFUNC_IMPL_ADD_V3 (array, i, wmemcmp,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (BMI2)
                                      && CPU_FEATURE_USABLE (MOVBE)),
                                     __wmemcmp_avx2_movbe)
              X86_IFUNC_IMPL_ADD_V3 (array, i, wmemcmp,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (BMI2)
                                      && CPU_FEATURE_USABLE (MOVBE)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __wmemcmp_avx2_movbe_rtm)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, wmemcmp,
                                     1,
                                     __wmemcmp_sse2))

  /* Support sysdeps/x86_64/multiarch/wmemset.c.  */
  IFUNC_IMPL (i, name, wmemset,
              X86_IFUNC_IMPL_ADD_V4 (array, i, wmemset,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __wmemset_evex_unaligned)
              X86_IFUNC_IMPL_ADD_V4 (array, i, wmemset,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __wmemset_avx512_unaligned)
              X86_IFUNC_IMPL_ADD_V3 (array, i, wmemset,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __wmemset_avx2_unaligned)
              X86_IFUNC_IMPL_ADD_V3 (array, i, wmemset,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __wmemset_avx2_unaligned_rtm)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, wmemset, 1,
                                     __wmemset_sse2_unaligned))

#ifdef SHARED
  /* Support sysdeps/x86_64/multiarch/memcpy_chk.c.  */
  IFUNC_IMPL (i, name, __memcpy_chk,
              IFUNC_IMPL_ADD (array, i, __memcpy_chk, 1,
                              __memcpy_chk_erms)
              X86_IFUNC_IMPL_ADD_V4 (array, i, __memcpy_chk,
                                     CPU_FEATURE_USABLE (AVX512F),
                                     __memcpy_chk_avx512_no_vzeroupper)
              X86_IFUNC_IMPL_ADD_V4 (array, i, __memcpy_chk,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __memcpy_chk_avx512_unaligned)
              X86_IFUNC_IMPL_ADD_V4 (array, i, __memcpy_chk,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __memcpy_chk_avx512_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V4 (array, i, __memcpy_chk,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __memcpy_chk_evex_unaligned)
              X86_IFUNC_IMPL_ADD_V4 (array, i, __memcpy_chk,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __memcpy_chk_evex_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V3 (array, i, __memcpy_chk,
                                     CPU_FEATURE_USABLE (AVX),
                                     __memcpy_chk_avx_unaligned)
              X86_IFUNC_IMPL_ADD_V3 (array, i, __memcpy_chk,
                                     CPU_FEATURE_USABLE (AVX),
                                     __memcpy_chk_avx_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V3 (array, i, __memcpy_chk,
                                     (CPU_FEATURE_USABLE (AVX)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __memcpy_chk_avx_unaligned_rtm)
              X86_IFUNC_IMPL_ADD_V3 (array, i, __memcpy_chk,
                                     (CPU_FEATURE_USABLE (AVX)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __memcpy_chk_avx_unaligned_erms_rtm)
              /* By V3 we assume fast aligned copy.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, __memcpy_chk,
                                     CPU_FEATURE_USABLE (SSSE3),
                                     __memcpy_chk_ssse3)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2 (SSSE3 is too
                 optimized around aligned copy to be better as general
                 purpose memmove).  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, __memcpy_chk, 1,
                                     __memcpy_chk_sse2_unaligned)
              X86_IFUNC_IMPL_ADD_V2 (array, i, __memcpy_chk, 1,
                                     __memcpy_chk_sse2_unaligned_erms))
#endif

  /* Support sysdeps/x86_64/multiarch/memcpy.c.  */
  IFUNC_IMPL (i, name, memcpy,
              IFUNC_IMPL_ADD (array, i, memcpy, 1,
                              __memcpy_erms)
              X86_IFUNC_IMPL_ADD_V4 (array, i, memcpy,
                                     CPU_FEATURE_USABLE (AVX512F),
                                     __memcpy_avx512_no_vzeroupper)
              X86_IFUNC_IMPL_ADD_V4 (array, i, memcpy,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __memcpy_avx512_unaligned)
              X86_IFUNC_IMPL_ADD_V4 (array, i, memcpy,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __memcpy_avx512_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V4 (array, i, memcpy,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __memcpy_evex_unaligned)
              X86_IFUNC_IMPL_ADD_V4 (array, i, memcpy,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __memcpy_evex_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V3 (array, i, memcpy,
                                     CPU_FEATURE_USABLE (AVX),
                                     __memcpy_avx_unaligned)
              X86_IFUNC_IMPL_ADD_V3 (array, i, memcpy,
                                     CPU_FEATURE_USABLE (AVX),
                                     __memcpy_avx_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V3 (array, i, memcpy,
                                     (CPU_FEATURE_USABLE (AVX)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __memcpy_avx_unaligned_rtm)
              X86_IFUNC_IMPL_ADD_V3 (array, i, memcpy,
                                     (CPU_FEATURE_USABLE (AVX)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __memcpy_avx_unaligned_erms_rtm)
              /* By V3 we assume fast aligned copy.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, memcpy,
                                     CPU_FEATURE_USABLE (SSSE3),
                                     __memcpy_ssse3)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2 (SSSE3 is too
                 optimized around aligned copy to be better as general
                 purpose memmove).  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, memcpy, 1,
                                     __memcpy_sse2_unaligned)
              X86_IFUNC_IMPL_ADD_V2 (array, i, memcpy, 1,
                                     __memcpy_sse2_unaligned_erms))

#ifdef SHARED
  /* Support sysdeps/x86_64/multiarch/mempcpy_chk.c.  */
  IFUNC_IMPL (i, name, __mempcpy_chk,
              IFUNC_IMPL_ADD (array, i, __mempcpy_chk, 1,
                              __mempcpy_chk_erms)
              X86_IFUNC_IMPL_ADD_V4 (array, i, __mempcpy_chk,
                                     CPU_FEATURE_USABLE (AVX512F),
                                     __mempcpy_chk_avx512_no_vzeroupper)
              X86_IFUNC_IMPL_ADD_V4 (array, i, __mempcpy_chk,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __mempcpy_chk_avx512_unaligned)
              X86_IFUNC_IMPL_ADD_V4 (array, i, __mempcpy_chk,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __mempcpy_chk_avx512_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V4 (array, i, __mempcpy_chk,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __mempcpy_chk_evex_unaligned)
              X86_IFUNC_IMPL_ADD_V4 (array, i, __mempcpy_chk,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __mempcpy_chk_evex_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V3 (array, i, __mempcpy_chk,
                                     CPU_FEATURE_USABLE (AVX),
                                     __mempcpy_chk_avx_unaligned)
              X86_IFUNC_IMPL_ADD_V3 (array, i, __mempcpy_chk,
                                     CPU_FEATURE_USABLE (AVX),
                                     __mempcpy_chk_avx_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V3 (array, i, __mempcpy_chk,
                                     (CPU_FEATURE_USABLE (AVX)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __mempcpy_chk_avx_unaligned_rtm)
              X86_IFUNC_IMPL_ADD_V3 (array, i, __mempcpy_chk,
                                     (CPU_FEATURE_USABLE (AVX)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __mempcpy_chk_avx_unaligned_erms_rtm)
              /* By V3 we assume fast aligned copy.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, __mempcpy_chk,
                                     CPU_FEATURE_USABLE (SSSE3),
                                     __mempcpy_chk_ssse3)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2 (SSSE3 is too
                 optimized around aligned copy to be better as general
                 purpose memmove).  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, __mempcpy_chk, 1,
                                     __mempcpy_chk_sse2_unaligned)
              X86_IFUNC_IMPL_ADD_V2 (array, i, __mempcpy_chk, 1,
                                     __mempcpy_chk_sse2_unaligned_erms))
#endif

  /* Support sysdeps/x86_64/multiarch/mempcpy.c.  */
  IFUNC_IMPL (i, name, mempcpy,
              IFUNC_IMPL_ADD (array, i, mempcpy, 1,
                              __mempcpy_erms)
              X86_IFUNC_IMPL_ADD_V4 (array, i, mempcpy,
                                     CPU_FEATURE_USABLE (AVX512F),
                                     __mempcpy_avx512_no_vzeroupper)
              X86_IFUNC_IMPL_ADD_V4 (array, i, mempcpy,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __mempcpy_avx512_unaligned)
              X86_IFUNC_IMPL_ADD_V4 (array, i, mempcpy,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __mempcpy_avx512_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V4 (array, i, mempcpy,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __mempcpy_evex_unaligned)
              X86_IFUNC_IMPL_ADD_V4 (array, i, mempcpy,
                                     CPU_FEATURE_USABLE (AVX512VL),
                                     __mempcpy_evex_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V3 (array, i, mempcpy,
                                     CPU_FEATURE_USABLE (AVX),
                                     __mempcpy_avx_unaligned)
              X86_IFUNC_IMPL_ADD_V3 (array, i, mempcpy,
                                     CPU_FEATURE_USABLE (AVX),
                                     __mempcpy_avx_unaligned_erms)
              X86_IFUNC_IMPL_ADD_V3 (array, i, mempcpy,
                                     (CPU_FEATURE_USABLE (AVX)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __mempcpy_avx_unaligned_rtm)
              X86_IFUNC_IMPL_ADD_V3 (array, i, mempcpy,
                                     (CPU_FEATURE_USABLE (AVX)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __mempcpy_avx_unaligned_erms_rtm)
              /* By V3 we assume fast aligned copy.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, mempcpy,
                                     CPU_FEATURE_USABLE (SSSE3),
                                     __mempcpy_ssse3)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2 (SSSE3 is too
                 optimized around aligned copy to be better as general
                 purpose memmove).  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, mempcpy, 1,
                                     __mempcpy_sse2_unaligned)
              X86_IFUNC_IMPL_ADD_V2 (array, i, mempcpy, 1,
                                     __mempcpy_sse2_unaligned_erms))

  /* Support sysdeps/x86_64/multiarch/strncmp.c.  */
  IFUNC_IMPL (i, name, strncmp,
              X86_IFUNC_IMPL_ADD_V4 (array, i, strncmp,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)),
                                     __strncmp_evex)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strncmp,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __strncmp_avx2)
              X86_IFUNC_IMPL_ADD_V3 (array, i, strncmp,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __strncmp_avx2_rtm)
              X86_IFUNC_IMPL_ADD_V2 (array, i, strncmp,
                                     CPU_FEATURE_USABLE (SSE4_2),
                                     __strncmp_sse42)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, strncmp,
                                     1,
                                     __strncmp_sse2))

#ifdef SHARED
  /* Support sysdeps/x86_64/multiarch/wmemset_chk.c.  */
  IFUNC_IMPL (i, name, __wmemset_chk,
              X86_IFUNC_IMPL_ADD_V4 (array, i, __wmemset_chk,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __wmemset_chk_evex_unaligned)
              X86_IFUNC_IMPL_ADD_V4 (array, i, __wmemset_chk,
                                     (CPU_FEATURE_USABLE (AVX512VL)
                                      && CPU_FEATURE_USABLE (AVX512BW)
                                      && CPU_FEATURE_USABLE (BMI2)),
                                     __wmemset_chk_avx512_unaligned)
              X86_IFUNC_IMPL_ADD_V3 (array, i, __wmemset_chk,
                                     CPU_FEATURE_USABLE (AVX2),
                                     __wmemset_chk_avx2_unaligned)
              X86_IFUNC_IMPL_ADD_V3 (array, i, __wmemset_chk,
                                     (CPU_FEATURE_USABLE (AVX2)
                                      && CPU_FEATURE_USABLE (RTM)),
                                     __wmemset_chk_avx2_unaligned_rtm)
              /* ISA V2 wrapper for SSE2 implementation because the SSE2
                 implementation is also used at ISA level 2.  */
              X86_IFUNC_IMPL_ADD_V2 (array, i, __wmemset_chk, 1,
                                     __wmemset_chk_sse2_unaligned))
#endif

  return 0;
}
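
/* Usage sketch: the lists built above are consumed by callers such as the
   glibc string test and benchmark harness, roughly like this (MAX_IFUNC
   and run_one are illustrative placeholders; see string/test-string.h for
   the real consumer):

     struct libc_ifunc_impl list[MAX_IFUNC];
     size_t n = __libc_ifunc_impl_list ("memcpy", list, MAX_IFUNC);
     for (size_t k = 0; k < n; ++k)
       if (list[k].usable)
         run_one (list[k].name, list[k].fn);
*/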