/* Enumerate available IFUNC implementations of a function.  x86-64 version.
   Copyright (C) 2012-2023 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <https://www.gnu.org/licenses/>.  */
#include <ifunc-impl-list.h>
#include "init-arch.h"
/* Fill ARRAY of MAX elements with IFUNC implementations for function
   NAME supported on target machine and return the number of valid
   entries.  Each set of implementations for a given function is sorted
   in descending order by ISA level.  */
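/* Rough sketch of the machinery used below.  This is simplified and
   illustrative only, not the verbatim definitions from
   <ifunc-impl-list.h> and the x86 ISA-level macros:

     struct libc_ifunc_impl     -- one enumerated implementation:
       const char *name;           e.g. "__memchr_avx2"
       void (*fn) (void);          its entry point
       bool usable;                result of the CPU_FEATURE_USABLE test

     IFUNC_IMPL (i, name, func, ...)
       -- if NAME matches "func", evaluate the entries listed after it
          and return the number of slots filled in ARRAY.

     X86_IFUNC_IMPL_ADD_V{1,2,3,4} (array, i, func, usable, impl)
       -- record IMPL together with its USABLE feature test; the Vn
          suffix is the x86-64 ISA level IMPL requires, so entries below
          the build's minimum ISA level can be compiled out.  */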
size_t
__libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
                        size_t max)
{
  /* Support sysdeps/x86_64/multiarch/memcmpeq.c.  */
  IFUNC_IMPL (i, name, __memcmpeq,
      X86_IFUNC_IMPL_ADD_V4 (array, i, __memcmpeq,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __memcmpeq_evex)
      X86_IFUNC_IMPL_ADD_V3 (array, i, __memcmpeq,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (BMI2)),
          __memcmpeq_avx2)
      X86_IFUNC_IMPL_ADD_V3 (array, i, __memcmpeq,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (BMI2)
           && CPU_FEATURE_USABLE (RTM)),
          __memcmpeq_avx2_rtm)
      /* ISA V2 wrapper for SSE2 implementation because the SSE2
         implementation is also used at ISA level 2.  */
      X86_IFUNC_IMPL_ADD_V2 (array, i, __memcmpeq, 1,
          __memcmpeq_sse2))
  /* Support sysdeps/x86_64/multiarch/memchr.c.  */
  IFUNC_IMPL (i, name, memchr,
      X86_IFUNC_IMPL_ADD_V4 (array, i, memchr,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __memchr_evex)
      X86_IFUNC_IMPL_ADD_V4 (array, i, memchr,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __memchr_evex512)
      X86_IFUNC_IMPL_ADD_V4 (array, i, memchr,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __memchr_evex_rtm)
      X86_IFUNC_IMPL_ADD_V3 (array, i, memchr,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (BMI2)),
          __memchr_avx2)
      X86_IFUNC_IMPL_ADD_V3 (array, i, memchr,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (BMI2)
           && CPU_FEATURE_USABLE (RTM)),
          __memchr_avx2_rtm)
      /* ISA V2 wrapper for SSE2 implementation because the SSE2
         implementation is also used at ISA level 2.  */
      X86_IFUNC_IMPL_ADD_V2 (array, i, memchr, 1,
          __memchr_sse2))
  /* Support sysdeps/x86_64/multiarch/memcmp.c.  */
  IFUNC_IMPL (i, name, memcmp,
      /* NB: If any of these names change or if any new
         implementations are added be sure to update
         sysdeps/x86_64/memcmp-isa-default-impl.h.  */
      X86_IFUNC_IMPL_ADD_V4 (array, i, memcmp,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)
           && CPU_FEATURE_USABLE (MOVBE)),
          __memcmp_evex_movbe)
      X86_IFUNC_IMPL_ADD_V3 (array, i, memcmp,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (BMI2)
           && CPU_FEATURE_USABLE (MOVBE)),
          __memcmp_avx2_movbe)
      X86_IFUNC_IMPL_ADD_V3 (array, i, memcmp,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (BMI2)
           && CPU_FEATURE_USABLE (MOVBE)
           && CPU_FEATURE_USABLE (RTM)),
          __memcmp_avx2_movbe_rtm)
      /* ISA V2 wrapper for SSE2 implementation because the SSE2
         implementation is also used at ISA level 2.  */
      X86_IFUNC_IMPL_ADD_V2 (array, i, memcmp, 1,
          __memcmp_sse2))
  /* Support sysdeps/x86_64/multiarch/memmove_chk.c.  */
  IFUNC_IMPL (i, name, __memmove_chk,
      IFUNC_IMPL_ADD (array, i, __memmove_chk, 1,
          __memmove_chk_erms)
      X86_IFUNC_IMPL_ADD_V4 (array, i, __memmove_chk,
          CPU_FEATURE_USABLE (AVX512F),
          __memmove_chk_avx512_no_vzeroupper)
      X86_IFUNC_IMPL_ADD_V4 (array, i, __memmove_chk,
          CPU_FEATURE_USABLE (AVX512VL),
          __memmove_chk_avx512_unaligned)
      X86_IFUNC_IMPL_ADD_V4 (array, i, __memmove_chk,
          CPU_FEATURE_USABLE (AVX512VL),
          __memmove_chk_avx512_unaligned_erms)
      X86_IFUNC_IMPL_ADD_V4 (array, i, __memmove_chk,
          CPU_FEATURE_USABLE (AVX512VL),
          __memmove_chk_evex_unaligned)
      X86_IFUNC_IMPL_ADD_V4 (array, i, __memmove_chk,
          CPU_FEATURE_USABLE (AVX512VL),
          __memmove_chk_evex_unaligned_erms)
      X86_IFUNC_IMPL_ADD_V3 (array, i, __memmove_chk,
          CPU_FEATURE_USABLE (AVX),
          __memmove_chk_avx_unaligned)
      X86_IFUNC_IMPL_ADD_V3 (array, i, __memmove_chk,
          CPU_FEATURE_USABLE (AVX),
          __memmove_chk_avx_unaligned_erms)
      X86_IFUNC_IMPL_ADD_V3 (array, i, __memmove_chk,
          (CPU_FEATURE_USABLE (AVX)
           && CPU_FEATURE_USABLE (RTM)),
          __memmove_chk_avx_unaligned_rtm)
      X86_IFUNC_IMPL_ADD_V3 (array, i, __memmove_chk,
          (CPU_FEATURE_USABLE (AVX)
           && CPU_FEATURE_USABLE (RTM)),
          __memmove_chk_avx_unaligned_erms_rtm)
      /* By V3 we assume fast aligned copy.  */
      X86_IFUNC_IMPL_ADD_V2 (array, i, __memmove_chk,
          CPU_FEATURE_USABLE (SSSE3),
          __memmove_chk_ssse3)
      /* ISA V2 wrapper for SSE2 implementation because the SSE2
         implementation is also used at ISA level 2 (SSSE3 is too
         optimized around aligned copy to be better as general
         purpose memmove).  */
      X86_IFUNC_IMPL_ADD_V2 (array, i, __memmove_chk, 1,
          __memmove_chk_sse2_unaligned)
      X86_IFUNC_IMPL_ADD_V2 (array, i, __memmove_chk, 1,
          __memmove_chk_sse2_unaligned_erms))
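  /* Note on the implementation-name suffixes used throughout this list
     (descriptive only): "_erms" variants use the Enhanced REP MOVSB/STOSB
     fast-string path, "_unaligned" variants tolerate unaligned buffers,
     "_rtm" variants are safe to use inside RTM transactions because they
     avoid VZEROUPPER, "_no_vzeroupper" variants skip VZEROUPPER entirely,
     and "avx512" versus "evex" distinguishes 512-bit ZMM code from
     EVEX-encoded code restricted to 256-bit vectors.  */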
  /* Support sysdeps/x86_64/multiarch/memmove.c.  */
  IFUNC_IMPL (i, name, memmove,
      IFUNC_IMPL_ADD (array, i, memmove, 1,
          __memmove_erms)
      X86_IFUNC_IMPL_ADD_V4 (array, i, memmove,
          CPU_FEATURE_USABLE (AVX512F),
          __memmove_avx512_no_vzeroupper)
      X86_IFUNC_IMPL_ADD_V4 (array, i, memmove,
          CPU_FEATURE_USABLE (AVX512VL),
          __memmove_avx512_unaligned)
      X86_IFUNC_IMPL_ADD_V4 (array, i, memmove,
          CPU_FEATURE_USABLE (AVX512VL),
          __memmove_avx512_unaligned_erms)
      X86_IFUNC_IMPL_ADD_V4 (array, i, memmove,
          CPU_FEATURE_USABLE (AVX512VL),
          __memmove_evex_unaligned)
      X86_IFUNC_IMPL_ADD_V4 (array, i, memmove,
          CPU_FEATURE_USABLE (AVX512VL),
          __memmove_evex_unaligned_erms)
      X86_IFUNC_IMPL_ADD_V3 (array, i, memmove,
          CPU_FEATURE_USABLE (AVX),
          __memmove_avx_unaligned)
      X86_IFUNC_IMPL_ADD_V3 (array, i, memmove,
          CPU_FEATURE_USABLE (AVX),
          __memmove_avx_unaligned_erms)
      X86_IFUNC_IMPL_ADD_V3 (array, i, memmove,
          (CPU_FEATURE_USABLE (AVX)
           && CPU_FEATURE_USABLE (RTM)),
          __memmove_avx_unaligned_rtm)
      X86_IFUNC_IMPL_ADD_V3 (array, i, memmove,
          (CPU_FEATURE_USABLE (AVX)
           && CPU_FEATURE_USABLE (RTM)),
          __memmove_avx_unaligned_erms_rtm)
      /* By V3 we assume fast aligned copy.  */
      X86_IFUNC_IMPL_ADD_V2 (array, i, memmove,
          CPU_FEATURE_USABLE (SSSE3),
          __memmove_ssse3)
      /* ISA V2 wrapper for SSE2 implementation because the SSE2
         implementation is also used at ISA level 2 (SSSE3 is too
         optimized around aligned copy to be better as general
         purpose memmove).  */
      X86_IFUNC_IMPL_ADD_V2 (array, i, memmove, 1,
          __memmove_sse2_unaligned)
      X86_IFUNC_IMPL_ADD_V2 (array, i, memmove, 1,
          __memmove_sse2_unaligned_erms))
  /* Support sysdeps/x86_64/multiarch/memrchr.c.  */
  IFUNC_IMPL (i, name, memrchr,
      X86_IFUNC_IMPL_ADD_V4 (array, i, memrchr,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)
           && CPU_FEATURE_USABLE (LZCNT)),
          __memrchr_evex)
      X86_IFUNC_IMPL_ADD_V3 (array, i, memrchr,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (BMI2)
           && CPU_FEATURE_USABLE (LZCNT)),
          __memrchr_avx2)
      X86_IFUNC_IMPL_ADD_V3 (array, i, memrchr,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (BMI2)
           && CPU_FEATURE_USABLE (LZCNT)
           && CPU_FEATURE_USABLE (RTM)),
          __memrchr_avx2_rtm)
      /* ISA V2 wrapper for SSE2 implementation because the SSE2
         implementation is also used at ISA level 2.  */
      X86_IFUNC_IMPL_ADD_V2 (array, i, memrchr, 1,
          __memrchr_sse2))
  /* Support sysdeps/x86_64/multiarch/memset_chk.c.  */
  IFUNC_IMPL (i, name, __memset_chk,
      IFUNC_IMPL_ADD (array, i, __memset_chk, 1,
          __memset_chk_erms)
      X86_IFUNC_IMPL_ADD_V4 (array, i, __memset_chk,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __memset_chk_avx512_unaligned_erms)
      X86_IFUNC_IMPL_ADD_V4 (array, i, __memset_chk,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __memset_chk_avx512_unaligned)
      X86_IFUNC_IMPL_ADD_V4 (array, i, __memset_chk,
          CPU_FEATURE_USABLE (AVX512F),
          __memset_chk_avx512_no_vzeroupper)
      X86_IFUNC_IMPL_ADD_V4 (array, i, __memset_chk,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __memset_chk_evex_unaligned)
      X86_IFUNC_IMPL_ADD_V4 (array, i, __memset_chk,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __memset_chk_evex_unaligned_erms)
      X86_IFUNC_IMPL_ADD_V3 (array, i, __memset_chk,
          CPU_FEATURE_USABLE (AVX2),
          __memset_chk_avx2_unaligned)
      X86_IFUNC_IMPL_ADD_V3 (array, i, __memset_chk,
          CPU_FEATURE_USABLE (AVX2),
          __memset_chk_avx2_unaligned_erms)
      X86_IFUNC_IMPL_ADD_V3 (array, i, __memset_chk,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (RTM)),
          __memset_chk_avx2_unaligned_rtm)
      X86_IFUNC_IMPL_ADD_V3 (array, i, __memset_chk,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (RTM)),
          __memset_chk_avx2_unaligned_erms_rtm)
      /* ISA V2 wrapper for SSE2 implementation because the SSE2
         implementation is also used at ISA level 2.  */
      X86_IFUNC_IMPL_ADD_V2 (array, i, __memset_chk, 1,
          __memset_chk_sse2_unaligned)
      X86_IFUNC_IMPL_ADD_V2 (array, i, __memset_chk, 1,
          __memset_chk_sse2_unaligned_erms)
  /* Support sysdeps/x86_64/multiarch/memset.c.  */
  IFUNC_IMPL (i, name, memset,
      IFUNC_IMPL_ADD (array, i, memset, 1,
          __memset_erms)
      X86_IFUNC_IMPL_ADD_V4 (array, i, memset,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __memset_avx512_unaligned_erms)
      X86_IFUNC_IMPL_ADD_V4 (array, i, memset,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __memset_avx512_unaligned)
      X86_IFUNC_IMPL_ADD_V4 (array, i, memset,
          CPU_FEATURE_USABLE (AVX512F),
          __memset_avx512_no_vzeroupper)
      X86_IFUNC_IMPL_ADD_V4 (array, i, memset,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __memset_evex_unaligned)
      X86_IFUNC_IMPL_ADD_V4 (array, i, memset,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __memset_evex_unaligned_erms)
      X86_IFUNC_IMPL_ADD_V3 (array, i, memset,
          CPU_FEATURE_USABLE (AVX2),
          __memset_avx2_unaligned)
      X86_IFUNC_IMPL_ADD_V3 (array, i, memset,
          CPU_FEATURE_USABLE (AVX2),
          __memset_avx2_unaligned_erms)
      X86_IFUNC_IMPL_ADD_V3 (array, i, memset,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (RTM)),
          __memset_avx2_unaligned_rtm)
      X86_IFUNC_IMPL_ADD_V3 (array, i, memset,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (RTM)),
          __memset_avx2_unaligned_erms_rtm)
      /* ISA V2 wrapper for SSE2 implementation because the SSE2
         implementation is also used at ISA level 2.  */
      X86_IFUNC_IMPL_ADD_V2 (array, i, memset, 1,
          __memset_sse2_unaligned)
      X86_IFUNC_IMPL_ADD_V2 (array, i, memset, 1,
          __memset_sse2_unaligned_erms)
  /* Support sysdeps/x86_64/multiarch/rawmemchr.c.  */
  IFUNC_IMPL (i, name, rawmemchr,
      X86_IFUNC_IMPL_ADD_V4 (array, i, rawmemchr,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __rawmemchr_evex)
      X86_IFUNC_IMPL_ADD_V4 (array, i, rawmemchr,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __rawmemchr_evex512)
      X86_IFUNC_IMPL_ADD_V4 (array, i, rawmemchr,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __rawmemchr_evex_rtm)
      X86_IFUNC_IMPL_ADD_V3 (array, i, rawmemchr,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (BMI2)),
          __rawmemchr_avx2)
      X86_IFUNC_IMPL_ADD_V3 (array, i, rawmemchr,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (BMI2)
           && CPU_FEATURE_USABLE (RTM)),
          __rawmemchr_avx2_rtm)
      /* ISA V2 wrapper for SSE2 implementation because the SSE2
         implementation is also used at ISA level 2.  */
      X86_IFUNC_IMPL_ADD_V2 (array, i, rawmemchr, 1,
          __rawmemchr_sse2))
  /* Support sysdeps/x86_64/multiarch/strlen.c.  */
  IFUNC_IMPL (i, name, strlen,
      X86_IFUNC_IMPL_ADD_V4 (array, i, strlen,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __strlen_evex)
      X86_IFUNC_IMPL_ADD_V4 (array, i, strlen,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __strlen_evex512)
      X86_IFUNC_IMPL_ADD_V3 (array, i, strlen,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (BMI2)),
          __strlen_avx2)
      X86_IFUNC_IMPL_ADD_V3 (array, i, strlen,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (BMI2)
           && CPU_FEATURE_USABLE (RTM)),
          __strlen_avx2_rtm)
      /* ISA V2 wrapper for SSE2 implementation because the SSE2
         implementation is also used at ISA level 2.  */
      X86_IFUNC_IMPL_ADD_V2 (array, i, strlen, 1,
          __strlen_sse2))
  /* Support sysdeps/x86_64/multiarch/strnlen.c.  */
  IFUNC_IMPL (i, name, strnlen,
      X86_IFUNC_IMPL_ADD_V4 (array, i, strnlen,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __strnlen_evex)
      X86_IFUNC_IMPL_ADD_V4 (array, i, strnlen,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __strnlen_evex512)
      X86_IFUNC_IMPL_ADD_V3 (array, i, strnlen,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (BMI2)),
          __strnlen_avx2)
      X86_IFUNC_IMPL_ADD_V3 (array, i, strnlen,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (BMI2)
           && CPU_FEATURE_USABLE (RTM)),
          __strnlen_avx2_rtm)
      /* ISA V2 wrapper for SSE2 implementation because the SSE2
         implementation is also used at ISA level 2.  */
      X86_IFUNC_IMPL_ADD_V2 (array, i, strnlen, 1,
          __strnlen_sse2))
424 /* Support sysdeps/x86_64/multiarch/stpncpy.c. */
425 IFUNC_IMPL (i
, name
, stpncpy
,
426 X86_IFUNC_IMPL_ADD_V4 (array
, i
, stpncpy
,
427 (CPU_FEATURE_USABLE (AVX512VL
)
428 && CPU_FEATURE_USABLE (AVX512BW
)),
430 X86_IFUNC_IMPL_ADD_V3 (array
, i
, stpncpy
,
431 CPU_FEATURE_USABLE (AVX2
),
433 X86_IFUNC_IMPL_ADD_V3 (array
, i
, stpncpy
,
434 (CPU_FEATURE_USABLE (AVX2
)
435 && CPU_FEATURE_USABLE (RTM
)),
437 /* ISA V2 wrapper for sse2_unaligned implementation because
438 the sse2_unaligned implementation is also used at ISA
440 X86_IFUNC_IMPL_ADD_V2 (array
, i
, stpncpy
,
442 __stpncpy_sse2_unaligned
))
444 /* Support sysdeps/x86_64/multiarch/stpcpy.c. */
445 IFUNC_IMPL (i
, name
, stpcpy
,
446 X86_IFUNC_IMPL_ADD_V4 (array
, i
, stpcpy
,
447 (CPU_FEATURE_USABLE (AVX512VL
)
448 && CPU_FEATURE_USABLE (AVX512BW
)),
450 X86_IFUNC_IMPL_ADD_V3 (array
, i
, stpcpy
,
451 CPU_FEATURE_USABLE (AVX2
),
453 X86_IFUNC_IMPL_ADD_V3 (array
, i
, stpcpy
,
454 (CPU_FEATURE_USABLE (AVX2
)
455 && CPU_FEATURE_USABLE (RTM
)),
457 /* ISA V2 wrapper for sse2_unaligned implementation because
458 the sse2_unaligned implementation is also used at ISA
460 X86_IFUNC_IMPL_ADD_V2 (array
, i
, stpcpy
,
462 __stpcpy_sse2_unaligned
)
463 X86_IFUNC_IMPL_ADD_V1 (array
, i
, stpcpy
,
467 /* Support sysdeps/x86_64/multiarch/strcasecmp_l.c. */
468 IFUNC_IMPL (i
, name
, strcasecmp
,
469 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strcasecmp
,
470 (CPU_FEATURE_USABLE (AVX512VL
)
471 && CPU_FEATURE_USABLE (AVX512BW
)
472 && CPU_FEATURE_USABLE (BMI2
)),
474 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strcasecmp
,
475 (CPU_FEATURE_USABLE (AVX2
)
476 && CPU_FEATURE_USABLE (BMI2
)),
478 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strcasecmp
,
479 (CPU_FEATURE_USABLE (AVX2
)
480 && CPU_FEATURE_USABLE (BMI2
)
481 && CPU_FEATURE_USABLE (RTM
)),
482 __strcasecmp_avx2_rtm
)
483 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strcasecmp
,
484 CPU_FEATURE_USABLE (SSE4_2
),
486 /* ISA V2 wrapper for SSE2 implementation because the SSE2
487 implementation is also used at ISA level 2. */
488 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strcasecmp
,
492 /* Support sysdeps/x86_64/multiarch/strcasecmp_l.c. */
493 IFUNC_IMPL (i
, name
, strcasecmp_l
,
494 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strcasecmp
,
495 (CPU_FEATURE_USABLE (AVX512VL
)
496 && CPU_FEATURE_USABLE (AVX512BW
)
497 && CPU_FEATURE_USABLE (BMI2
)),
499 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strcasecmp
,
500 (CPU_FEATURE_USABLE (AVX2
)
501 && CPU_FEATURE_USABLE (BMI2
)),
503 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strcasecmp
,
504 (CPU_FEATURE_USABLE (AVX2
)
505 && CPU_FEATURE_USABLE (BMI2
)
506 && CPU_FEATURE_USABLE (RTM
)),
507 __strcasecmp_l_avx2_rtm
)
508 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strcasecmp_l
,
509 CPU_FEATURE_USABLE (SSE4_2
),
510 __strcasecmp_l_sse42
)
511 /* ISA V2 wrapper for SSE2 implementation because the SSE2
512 implementation is also used at ISA level 2. */
513 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strcasecmp_l
,
515 __strcasecmp_l_sse2
))
517 /* Support sysdeps/x86_64/multiarch/strcat.c. */
518 IFUNC_IMPL (i
, name
, strcat
,
519 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strcat
,
520 (CPU_FEATURE_USABLE (AVX512VL
)
521 && CPU_FEATURE_USABLE (AVX512BW
)),
523 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strcat
,
524 CPU_FEATURE_USABLE (AVX2
),
526 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strcat
,
527 (CPU_FEATURE_USABLE (AVX2
)
528 && CPU_FEATURE_USABLE (RTM
)),
530 /* ISA V2 wrapper for sse2_unaligned implementation because
531 the sse2_unaligned implementation is also used at ISA
533 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strcat
,
535 __strcat_sse2_unaligned
)
536 X86_IFUNC_IMPL_ADD_V1 (array
, i
, strcat
,
540 /* Support sysdeps/x86_64/multiarch/strchr.c. */
541 IFUNC_IMPL (i
, name
, strchr
,
542 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strchr
,
543 (CPU_FEATURE_USABLE (AVX512VL
)
544 && CPU_FEATURE_USABLE (AVX512BW
)
545 && CPU_FEATURE_USABLE (BMI2
)),
547 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strchr
,
548 (CPU_FEATURE_USABLE (AVX512VL
)
549 && CPU_FEATURE_USABLE (AVX512BW
)),
551 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strchr
,
552 (CPU_FEATURE_USABLE (AVX2
)
553 && CPU_FEATURE_USABLE (BMI2
)),
555 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strchr
,
556 (CPU_FEATURE_USABLE (AVX2
)
557 && CPU_FEATURE_USABLE (BMI2
)
558 && CPU_FEATURE_USABLE (RTM
)),
560 /* ISA V2 wrapper for SSE2 implementation because the SSE2
561 implementation is also used at ISA level 2. */
562 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strchr
,
565 X86_IFUNC_IMPL_ADD_V1 (array
, i
, strchr
,
567 __strchr_sse2_no_bsf
))
569 /* Support sysdeps/x86_64/multiarch/strchrnul.c. */
570 IFUNC_IMPL (i
, name
, strchrnul
,
571 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strchrnul
,
572 (CPU_FEATURE_USABLE (AVX512VL
)
573 && CPU_FEATURE_USABLE (AVX512BW
)
574 && CPU_FEATURE_USABLE (BMI2
)),
576 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strchrnul
,
577 (CPU_FEATURE_USABLE (AVX512VL
)
578 && CPU_FEATURE_USABLE (AVX512BW
)),
580 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strchrnul
,
581 (CPU_FEATURE_USABLE (AVX2
)
582 && CPU_FEATURE_USABLE (BMI2
)),
584 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strchrnul
,
585 (CPU_FEATURE_USABLE (AVX2
)
586 && CPU_FEATURE_USABLE (BMI2
)
587 && CPU_FEATURE_USABLE (RTM
)),
588 __strchrnul_avx2_rtm
)
589 /* ISA V2 wrapper for SSE2 implementation because the SSE2
590 implementation is also used at ISA level 2. */
591 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strchrnul
,
595 /* Support sysdeps/x86_64/multiarch/strrchr.c. */
596 IFUNC_IMPL (i
, name
, strrchr
,
597 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strrchr
,
598 (CPU_FEATURE_USABLE (AVX512VL
)
599 && CPU_FEATURE_USABLE (AVX512BW
)
600 && CPU_FEATURE_USABLE (BMI1
)
601 && CPU_FEATURE_USABLE (BMI2
)),
603 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strrchr
,
604 (CPU_FEATURE_USABLE (AVX512VL
)
605 && CPU_FEATURE_USABLE (AVX512BW
)
606 && CPU_FEATURE_USABLE (BMI2
)),
608 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strrchr
,
609 (CPU_FEATURE_USABLE (AVX2
)
610 && CPU_FEATURE_USABLE (BMI1
)
611 && CPU_FEATURE_USABLE (BMI2
)),
613 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strrchr
,
614 (CPU_FEATURE_USABLE (AVX2
)
615 && CPU_FEATURE_USABLE (BMI1
)
616 && CPU_FEATURE_USABLE (BMI2
)
617 && CPU_FEATURE_USABLE (RTM
)),
619 /* ISA V2 wrapper for SSE2 implementation because the SSE2
620 implementation is also used at ISA level 2. */
621 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strrchr
,
625 /* Support sysdeps/x86_64/multiarch/strcmp.c. */
626 IFUNC_IMPL (i
, name
, strcmp
,
627 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strcmp
,
628 (CPU_FEATURE_USABLE (AVX512VL
)
629 && CPU_FEATURE_USABLE (AVX512BW
)
630 && CPU_FEATURE_USABLE (BMI2
)),
632 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strcmp
,
633 (CPU_FEATURE_USABLE (AVX2
)
634 && CPU_FEATURE_USABLE (BMI2
)),
636 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strcmp
,
637 (CPU_FEATURE_USABLE (AVX2
)
638 && CPU_FEATURE_USABLE (BMI2
)
639 && CPU_FEATURE_USABLE (RTM
)),
641 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strcmp
,
642 CPU_FEATURE_USABLE (SSE4_2
),
644 /* ISA V2 wrapper for SSE2 implementations because the SSE2
645 implementations are also used at ISA level 2. */
646 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strcmp
,
648 __strcmp_sse2_unaligned
)
649 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strcmp
,
653 /* Support sysdeps/x86_64/multiarch/strcpy.c. */
654 IFUNC_IMPL (i
, name
, strcpy
,
655 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strcpy
,
656 (CPU_FEATURE_USABLE (AVX512VL
)
657 && CPU_FEATURE_USABLE (AVX512BW
)),
659 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strcpy
,
660 CPU_FEATURE_USABLE (AVX2
),
662 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strcpy
,
663 (CPU_FEATURE_USABLE (AVX2
)
664 && CPU_FEATURE_USABLE (RTM
)),
666 /* ISA V2 wrapper for sse2_unaligned implementation because
667 the sse2_unaligned implementation is also used at ISA
669 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strcpy
,
671 __strcpy_sse2_unaligned
)
672 X86_IFUNC_IMPL_ADD_V1 (array
, i
, strcpy
,
676 /* Support sysdeps/x86_64/multiarch/strcspn.c. */
677 IFUNC_IMPL (i
, name
, strcspn
,
678 /* All implementations of strcspn are built at all ISA
680 IFUNC_IMPL_ADD (array
, i
, strcspn
, CPU_FEATURE_USABLE (SSE4_2
),
682 IFUNC_IMPL_ADD (array
, i
, strcspn
, 1, __strcspn_generic
))
684 /* Support sysdeps/x86_64/multiarch/strncase_l.c. */
685 IFUNC_IMPL (i
, name
, strncasecmp
,
686 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strncasecmp
,
687 (CPU_FEATURE_USABLE (AVX512VL
)
688 && CPU_FEATURE_USABLE (AVX512BW
)
689 && CPU_FEATURE_USABLE (BMI2
)),
691 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strncasecmp
,
692 (CPU_FEATURE_USABLE (AVX2
)
693 && CPU_FEATURE_USABLE (BMI2
)),
695 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strncasecmp
,
696 (CPU_FEATURE_USABLE (AVX2
)
697 && CPU_FEATURE_USABLE (BMI2
)
698 && CPU_FEATURE_USABLE (RTM
)),
699 __strncasecmp_avx2_rtm
)
700 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strncasecmp
,
701 CPU_FEATURE_USABLE (SSE4_2
),
703 /* ISA V2 wrapper for SSE2 implementation because the SSE2
704 implementation is also used at ISA level 2. */
705 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strncasecmp
,
  /* Support sysdeps/x86_64/multiarch/strncase_l.c.  */
  IFUNC_IMPL (i, name, strncasecmp_l,
      X86_IFUNC_IMPL_ADD_V4 (array, i, strncasecmp,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __strncasecmp_l_evex)
      X86_IFUNC_IMPL_ADD_V3 (array, i, strncasecmp,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (BMI2)),
          __strncasecmp_l_avx2)
      X86_IFUNC_IMPL_ADD_V3 (array, i, strncasecmp,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (BMI2)
           && CPU_FEATURE_USABLE (RTM)),
          __strncasecmp_l_avx2_rtm)
      X86_IFUNC_IMPL_ADD_V2 (array, i, strncasecmp_l,
          CPU_FEATURE_USABLE (SSE4_2),
          __strncasecmp_l_sse42)
      /* ISA V2 wrapper for SSE2 implementation because the SSE2
         implementation is also used at ISA level 2.  */
      X86_IFUNC_IMPL_ADD_V2 (array, i, strncasecmp_l, 1,
          __strncasecmp_l_sse2))
734 /* Support sysdeps/x86_64/multiarch/strncat.c. */
735 IFUNC_IMPL (i
, name
, strncat
,
736 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strncat
,
737 (CPU_FEATURE_USABLE (AVX512VL
)
738 && CPU_FEATURE_USABLE (AVX512BW
)),
740 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strncat
,
741 CPU_FEATURE_USABLE (AVX2
),
743 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strncat
,
744 (CPU_FEATURE_USABLE (AVX2
)
745 && CPU_FEATURE_USABLE (RTM
)),
747 /* ISA V2 wrapper for sse2_unaligned implementation because
748 the sse2_unaligned implementation is also used at ISA
750 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strncat
,
752 __strncat_sse2_unaligned
))
754 /* Support sysdeps/x86_64/multiarch/strncpy.c. */
755 IFUNC_IMPL (i
, name
, strncpy
,
756 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strncpy
,
757 (CPU_FEATURE_USABLE (AVX512VL
)
758 && CPU_FEATURE_USABLE (AVX512BW
)),
760 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strncpy
,
761 CPU_FEATURE_USABLE (AVX2
),
763 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strncpy
,
764 (CPU_FEATURE_USABLE (AVX2
)
765 && CPU_FEATURE_USABLE (RTM
)),
767 /* ISA V2 wrapper for sse2_unaligned implementation because
768 the sse2_unaligned implementation is also used at ISA
770 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strncpy
,
772 __strncpy_sse2_unaligned
))
774 /* Support sysdeps/x86_64/multiarch/strpbrk.c. */
775 IFUNC_IMPL (i
, name
, strpbrk
,
776 /* All implementations of strpbrk are built at all ISA
778 IFUNC_IMPL_ADD (array
, i
, strpbrk
, CPU_FEATURE_USABLE (SSE4_2
),
780 IFUNC_IMPL_ADD (array
, i
, strpbrk
, 1, __strpbrk_generic
))
783 /* Support sysdeps/x86_64/multiarch/strspn.c. */
784 IFUNC_IMPL (i
, name
, strspn
,
785 /* All implementations of strspn are built at all ISA
787 IFUNC_IMPL_ADD (array
, i
, strspn
, CPU_FEATURE_USABLE (SSE4_2
),
789 IFUNC_IMPL_ADD (array
, i
, strspn
, 1, __strspn_generic
))
791 /* Support sysdeps/x86_64/multiarch/strstr.c. */
792 IFUNC_IMPL (i
, name
, strstr
,
793 IFUNC_IMPL_ADD (array
, i
, strstr
,
794 (CPU_FEATURE_USABLE (AVX512VL
)
795 && CPU_FEATURE_USABLE (AVX512BW
)
796 && CPU_FEATURE_USABLE (AVX512DQ
)
797 && CPU_FEATURE_USABLE (BMI2
)),
799 IFUNC_IMPL_ADD (array
, i
, strstr
, 1, __strstr_sse2_unaligned
)
800 IFUNC_IMPL_ADD (array
, i
, strstr
, 1, __strstr_generic
))
802 /* Support sysdeps/x86_64/multiarch/wcschr.c. */
803 IFUNC_IMPL (i
, name
, wcschr
,
804 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wcschr
,
805 (CPU_FEATURE_USABLE (AVX512VL
)
806 && CPU_FEATURE_USABLE (AVX512BW
)
807 && CPU_FEATURE_USABLE (BMI2
)),
809 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wcschr
,
810 (CPU_FEATURE_USABLE (AVX512VL
)
811 && CPU_FEATURE_USABLE (AVX512BW
)),
813 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcschr
,
814 (CPU_FEATURE_USABLE (AVX2
)
815 && CPU_FEATURE_USABLE (BMI2
)),
817 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcschr
,
818 (CPU_FEATURE_USABLE (AVX2
)
819 && CPU_FEATURE_USABLE (BMI2
)
820 && CPU_FEATURE_USABLE (RTM
)),
822 /* ISA V2 wrapper for SSE2 implementation because the SSE2
823 implementation is also used at ISA level 2. */
824 X86_IFUNC_IMPL_ADD_V2 (array
, i
, wcschr
,
828 /* Support sysdeps/x86_64/multiarch/wcsrchr.c. */
829 IFUNC_IMPL (i
, name
, wcsrchr
,
830 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wcsrchr
,
831 (CPU_FEATURE_USABLE (AVX512VL
)
832 && CPU_FEATURE_USABLE (AVX512BW
)
833 && CPU_FEATURE_USABLE (BMI1
)
834 && CPU_FEATURE_USABLE (BMI2
)),
836 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wcsrchr
,
837 (CPU_FEATURE_USABLE (AVX512VL
)
838 && CPU_FEATURE_USABLE (AVX512BW
)
839 && CPU_FEATURE_USABLE (BMI2
)),
841 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcsrchr
,
842 (CPU_FEATURE_USABLE (AVX2
)
843 && CPU_FEATURE_USABLE (BMI1
)
844 && CPU_FEATURE_USABLE (BMI2
)),
846 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcsrchr
,
847 (CPU_FEATURE_USABLE (AVX2
)
848 && CPU_FEATURE_USABLE (BMI1
)
849 && CPU_FEATURE_USABLE (BMI2
)
850 && CPU_FEATURE_USABLE (RTM
)),
852 /* ISA V2 wrapper for SSE2 implementation because the SSE2
853 implementation is also used at ISA level 2. */
854 X86_IFUNC_IMPL_ADD_V2 (array
, i
, wcsrchr
,
858 /* Support sysdeps/x86_64/multiarch/wcscmp.c. */
859 IFUNC_IMPL (i
, name
, wcscmp
,
860 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wcscmp
,
861 (CPU_FEATURE_USABLE (AVX512VL
)
862 && CPU_FEATURE_USABLE (AVX512BW
)
863 && CPU_FEATURE_USABLE (BMI2
)),
865 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcscmp
,
866 (CPU_FEATURE_USABLE (AVX2
)
867 && CPU_FEATURE_USABLE (BMI2
)),
869 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcscmp
,
870 (CPU_FEATURE_USABLE (AVX2
)
871 && CPU_FEATURE_USABLE (BMI2
)
872 && CPU_FEATURE_USABLE (RTM
)),
874 /* ISA V2 wrapper for SSE2 implementation because the SSE2
875 implementation is also used at ISA level 2. */
876 X86_IFUNC_IMPL_ADD_V2 (array
, i
, wcscmp
,
880 /* Support sysdeps/x86_64/multiarch/wcsncmp.c. */
881 IFUNC_IMPL (i
, name
, wcsncmp
,
882 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wcsncmp
,
883 (CPU_FEATURE_USABLE (AVX512VL
)
884 && CPU_FEATURE_USABLE (AVX512BW
)
885 && CPU_FEATURE_USABLE (BMI2
)),
887 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcsncmp
,
888 (CPU_FEATURE_USABLE (AVX2
)
889 && CPU_FEATURE_USABLE (BMI2
)),
891 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcsncmp
,
892 (CPU_FEATURE_USABLE (AVX2
)
893 && CPU_FEATURE_USABLE (BMI2
)
894 && CPU_FEATURE_USABLE (RTM
)),
896 /* ISA V2 wrapper for GENERIC implementation because the
897 GENERIC implementation is also used at ISA level 2. */
898 X86_IFUNC_IMPL_ADD_V2 (array
, i
, wcsncmp
,
902 /* Support sysdeps/x86_64/multiarch/wcscpy.c. */
903 IFUNC_IMPL (i
, name
, wcscpy
,
904 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wcscpy
,
905 (CPU_FEATURE_USABLE (AVX512VL
)
906 && CPU_FEATURE_USABLE (AVX512BW
)
907 && CPU_FEATURE_USABLE (BMI2
)),
909 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcscpy
,
910 (CPU_FEATURE_USABLE (AVX2
)
911 && CPU_FEATURE_USABLE (BMI2
)),
913 X86_IFUNC_IMPL_ADD_V2 (array
, i
, wcscpy
,
914 CPU_FEATURE_USABLE (SSSE3
),
916 X86_IFUNC_IMPL_ADD_V1 (array
, i
, wcscpy
,
  /* Support sysdeps/x86_64/multiarch/wcsncpy.c.  */
  IFUNC_IMPL (i, name, wcsncpy,
      X86_IFUNC_IMPL_ADD_V4 (array, i, wcsncpy,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __wcsncpy_evex)
      X86_IFUNC_IMPL_ADD_V3 (array, i, wcsncpy,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (BMI2)),
          __wcsncpy_avx2)
      X86_IFUNC_IMPL_ADD_V2 (array, i, wcsncpy, 1,
          __wcsncpy_generic))
  /* Support sysdeps/x86_64/multiarch/wcpcpy.c.  */
  IFUNC_IMPL (i, name, wcpcpy,
      X86_IFUNC_IMPL_ADD_V4 (array, i, wcpcpy,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __wcpcpy_evex)
      X86_IFUNC_IMPL_ADD_V3 (array, i, wcpcpy,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (BMI2)),
          __wcpcpy_avx2)
      X86_IFUNC_IMPL_ADD_V2 (array, i, wcpcpy, 1,
          __wcpcpy_generic))
  /* Support sysdeps/x86_64/multiarch/wcpncpy.c.  */
  IFUNC_IMPL (i, name, wcpncpy,
      X86_IFUNC_IMPL_ADD_V4 (array, i, wcpncpy,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __wcpncpy_evex)
      X86_IFUNC_IMPL_ADD_V3 (array, i, wcpncpy,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (BMI2)),
          __wcpncpy_avx2)
      X86_IFUNC_IMPL_ADD_V2 (array, i, wcpncpy, 1,
          __wcpncpy_generic))
965 /* Support sysdeps/x86_64/multiarch/wcscat.c. */
966 IFUNC_IMPL (i
, name
, wcscat
,
967 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wcscat
,
968 (CPU_FEATURE_USABLE (AVX512VL
)
969 && CPU_FEATURE_USABLE (AVX512BW
)
970 && CPU_FEATURE_USABLE (BMI2
)),
972 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcscat
,
973 (CPU_FEATURE_USABLE (AVX2
)
974 && CPU_FEATURE_USABLE (BMI2
)),
976 X86_IFUNC_IMPL_ADD_V2 (array
, i
, wcscat
,
980 /* Support sysdeps/x86_64/multiarch/wcsncat.c. */
981 IFUNC_IMPL (i
, name
, wcsncat
,
982 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wcsncat
,
983 (CPU_FEATURE_USABLE (AVX512VL
)
984 && CPU_FEATURE_USABLE (AVX512BW
)
985 && CPU_FEATURE_USABLE (BMI2
)),
987 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcsncat
,
988 (CPU_FEATURE_USABLE (AVX2
)
989 && CPU_FEATURE_USABLE (BMI2
)),
991 X86_IFUNC_IMPL_ADD_V2 (array
, i
, wcsncat
,
995 /* Support sysdeps/x86_64/multiarch/wcslen.c. */
996 IFUNC_IMPL (i
, name
, wcslen
,
997 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wcslen
,
998 (CPU_FEATURE_USABLE (AVX512VL
)
999 && CPU_FEATURE_USABLE (AVX512BW
)
1000 && CPU_FEATURE_USABLE (BMI2
)),
1002 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wcslen
,
1003 (CPU_FEATURE_USABLE (AVX512VL
)
1004 && CPU_FEATURE_USABLE (AVX512BW
)
1005 && CPU_FEATURE_USABLE (BMI2
)),
1007 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcslen
,
1008 (CPU_FEATURE_USABLE (AVX2
)
1009 && CPU_FEATURE_USABLE (BMI2
)),
1011 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcslen
,
1012 (CPU_FEATURE_USABLE (AVX2
)
1013 && CPU_FEATURE_USABLE (BMI2
)
1014 && CPU_FEATURE_USABLE (RTM
)),
1016 X86_IFUNC_IMPL_ADD_V2 (array
, i
, wcslen
,
1017 CPU_FEATURE_USABLE (SSE4_1
),
1019 X86_IFUNC_IMPL_ADD_V1 (array
, i
, wcslen
,
1023 /* Support sysdeps/x86_64/multiarch/wcsnlen.c. */
1024 IFUNC_IMPL (i
, name
, wcsnlen
,
1025 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wcsnlen
,
1026 (CPU_FEATURE_USABLE (AVX512VL
)
1027 && CPU_FEATURE_USABLE (AVX512BW
)
1028 && CPU_FEATURE_USABLE (BMI2
)),
1030 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wcsnlen
,
1031 (CPU_FEATURE_USABLE (AVX512VL
)
1032 && CPU_FEATURE_USABLE (AVX512BW
)
1033 && CPU_FEATURE_USABLE (BMI2
)),
1035 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcsnlen
,
1036 (CPU_FEATURE_USABLE (AVX2
)
1037 && CPU_FEATURE_USABLE (BMI2
)),
1039 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcsnlen
,
1040 (CPU_FEATURE_USABLE (AVX2
)
1041 && CPU_FEATURE_USABLE (BMI2
)
1042 && CPU_FEATURE_USABLE (RTM
)),
1044 X86_IFUNC_IMPL_ADD_V2 (array
, i
, wcsnlen
,
1045 CPU_FEATURE_USABLE (SSE4_1
),
1047 X86_IFUNC_IMPL_ADD_V1 (array
, i
, wcsnlen
,
1051 /* Support sysdeps/x86_64/multiarch/wmemchr.c. */
1052 IFUNC_IMPL (i
, name
, wmemchr
,
1053 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wmemchr
,
1054 (CPU_FEATURE_USABLE (AVX512VL
)
1055 && CPU_FEATURE_USABLE (AVX512BW
)
1056 && CPU_FEATURE_USABLE (BMI2
)),
1058 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wmemchr
,
1059 (CPU_FEATURE_USABLE (AVX512VL
)
1060 && CPU_FEATURE_USABLE (AVX512BW
)
1061 && CPU_FEATURE_USABLE (BMI2
)),
1063 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wmemchr
,
1064 (CPU_FEATURE_USABLE (AVX512VL
)
1065 && CPU_FEATURE_USABLE (AVX512BW
)
1066 && CPU_FEATURE_USABLE (BMI2
)),
1068 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wmemchr
,
1069 (CPU_FEATURE_USABLE (AVX2
)
1070 && CPU_FEATURE_USABLE (BMI2
)),
1072 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wmemchr
,
1073 (CPU_FEATURE_USABLE (AVX2
)
1074 && CPU_FEATURE_USABLE (BMI2
)
1075 && CPU_FEATURE_USABLE (RTM
)),
1077 /* ISA V2 wrapper for SSE2 implementation because the SSE2
1078 implementation is also used at ISA level 2. */
1079 X86_IFUNC_IMPL_ADD_V2 (array
, i
, wmemchr
,
1083 /* Support sysdeps/x86_64/multiarch/wmemcmp.c. */
1084 IFUNC_IMPL (i
, name
, wmemcmp
,
1085 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wmemcmp
,
1086 (CPU_FEATURE_USABLE (AVX512VL
)
1087 && CPU_FEATURE_USABLE (AVX512BW
)
1088 && CPU_FEATURE_USABLE (BMI2
)
1089 && CPU_FEATURE_USABLE (MOVBE
)),
1090 __wmemcmp_evex_movbe
)
1091 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wmemcmp
,
1092 (CPU_FEATURE_USABLE (AVX2
)
1093 && CPU_FEATURE_USABLE (BMI2
)
1094 && CPU_FEATURE_USABLE (MOVBE
)),
1095 __wmemcmp_avx2_movbe
)
1096 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wmemcmp
,
1097 (CPU_FEATURE_USABLE (AVX2
)
1098 && CPU_FEATURE_USABLE (BMI2
)
1099 && CPU_FEATURE_USABLE (MOVBE
)
1100 && CPU_FEATURE_USABLE (RTM
)),
1101 __wmemcmp_avx2_movbe_rtm
)
1102 /* ISA V2 wrapper for SSE2 implementation because the SSE2
1103 implementation is also used at ISA level 2. */
1104 X86_IFUNC_IMPL_ADD_V2 (array
, i
, wmemcmp
,
1108 /* Support sysdeps/x86_64/multiarch/wmemset.c. */
1109 IFUNC_IMPL (i
, name
, wmemset
,
1110 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wmemset
,
1111 (CPU_FEATURE_USABLE (AVX512VL
)
1112 && CPU_FEATURE_USABLE (AVX512BW
)
1113 && CPU_FEATURE_USABLE (BMI2
)),
1114 __wmemset_evex_unaligned
)
1115 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wmemset
,
1116 (CPU_FEATURE_USABLE (AVX512VL
)
1117 && CPU_FEATURE_USABLE (AVX512BW
)
1118 && CPU_FEATURE_USABLE (BMI2
)),
1119 __wmemset_avx512_unaligned
)
1120 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wmemset
,
1121 CPU_FEATURE_USABLE (AVX2
),
1122 __wmemset_avx2_unaligned
)
1123 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wmemset
,
1124 (CPU_FEATURE_USABLE (AVX2
)
1125 && CPU_FEATURE_USABLE (RTM
)),
1126 __wmemset_avx2_unaligned_rtm
)
1127 /* ISA V2 wrapper for SSE2 implementation because the SSE2
1128 implementation is also used at ISA level 2. */
1129 X86_IFUNC_IMPL_ADD_V2 (array
, i
, wmemset
, 1,
1130 __wmemset_sse2_unaligned
))
1133 /* Support sysdeps/x86_64/multiarch/memcpy_chk.c. */
1134 IFUNC_IMPL (i
, name
, __memcpy_chk
,
1135 IFUNC_IMPL_ADD (array
, i
, __memcpy_chk
, 1,
1137 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __memcpy_chk
,
1138 CPU_FEATURE_USABLE (AVX512F
),
1139 __memcpy_chk_avx512_no_vzeroupper
)
1140 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __memcpy_chk
,
1141 CPU_FEATURE_USABLE (AVX512VL
),
1142 __memcpy_chk_avx512_unaligned
)
1143 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __memcpy_chk
,
1144 CPU_FEATURE_USABLE (AVX512VL
),
1145 __memcpy_chk_avx512_unaligned_erms
)
1146 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __memcpy_chk
,
1147 CPU_FEATURE_USABLE (AVX512VL
),
1148 __memcpy_chk_evex_unaligned
)
1149 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __memcpy_chk
,
1150 CPU_FEATURE_USABLE (AVX512VL
),
1151 __memcpy_chk_evex_unaligned_erms
)
1152 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __memcpy_chk
,
1153 CPU_FEATURE_USABLE (AVX
),
1154 __memcpy_chk_avx_unaligned
)
1155 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __memcpy_chk
,
1156 CPU_FEATURE_USABLE (AVX
),
1157 __memcpy_chk_avx_unaligned_erms
)
1158 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __memcpy_chk
,
1159 (CPU_FEATURE_USABLE (AVX
)
1160 && CPU_FEATURE_USABLE (RTM
)),
1161 __memcpy_chk_avx_unaligned_rtm
)
1162 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __memcpy_chk
,
1163 (CPU_FEATURE_USABLE (AVX
)
1164 && CPU_FEATURE_USABLE (RTM
)),
1165 __memcpy_chk_avx_unaligned_erms_rtm
)
1166 /* By V3 we assume fast aligned copy. */
1167 X86_IFUNC_IMPL_ADD_V2 (array
, i
, __memcpy_chk
,
1168 CPU_FEATURE_USABLE (SSSE3
),
1170 /* ISA V2 wrapper for SSE2 implementation because the SSE2
1171 implementation is also used at ISA level 2 (SSSE3 is too
1172 optimized around aligned copy to be better as general
1173 purpose memmove). */
1174 X86_IFUNC_IMPL_ADD_V2 (array
, i
, __memcpy_chk
, 1,
1175 __memcpy_chk_sse2_unaligned
)
1176 X86_IFUNC_IMPL_ADD_V2 (array
, i
, __memcpy_chk
, 1,
1177 __memcpy_chk_sse2_unaligned_erms
))
1180 /* Support sysdeps/x86_64/multiarch/memcpy.c. */
1181 IFUNC_IMPL (i
, name
, memcpy
,
1182 IFUNC_IMPL_ADD (array
, i
, memcpy
, 1,
1184 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memcpy
,
1185 CPU_FEATURE_USABLE (AVX512F
),
1186 __memcpy_avx512_no_vzeroupper
)
1187 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memcpy
,
1188 CPU_FEATURE_USABLE (AVX512VL
),
1189 __memcpy_avx512_unaligned
)
1190 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memcpy
,
1191 CPU_FEATURE_USABLE (AVX512VL
),
1192 __memcpy_avx512_unaligned_erms
)
1193 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memcpy
,
1194 CPU_FEATURE_USABLE (AVX512VL
),
1195 __memcpy_evex_unaligned
)
1196 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memcpy
,
1197 CPU_FEATURE_USABLE (AVX512VL
),
1198 __memcpy_evex_unaligned_erms
)
1199 X86_IFUNC_IMPL_ADD_V3 (array
, i
, memcpy
,
1200 CPU_FEATURE_USABLE (AVX
),
1201 __memcpy_avx_unaligned
)
1202 X86_IFUNC_IMPL_ADD_V3 (array
, i
, memcpy
,
1203 CPU_FEATURE_USABLE (AVX
),
1204 __memcpy_avx_unaligned_erms
)
1205 X86_IFUNC_IMPL_ADD_V3 (array
, i
, memcpy
,
1206 (CPU_FEATURE_USABLE (AVX
)
1207 && CPU_FEATURE_USABLE (RTM
)),
1208 __memcpy_avx_unaligned_rtm
)
1209 X86_IFUNC_IMPL_ADD_V3 (array
, i
, memcpy
,
1210 (CPU_FEATURE_USABLE (AVX
)
1211 && CPU_FEATURE_USABLE (RTM
)),
1212 __memcpy_avx_unaligned_erms_rtm
)
1213 /* By V3 we assume fast aligned copy. */
1214 X86_IFUNC_IMPL_ADD_V2 (array
, i
, memcpy
,
1215 CPU_FEATURE_USABLE (SSSE3
),
1217 /* ISA V2 wrapper for SSE2 implementation because the SSE2
1218 implementation is also used at ISA level 2 (SSSE3 is too
1219 optimized around aligned copy to be better as general
1220 purpose memmove). */
1221 X86_IFUNC_IMPL_ADD_V2 (array
, i
, memcpy
, 1,
1222 __memcpy_sse2_unaligned
)
1223 X86_IFUNC_IMPL_ADD_V2 (array
, i
, memcpy
, 1,
1224 __memcpy_sse2_unaligned_erms
))
1227 /* Support sysdeps/x86_64/multiarch/mempcpy_chk.c. */
1228 IFUNC_IMPL (i
, name
, __mempcpy_chk
,
1229 IFUNC_IMPL_ADD (array
, i
, __mempcpy_chk
, 1,
1231 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __mempcpy_chk
,
1232 CPU_FEATURE_USABLE (AVX512F
),
1233 __mempcpy_chk_avx512_no_vzeroupper
)
1234 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __mempcpy_chk
,
1235 CPU_FEATURE_USABLE (AVX512VL
),
1236 __mempcpy_chk_avx512_unaligned
)
1237 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __mempcpy_chk
,
1238 CPU_FEATURE_USABLE (AVX512VL
),
1239 __mempcpy_chk_avx512_unaligned_erms
)
1240 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __mempcpy_chk
,
1241 CPU_FEATURE_USABLE (AVX512VL
),
1242 __mempcpy_chk_evex_unaligned
)
1243 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __mempcpy_chk
,
1244 CPU_FEATURE_USABLE (AVX512VL
),
1245 __mempcpy_chk_evex_unaligned_erms
)
1246 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __mempcpy_chk
,
1247 CPU_FEATURE_USABLE (AVX
),
1248 __mempcpy_chk_avx_unaligned
)
1249 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __mempcpy_chk
,
1250 CPU_FEATURE_USABLE (AVX
),
1251 __mempcpy_chk_avx_unaligned_erms
)
1252 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __mempcpy_chk
,
1253 (CPU_FEATURE_USABLE (AVX
)
1254 && CPU_FEATURE_USABLE (RTM
)),
1255 __mempcpy_chk_avx_unaligned_rtm
)
1256 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __mempcpy_chk
,
1257 (CPU_FEATURE_USABLE (AVX
)
1258 && CPU_FEATURE_USABLE (RTM
)),
1259 __mempcpy_chk_avx_unaligned_erms_rtm
)
1260 /* By V3 we assume fast aligned copy. */
1261 X86_IFUNC_IMPL_ADD_V2 (array
, i
, __mempcpy_chk
,
1262 CPU_FEATURE_USABLE (SSSE3
),
1263 __mempcpy_chk_ssse3
)
1264 /* ISA V2 wrapper for SSE2 implementation because the SSE2
1265 implementation is also used at ISA level 2 (SSSE3 is too
1266 optimized around aligned copy to be better as general
1267 purpose memmove). */
1268 X86_IFUNC_IMPL_ADD_V2 (array
, i
, __mempcpy_chk
, 1,
1269 __mempcpy_chk_sse2_unaligned
)
1270 X86_IFUNC_IMPL_ADD_V2 (array
, i
, __mempcpy_chk
, 1,
1271 __mempcpy_chk_sse2_unaligned_erms
))
1274 /* Support sysdeps/x86_64/multiarch/mempcpy.c. */
1275 IFUNC_IMPL (i
, name
, mempcpy
,
1276 IFUNC_IMPL_ADD (array
, i
, mempcpy
, 1,
1278 X86_IFUNC_IMPL_ADD_V4 (array
, i
, mempcpy
,
1279 CPU_FEATURE_USABLE (AVX512F
),
1280 __mempcpy_avx512_no_vzeroupper
)
1281 X86_IFUNC_IMPL_ADD_V4 (array
, i
, mempcpy
,
1282 CPU_FEATURE_USABLE (AVX512VL
),
1283 __mempcpy_avx512_unaligned
)
1284 X86_IFUNC_IMPL_ADD_V4 (array
, i
, mempcpy
,
1285 CPU_FEATURE_USABLE (AVX512VL
),
1286 __mempcpy_avx512_unaligned_erms
)
1287 X86_IFUNC_IMPL_ADD_V4 (array
, i
, mempcpy
,
1288 CPU_FEATURE_USABLE (AVX512VL
),
1289 __mempcpy_evex_unaligned
)
1290 X86_IFUNC_IMPL_ADD_V4 (array
, i
, mempcpy
,
1291 CPU_FEATURE_USABLE (AVX512VL
),
1292 __mempcpy_evex_unaligned_erms
)
1293 X86_IFUNC_IMPL_ADD_V3 (array
, i
, mempcpy
,
1294 CPU_FEATURE_USABLE (AVX
),
1295 __mempcpy_avx_unaligned
)
1296 X86_IFUNC_IMPL_ADD_V3 (array
, i
, mempcpy
,
1297 CPU_FEATURE_USABLE (AVX
),
1298 __mempcpy_avx_unaligned_erms
)
1299 X86_IFUNC_IMPL_ADD_V3 (array
, i
, mempcpy
,
1300 (CPU_FEATURE_USABLE (AVX
)
1301 && CPU_FEATURE_USABLE (RTM
)),
1302 __mempcpy_avx_unaligned_rtm
)
1303 X86_IFUNC_IMPL_ADD_V3 (array
, i
, mempcpy
,
1304 (CPU_FEATURE_USABLE (AVX
)
1305 && CPU_FEATURE_USABLE (RTM
)),
1306 __mempcpy_avx_unaligned_erms_rtm
)
1307 /* By V3 we assume fast aligned copy. */
1308 X86_IFUNC_IMPL_ADD_V2 (array
, i
, mempcpy
,
1309 CPU_FEATURE_USABLE (SSSE3
),
1311 /* ISA V2 wrapper for SSE2 implementation because the SSE2
1312 implementation is also used at ISA level 2 (SSSE3 is too
1313 optimized around aligned copy to be better as general
1314 purpose memmove). */
1315 X86_IFUNC_IMPL_ADD_V2 (array
, i
, mempcpy
, 1,
1316 __mempcpy_sse2_unaligned
)
1317 X86_IFUNC_IMPL_ADD_V2 (array
, i
, mempcpy
, 1,
1318 __mempcpy_sse2_unaligned_erms
))
1320 /* Support sysdeps/x86_64/multiarch/strncmp.c. */
1321 IFUNC_IMPL (i
, name
, strncmp
,
1322 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strncmp
,
1323 (CPU_FEATURE_USABLE (AVX512VL
)
1324 && CPU_FEATURE_USABLE (AVX512BW
)
1325 && CPU_FEATURE_USABLE (BMI2
)),
1327 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strncmp
,
1328 (CPU_FEATURE_USABLE (AVX2
)
1329 && CPU_FEATURE_USABLE (BMI2
)),
1331 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strncmp
,
1332 (CPU_FEATURE_USABLE (AVX2
)
1333 && CPU_FEATURE_USABLE (BMI2
)
1334 && CPU_FEATURE_USABLE (RTM
)),
1336 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strncmp
,
1337 CPU_FEATURE_USABLE (SSE4_2
),
1339 /* ISA V2 wrapper for SSE2 implementation because the SSE2
1340 implementation is also used at ISA level 2. */
1341 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strncmp
,
  /* Support sysdeps/x86_64/multiarch/wmemset_chk.c.  */
  IFUNC_IMPL (i, name, __wmemset_chk,
      X86_IFUNC_IMPL_ADD_V4 (array, i, __wmemset_chk,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __wmemset_chk_evex_unaligned)
      X86_IFUNC_IMPL_ADD_V4 (array, i, __wmemset_chk,
          (CPU_FEATURE_USABLE (AVX512VL)
           && CPU_FEATURE_USABLE (AVX512BW)
           && CPU_FEATURE_USABLE (BMI2)),
          __wmemset_chk_avx512_unaligned)
      X86_IFUNC_IMPL_ADD_V3 (array, i, __wmemset_chk,
          CPU_FEATURE_USABLE (AVX2),
          __wmemset_chk_avx2_unaligned)
      X86_IFUNC_IMPL_ADD_V3 (array, i, __wmemset_chk,
          (CPU_FEATURE_USABLE (AVX2)
           && CPU_FEATURE_USABLE (RTM)),
          __wmemset_chk_avx2_unaligned_rtm)
      /* ISA V2 wrapper for SSE2 implementation because the SSE2
         implementation is also used at ISA level 2.  */
      X86_IFUNC_IMPL_ADD_V2 (array, i, __wmemset_chk, 1,
          __wmemset_chk_sse2_unaligned))
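  /* Illustrative usage sketch (not part of this file): a caller such as a
     test harness could enumerate the implementations registered above by
     calling __libc_ifunc_impl_list directly.  Buffer size and the field
     names used here are assumptions for the example only.

       struct libc_ifunc_impl impls[64];
       size_t n = __libc_ifunc_impl_list ("memchr", impls,
                                          sizeof impls / sizeof impls[0]);
       for (size_t j = 0; j < n; ++j)
         if (impls[j].usable)
           printf ("usable: %s\n", impls[j].name);
  */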