Fix PR82941 and PR82942 by adding proper vzeroupper generation on SKX.
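
For background, the vzeroupper handling matters when 256-bit AVX512VL code such as the intrinsics below is mixed with legacy SSE code. A minimal sketch of the kind of user code involved (hypothetical, not taken from the PRs; the function and variable names are made up):

#include <immintrin.h>

/* Hypothetical caller: a 256-bit masked section followed by plain scalar
   (SSE) code.  When compiling for Skylake-AVX512 the compiler is expected
   to emit vzeroupper on leaving the 256-bit section so the later SSE code
   does not pay an AVX->SSE transition penalty.  */
static double
masked_sum (const double *p, double tail)
{
  __m256d v = _mm256_maskz_loadu_pd (0xf, p);
  __m256d s = _mm256_maskz_add_pd (0xf, v, v);
  double buf[4];
  _mm256_storeu_pd (buf, s);
  return buf[0] + tail;   /* scalar SSE code after the 256-bit section */
}
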
[official-gcc.git] / gcc / config / i386 / avx512vlintrin.h

/* Copyright (C) 2014-2017 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   Under Section 7 of GPL version 3, you are granted additional
   permissions described in the GCC Runtime Library Exception, version
   3.1, as published by the Free Software Foundation.

   You should have received a copy of the GNU General Public License and
   a copy of the GCC Runtime Library Exception along with this program;
   see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
   <http://www.gnu.org/licenses/>.  */

#ifndef _IMMINTRIN_H_INCLUDED
#error "Never use <avx512vlintrin.h> directly; include <immintrin.h> instead."
#endif

#ifndef _AVX512VLINTRIN_H_INCLUDED
#define _AVX512VLINTRIN_H_INCLUDED

#ifndef __AVX512VL__
#pragma GCC push_options
#pragma GCC target("avx512vl")
#define __DISABLE_AVX512VL__
#endif /* __AVX512VL__ */

/* Internal data types for implementing the intrinsics.  */
typedef unsigned int __mmask32;
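
/* Usage sketch (hypothetical caller code, not part of this header): the
   __mmask8/__mmask32 types are plain integers whose bits select lanes.
   With the mask/maskz intrinsics below, unselected lanes either keep the
   pass-through operand __W or are zeroed, e.g.:

     __m256d src = _mm256_set1_pd (1.0);
     __m256d a   = _mm256_set1_pd (2.0);
     __m256d r1  = _mm256_mask_mov_pd (src, 0x5, a);   lanes 0 and 2 from a
     __m256d r2  = _mm256_maskz_mov_pd (0x5, a);       other lanes zeroed  */
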
extern __inline __m256d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_mov_pd (__m256d __W, __mmask8 __U, __m256d __A)
{
  return (__m256d) __builtin_ia32_movapd256_mask ((__v4df) __A, (__v4df) __W,
      (__mmask8) __U);
}

extern __inline __m256d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_mov_pd (__mmask8 __U, __m256d __A)
{
  return (__m256d) __builtin_ia32_movapd256_mask ((__v4df) __A,
      (__v4df) _mm256_setzero_pd (), (__mmask8) __U);
}

extern __inline __m128d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_mov_pd (__m128d __W, __mmask8 __U, __m128d __A)
{
  return (__m128d) __builtin_ia32_movapd128_mask ((__v2df) __A, (__v2df) __W,
      (__mmask8) __U);
}

extern __inline __m128d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_mov_pd (__mmask8 __U, __m128d __A)
{
  return (__m128d) __builtin_ia32_movapd128_mask ((__v2df) __A,
      (__v2df) _mm_setzero_pd (), (__mmask8) __U);
}

extern __inline __m256d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_load_pd (__m256d __W, __mmask8 __U, void const *__P)
{
  return (__m256d) __builtin_ia32_loadapd256_mask ((__v4df *) __P,
      (__v4df) __W, (__mmask8) __U);
}

extern __inline __m256d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_load_pd (__mmask8 __U, void const *__P)
{
  return (__m256d) __builtin_ia32_loadapd256_mask ((__v4df *) __P,
      (__v4df) _mm256_setzero_pd (), (__mmask8) __U);
}

extern __inline __m128d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_load_pd (__m128d __W, __mmask8 __U, void const *__P)
{
  return (__m128d) __builtin_ia32_loadapd128_mask ((__v2df *) __P,
      (__v2df) __W, (__mmask8) __U);
}

extern __inline __m128d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_load_pd (__mmask8 __U, void const *__P)
{
  return (__m128d) __builtin_ia32_loadapd128_mask ((__v2df *) __P,
      (__v2df) _mm_setzero_pd (), (__mmask8) __U);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_store_pd (void *__P, __mmask8 __U, __m256d __A)
{
  __builtin_ia32_storeapd256_mask ((__v4df *) __P, (__v4df) __A,
      (__mmask8) __U);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_store_pd (void *__P, __mmask8 __U, __m128d __A)
{
  __builtin_ia32_storeapd128_mask ((__v2df *) __P, (__v2df) __A,
      (__mmask8) __U);
}

extern __inline __m256
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_mov_ps (__m256 __W, __mmask8 __U, __m256 __A)
{
  return (__m256) __builtin_ia32_movaps256_mask ((__v8sf) __A, (__v8sf) __W,
      (__mmask8) __U);
}

extern __inline __m256
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_mov_ps (__mmask8 __U, __m256 __A)
{
  return (__m256) __builtin_ia32_movaps256_mask ((__v8sf) __A,
      (__v8sf) _mm256_setzero_ps (), (__mmask8) __U);
}

extern __inline __m128
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_mov_ps (__m128 __W, __mmask8 __U, __m128 __A)
{
  return (__m128) __builtin_ia32_movaps128_mask ((__v4sf) __A, (__v4sf) __W,
      (__mmask8) __U);
}

extern __inline __m128
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_mov_ps (__mmask8 __U, __m128 __A)
{
  return (__m128) __builtin_ia32_movaps128_mask ((__v4sf) __A,
      (__v4sf) _mm_setzero_ps (), (__mmask8) __U);
}

extern __inline __m256
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_load_ps (__m256 __W, __mmask8 __U, void const *__P)
{
  return (__m256) __builtin_ia32_loadaps256_mask ((__v8sf *) __P,
      (__v8sf) __W, (__mmask8) __U);
}

extern __inline __m256
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_load_ps (__mmask8 __U, void const *__P)
{
  return (__m256) __builtin_ia32_loadaps256_mask ((__v8sf *) __P,
      (__v8sf) _mm256_setzero_ps (), (__mmask8) __U);
}

extern __inline __m128
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_load_ps (__m128 __W, __mmask8 __U, void const *__P)
{
  return (__m128) __builtin_ia32_loadaps128_mask ((__v4sf *) __P,
      (__v4sf) __W, (__mmask8) __U);
}

extern __inline __m128
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_load_ps (__mmask8 __U, void const *__P)
{
  return (__m128) __builtin_ia32_loadaps128_mask ((__v4sf *) __P,
      (__v4sf) _mm_setzero_ps (), (__mmask8) __U);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_store_ps (void *__P, __mmask8 __U, __m256 __A)
{
  __builtin_ia32_storeaps256_mask ((__v8sf *) __P, (__v8sf) __A,
      (__mmask8) __U);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_store_ps (void *__P, __mmask8 __U, __m128 __A)
{
  __builtin_ia32_storeaps128_mask ((__v4sf *) __P, (__v4sf) __A,
      (__mmask8) __U);
}

extern __inline __m256i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_mov_epi64 (__m256i __W, __mmask8 __U, __m256i __A)
{
  return (__m256i) __builtin_ia32_movdqa64_256_mask ((__v4di) __A,
      (__v4di) __W, (__mmask8) __U);
}

extern __inline __m256i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_mov_epi64 (__mmask8 __U, __m256i __A)
{
  return (__m256i) __builtin_ia32_movdqa64_256_mask ((__v4di) __A,
      (__v4di) _mm256_setzero_si256 (), (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_mov_epi64 (__m128i __W, __mmask8 __U, __m128i __A)
{
  return (__m128i) __builtin_ia32_movdqa64_128_mask ((__v2di) __A,
      (__v2di) __W, (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_mov_epi64 (__mmask8 __U, __m128i __A)
{
  return (__m128i) __builtin_ia32_movdqa64_128_mask ((__v2di) __A,
      (__v2di) _mm_setzero_si128 (), (__mmask8) __U);
}

extern __inline __m256i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_load_epi64 (__m256i __W, __mmask8 __U, void const *__P)
{
  return (__m256i) __builtin_ia32_movdqa64load256_mask ((__v4di *) __P,
      (__v4di) __W, (__mmask8) __U);
}

extern __inline __m256i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_load_epi64 (__mmask8 __U, void const *__P)
{
  return (__m256i) __builtin_ia32_movdqa64load256_mask ((__v4di *) __P,
      (__v4di) _mm256_setzero_si256 (), (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_load_epi64 (__m128i __W, __mmask8 __U, void const *__P)
{
  return (__m128i) __builtin_ia32_movdqa64load128_mask ((__v2di *) __P,
      (__v2di) __W, (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_load_epi64 (__mmask8 __U, void const *__P)
{
  return (__m128i) __builtin_ia32_movdqa64load128_mask ((__v2di *) __P,
      (__v2di) _mm_setzero_si128 (), (__mmask8) __U);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_store_epi64 (void *__P, __mmask8 __U, __m256i __A)
{
  __builtin_ia32_movdqa64store256_mask ((__v4di *) __P, (__v4di) __A,
      (__mmask8) __U);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_store_epi64 (void *__P, __mmask8 __U, __m128i __A)
{
  __builtin_ia32_movdqa64store128_mask ((__v2di *) __P, (__v2di) __A,
      (__mmask8) __U);
}

extern __inline __m256i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_mov_epi32 (__m256i __W, __mmask8 __U, __m256i __A)
{
  return (__m256i) __builtin_ia32_movdqa32_256_mask ((__v8si) __A,
      (__v8si) __W, (__mmask8) __U);
}

extern __inline __m256i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_mov_epi32 (__mmask8 __U, __m256i __A)
{
  return (__m256i) __builtin_ia32_movdqa32_256_mask ((__v8si) __A,
      (__v8si) _mm256_setzero_si256 (), (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_mov_epi32 (__m128i __W, __mmask8 __U, __m128i __A)
{
  return (__m128i) __builtin_ia32_movdqa32_128_mask ((__v4si) __A,
      (__v4si) __W, (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_mov_epi32 (__mmask8 __U, __m128i __A)
{
  return (__m128i) __builtin_ia32_movdqa32_128_mask ((__v4si) __A,
      (__v4si) _mm_setzero_si128 (), (__mmask8) __U);
}

extern __inline __m256i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_load_epi32 (__m256i __W, __mmask8 __U, void const *__P)
{
  return (__m256i) __builtin_ia32_movdqa32load256_mask ((__v8si *) __P,
      (__v8si) __W, (__mmask8) __U);
}

extern __inline __m256i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_load_epi32 (__mmask8 __U, void const *__P)
{
  return (__m256i) __builtin_ia32_movdqa32load256_mask ((__v8si *) __P,
      (__v8si) _mm256_setzero_si256 (), (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_load_epi32 (__m128i __W, __mmask8 __U, void const *__P)
{
  return (__m128i) __builtin_ia32_movdqa32load128_mask ((__v4si *) __P,
      (__v4si) __W, (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_load_epi32 (__mmask8 __U, void const *__P)
{
  return (__m128i) __builtin_ia32_movdqa32load128_mask ((__v4si *) __P,
      (__v4si) _mm_setzero_si128 (), (__mmask8) __U);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_store_epi32 (void *__P, __mmask8 __U, __m256i __A)
{
  __builtin_ia32_movdqa32store256_mask ((__v8si *) __P, (__v8si) __A,
      (__mmask8) __U);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_store_epi32 (void *__P, __mmask8 __U, __m128i __A)
{
  __builtin_ia32_movdqa32store128_mask ((__v4si *) __P, (__v4si) __A,
      (__mmask8) __U);
}

extern __inline __m128d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_add_pd (__m128d __W, __mmask8 __U, __m128d __A, __m128d __B)
{
  return (__m128d) __builtin_ia32_addpd128_mask ((__v2df) __A, (__v2df) __B,
      (__v2df) __W, (__mmask8) __U);
}

extern __inline __m128d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_add_pd (__mmask8 __U, __m128d __A, __m128d __B)
{
  return (__m128d) __builtin_ia32_addpd128_mask ((__v2df) __A, (__v2df) __B,
      (__v2df) _mm_setzero_pd (), (__mmask8) __U);
}

extern __inline __m256d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_add_pd (__m256d __W, __mmask8 __U, __m256d __A, __m256d __B)
{
  return (__m256d) __builtin_ia32_addpd256_mask ((__v4df) __A, (__v4df) __B,
      (__v4df) __W, (__mmask8) __U);
}

extern __inline __m256d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_add_pd (__mmask8 __U, __m256d __A, __m256d __B)
{
  return (__m256d) __builtin_ia32_addpd256_mask ((__v4df) __A, (__v4df) __B,
      (__v4df) _mm256_setzero_pd (), (__mmask8) __U);
}

extern __inline __m128
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_add_ps (__m128 __W, __mmask16 __U, __m128 __A, __m128 __B)
{
  return (__m128) __builtin_ia32_addps128_mask ((__v4sf) __A, (__v4sf) __B,
      (__v4sf) __W, (__mmask8) __U);
}

extern __inline __m128
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_add_ps (__mmask16 __U, __m128 __A, __m128 __B)
{
  return (__m128) __builtin_ia32_addps128_mask ((__v4sf) __A, (__v4sf) __B,
      (__v4sf) _mm_setzero_ps (), (__mmask8) __U);
}

extern __inline __m256
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_add_ps (__m256 __W, __mmask16 __U, __m256 __A, __m256 __B)
{
  return (__m256) __builtin_ia32_addps256_mask ((__v8sf) __A, (__v8sf) __B,
      (__v8sf) __W, (__mmask8) __U);
}

extern __inline __m256
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_add_ps (__mmask16 __U, __m256 __A, __m256 __B)
{
  return (__m256) __builtin_ia32_addps256_mask ((__v8sf) __A, (__v8sf) __B,
      (__v8sf) _mm256_setzero_ps (), (__mmask8) __U);
}

extern __inline __m128d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_sub_pd (__m128d __W, __mmask8 __U, __m128d __A, __m128d __B)
{
  return (__m128d) __builtin_ia32_subpd128_mask ((__v2df) __A, (__v2df) __B,
      (__v2df) __W, (__mmask8) __U);
}

extern __inline __m128d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_sub_pd (__mmask8 __U, __m128d __A, __m128d __B)
{
  return (__m128d) __builtin_ia32_subpd128_mask ((__v2df) __A, (__v2df) __B,
      (__v2df) _mm_setzero_pd (), (__mmask8) __U);
}

extern __inline __m256d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_sub_pd (__m256d __W, __mmask8 __U, __m256d __A, __m256d __B)
{
  return (__m256d) __builtin_ia32_subpd256_mask ((__v4df) __A, (__v4df) __B,
      (__v4df) __W, (__mmask8) __U);
}

extern __inline __m256d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_sub_pd (__mmask8 __U, __m256d __A, __m256d __B)
{
  return (__m256d) __builtin_ia32_subpd256_mask ((__v4df) __A, (__v4df) __B,
      (__v4df) _mm256_setzero_pd (), (__mmask8) __U);
}

extern __inline __m128
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_sub_ps (__m128 __W, __mmask16 __U, __m128 __A, __m128 __B)
{
  return (__m128) __builtin_ia32_subps128_mask ((__v4sf) __A, (__v4sf) __B,
      (__v4sf) __W, (__mmask8) __U);
}

extern __inline __m128
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_sub_ps (__mmask16 __U, __m128 __A, __m128 __B)
{
  return (__m128) __builtin_ia32_subps128_mask ((__v4sf) __A, (__v4sf) __B,
      (__v4sf) _mm_setzero_ps (), (__mmask8) __U);
}

extern __inline __m256
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_sub_ps (__m256 __W, __mmask16 __U, __m256 __A, __m256 __B)
{
  return (__m256) __builtin_ia32_subps256_mask ((__v8sf) __A, (__v8sf) __B,
      (__v8sf) __W, (__mmask8) __U);
}

extern __inline __m256
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_sub_ps (__mmask16 __U, __m256 __A, __m256 __B)
{
  return (__m256) __builtin_ia32_subps256_mask ((__v8sf) __A, (__v8sf) __B,
      (__v8sf) _mm256_setzero_ps (), (__mmask8) __U);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_store_epi64 (void *__P, __m256i __A)
{
  *(__m256i *) __P = __A;
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_store_epi64 (void *__P, __m128i __A)
{
  *(__m128i *) __P = __A;
}

extern __inline __m256d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_loadu_pd (__m256d __W, __mmask8 __U, void const *__P)
{
  return (__m256d) __builtin_ia32_loadupd256_mask ((const double *) __P,
      (__v4df) __W, (__mmask8) __U);
}

extern __inline __m256d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_loadu_pd (__mmask8 __U, void const *__P)
{
  return (__m256d) __builtin_ia32_loadupd256_mask ((const double *) __P,
      (__v4df) _mm256_setzero_pd (), (__mmask8) __U);
}

extern __inline __m128d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_loadu_pd (__m128d __W, __mmask8 __U, void const *__P)
{
  return (__m128d) __builtin_ia32_loadupd128_mask ((const double *) __P,
      (__v2df) __W, (__mmask8) __U);
}

extern __inline __m128d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_loadu_pd (__mmask8 __U, void const *__P)
{
  return (__m128d) __builtin_ia32_loadupd128_mask ((const double *) __P,
      (__v2df) _mm_setzero_pd (), (__mmask8) __U);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_storeu_pd (void *__P, __mmask8 __U, __m256d __A)
{
  __builtin_ia32_storeupd256_mask ((double *) __P, (__v4df) __A,
      (__mmask8) __U);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_storeu_pd (void *__P, __mmask8 __U, __m128d __A)
{
  __builtin_ia32_storeupd128_mask ((double *) __P, (__v2df) __A,
      (__mmask8) __U);
}

extern __inline __m256
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_loadu_ps (__m256 __W, __mmask8 __U, void const *__P)
{
  return (__m256) __builtin_ia32_loadups256_mask ((const float *) __P,
      (__v8sf) __W, (__mmask8) __U);
}

extern __inline __m256
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_loadu_ps (__mmask8 __U, void const *__P)
{
  return (__m256) __builtin_ia32_loadups256_mask ((const float *) __P,
      (__v8sf) _mm256_setzero_ps (), (__mmask8) __U);
}

extern __inline __m128
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_loadu_ps (__m128 __W, __mmask8 __U, void const *__P)
{
  return (__m128) __builtin_ia32_loadups128_mask ((const float *) __P,
      (__v4sf) __W, (__mmask8) __U);
}

extern __inline __m128
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_loadu_ps (__mmask8 __U, void const *__P)
{
  return (__m128) __builtin_ia32_loadups128_mask ((const float *) __P,
      (__v4sf) _mm_setzero_ps (), (__mmask8) __U);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_storeu_ps (void *__P, __mmask8 __U, __m256 __A)
{
  __builtin_ia32_storeups256_mask ((float *) __P, (__v8sf) __A,
      (__mmask8) __U);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_storeu_ps (void *__P, __mmask8 __U, __m128 __A)
{
  __builtin_ia32_storeups128_mask ((float *) __P, (__v4sf) __A,
      (__mmask8) __U);
}

extern __inline __m256i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_loadu_epi64 (__m256i __W, __mmask8 __U, void const *__P)
{
  return (__m256i) __builtin_ia32_loaddqudi256_mask ((const long long *) __P,
      (__v4di) __W, (__mmask8) __U);
}

extern __inline __m256i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_loadu_epi64 (__mmask8 __U, void const *__P)
{
  return (__m256i) __builtin_ia32_loaddqudi256_mask ((const long long *) __P,
      (__v4di) _mm256_setzero_si256 (), (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_loadu_epi64 (__m128i __W, __mmask8 __U, void const *__P)
{
  return (__m128i) __builtin_ia32_loaddqudi128_mask ((const long long *) __P,
      (__v2di) __W, (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_loadu_epi64 (__mmask8 __U, void const *__P)
{
  return (__m128i) __builtin_ia32_loaddqudi128_mask ((const long long *) __P,
      (__v2di) _mm_setzero_si128 (), (__mmask8) __U);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_storeu_epi64 (void *__P, __mmask8 __U, __m256i __A)
{
  __builtin_ia32_storedqudi256_mask ((long long *) __P, (__v4di) __A,
      (__mmask8) __U);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_storeu_epi64 (void *__P, __mmask8 __U, __m128i __A)
{
  __builtin_ia32_storedqudi128_mask ((long long *) __P, (__v2di) __A,
      (__mmask8) __U);
}

extern __inline __m256i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_loadu_epi32 (__m256i __W, __mmask8 __U, void const *__P)
{
  return (__m256i) __builtin_ia32_loaddqusi256_mask ((const int *) __P,
      (__v8si) __W, (__mmask8) __U);
}

extern __inline __m256i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_loadu_epi32 (__mmask8 __U, void const *__P)
{
  return (__m256i) __builtin_ia32_loaddqusi256_mask ((const int *) __P,
      (__v8si) _mm256_setzero_si256 (), (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_loadu_epi32 (__m128i __W, __mmask8 __U, void const *__P)
{
  return (__m128i) __builtin_ia32_loaddqusi128_mask ((const int *) __P,
      (__v4si) __W, (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_loadu_epi32 (__mmask8 __U, void const *__P)
{
  return (__m128i) __builtin_ia32_loaddqusi128_mask ((const int *) __P,
      (__v4si) _mm_setzero_si128 (), (__mmask8) __U);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_storeu_epi32 (void *__P, __mmask8 __U, __m256i __A)
{
  __builtin_ia32_storedqusi256_mask ((int *) __P, (__v8si) __A,
      (__mmask8) __U);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_storeu_epi32 (void *__P, __mmask8 __U, __m128i __A)
{
  __builtin_ia32_storedqusi128_mask ((int *) __P, (__v4si) __A,
      (__mmask8) __U);
}

extern __inline __m256i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_abs_epi32 (__m256i __W, __mmask8 __U, __m256i __A)
{
  return (__m256i) __builtin_ia32_pabsd256_mask ((__v8si) __A, (__v8si) __W,
      (__mmask8) __U);
}

extern __inline __m256i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_abs_epi32 (__mmask8 __U, __m256i __A)
{
  return (__m256i) __builtin_ia32_pabsd256_mask ((__v8si) __A,
      (__v8si) _mm256_setzero_si256 (), (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_abs_epi32 (__m128i __W, __mmask8 __U, __m128i __A)
{
  return (__m128i) __builtin_ia32_pabsd128_mask ((__v4si) __A, (__v4si) __W,
      (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_abs_epi32 (__mmask8 __U, __m128i __A)
{
  return (__m128i) __builtin_ia32_pabsd128_mask ((__v4si) __A,
      (__v4si) _mm_setzero_si128 (), (__mmask8) __U);
}

extern __inline __m256i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_abs_epi64 (__m256i __A)
{
  return (__m256i) __builtin_ia32_pabsq256_mask ((__v4di) __A,
      (__v4di) _mm256_setzero_si256 (), (__mmask8) -1);
}

extern __inline __m256i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_abs_epi64 (__m256i __W, __mmask8 __U, __m256i __A)
{
  return (__m256i) __builtin_ia32_pabsq256_mask ((__v4di) __A, (__v4di) __W,
      (__mmask8) __U);
}

extern __inline __m256i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_abs_epi64 (__mmask8 __U, __m256i __A)
{
  return (__m256i) __builtin_ia32_pabsq256_mask ((__v4di) __A,
      (__v4di) _mm256_setzero_si256 (), (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_abs_epi64 (__m128i __A)
{
  return (__m128i) __builtin_ia32_pabsq128_mask ((__v2di) __A,
      (__v2di) _mm_setzero_si128 (), (__mmask8) -1);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_abs_epi64 (__m128i __W, __mmask8 __U, __m128i __A)
{
  return (__m128i) __builtin_ia32_pabsq128_mask ((__v2di) __A, (__v2di) __W,
      (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_abs_epi64 (__mmask8 __U, __m128i __A)
{
  return (__m128i) __builtin_ia32_pabsq128_mask ((__v2di) __A,
      (__v2di) _mm_setzero_si128 (), (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_cvtpd_epu32 (__m256d __A)
{
  return (__m128i) __builtin_ia32_cvtpd2udq256_mask ((__v4df) __A,
      (__v4si) _mm_setzero_si128 (), (__mmask8) -1);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvtpd_epu32 (__m128i __W, __mmask8 __U, __m256d __A)
{
  return (__m128i) __builtin_ia32_cvtpd2udq256_mask ((__v4df) __A,
      (__v4si) __W, (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_cvtpd_epu32 (__mmask8 __U, __m256d __A)
{
  return (__m128i) __builtin_ia32_cvtpd2udq256_mask ((__v4df) __A,
      (__v4si) _mm_setzero_si128 (), (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtpd_epu32 (__m128d __A)
{
  return (__m128i) __builtin_ia32_cvtpd2udq128_mask ((__v2df) __A,
      (__v4si) _mm_setzero_si128 (), (__mmask8) -1);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtpd_epu32 (__m128i __W, __mmask8 __U, __m128d __A)
{
  return (__m128i) __builtin_ia32_cvtpd2udq128_mask ((__v2df) __A,
      (__v4si) __W, (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_cvtpd_epu32 (__mmask8 __U, __m128d __A)
{
  return (__m128i) __builtin_ia32_cvtpd2udq128_mask ((__v2df) __A,
      (__v4si) _mm_setzero_si128 (), (__mmask8) __U);
}

extern __inline __m256i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvttps_epi32 (__m256i __W, __mmask8 __U, __m256 __A)
{
  return (__m256i) __builtin_ia32_cvttps2dq256_mask ((__v8sf) __A,
      (__v8si) __W, (__mmask8) __U);
}

extern __inline __m256i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_cvttps_epi32 (__mmask8 __U, __m256 __A)
{
  return (__m256i) __builtin_ia32_cvttps2dq256_mask ((__v8sf) __A,
      (__v8si) _mm256_setzero_si256 (), (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvttps_epi32 (__m128i __W, __mmask8 __U, __m128 __A)
{
  return (__m128i) __builtin_ia32_cvttps2dq128_mask ((__v4sf) __A,
      (__v4si) __W, (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_cvttps_epi32 (__mmask8 __U, __m128 __A)
{
  return (__m128i) __builtin_ia32_cvttps2dq128_mask ((__v4sf) __A,
      (__v4si) _mm_setzero_si128 (), (__mmask8) __U);
}

extern __inline __m256i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_cvttps_epu32 (__m256 __A)
{
  return (__m256i) __builtin_ia32_cvttps2udq256_mask ((__v8sf) __A,
      (__v8si) _mm256_setzero_si256 (), (__mmask8) -1);
}

extern __inline __m256i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvttps_epu32 (__m256i __W, __mmask8 __U, __m256 __A)
{
  return (__m256i) __builtin_ia32_cvttps2udq256_mask ((__v8sf) __A,
      (__v8si) __W, (__mmask8) __U);
}

extern __inline __m256i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_cvttps_epu32 (__mmask8 __U, __m256 __A)
{
  return (__m256i) __builtin_ia32_cvttps2udq256_mask ((__v8sf) __A,
      (__v8si) _mm256_setzero_si256 (), (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvttps_epu32 (__m128 __A)
{
  return (__m128i) __builtin_ia32_cvttps2udq128_mask ((__v4sf) __A,
      (__v4si) _mm_setzero_si128 (), (__mmask8) -1);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvttps_epu32 (__m128i __W, __mmask8 __U, __m128 __A)
{
  return (__m128i) __builtin_ia32_cvttps2udq128_mask ((__v4sf) __A,
      (__v4si) __W, (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_cvttps_epu32 (__mmask8 __U, __m128 __A)
{
  return (__m128i) __builtin_ia32_cvttps2udq128_mask ((__v4sf) __A,
      (__v4si) _mm_setzero_si128 (), (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvttpd_epi32 (__m128i __W, __mmask8 __U, __m256d __A)
{
  return (__m128i) __builtin_ia32_cvttpd2dq256_mask ((__v4df) __A,
      (__v4si) __W, (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_cvttpd_epi32 (__mmask8 __U, __m256d __A)
{
  return (__m128i) __builtin_ia32_cvttpd2dq256_mask ((__v4df) __A,
      (__v4si) _mm_setzero_si128 (), (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvttpd_epi32 (__m128i __W, __mmask8 __U, __m128d __A)
{
  return (__m128i) __builtin_ia32_cvttpd2dq128_mask ((__v2df) __A,
      (__v4si) __W, (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_cvttpd_epi32 (__mmask8 __U, __m128d __A)
{
  return (__m128i) __builtin_ia32_cvttpd2dq128_mask ((__v2df) __A,
      (__v4si) _mm_setzero_si128 (), (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_cvttpd_epu32 (__m256d __A)
{
  return (__m128i) __builtin_ia32_cvttpd2udq256_mask ((__v4df) __A,
      (__v4si) _mm_setzero_si128 (), (__mmask8) -1);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvttpd_epu32 (__m128i __W, __mmask8 __U, __m256d __A)
{
  return (__m128i) __builtin_ia32_cvttpd2udq256_mask ((__v4df) __A,
      (__v4si) __W, (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_cvttpd_epu32 (__mmask8 __U, __m256d __A)
{
  return (__m128i) __builtin_ia32_cvttpd2udq256_mask ((__v4df) __A,
      (__v4si) _mm_setzero_si128 (), (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvttpd_epu32 (__m128d __A)
{
  return (__m128i) __builtin_ia32_cvttpd2udq128_mask ((__v2df) __A,
      (__v4si) _mm_setzero_si128 (), (__mmask8) -1);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvttpd_epu32 (__m128i __W, __mmask8 __U, __m128d __A)
{
  return (__m128i) __builtin_ia32_cvttpd2udq128_mask ((__v2df) __A,
      (__v4si) __W, (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_cvttpd_epu32 (__mmask8 __U, __m128d __A)
{
  return (__m128i) __builtin_ia32_cvttpd2udq128_mask ((__v2df) __A,
      (__v4si) _mm_setzero_si128 (), (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvtpd_epi32 (__m128i __W, __mmask8 __U, __m256d __A)
{
  return (__m128i) __builtin_ia32_cvtpd2dq256_mask ((__v4df) __A,
      (__v4si) __W, (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_cvtpd_epi32 (__mmask8 __U, __m256d __A)
{
  return (__m128i) __builtin_ia32_cvtpd2dq256_mask ((__v4df) __A,
      (__v4si) _mm_setzero_si128 (), (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtpd_epi32 (__m128i __W, __mmask8 __U, __m128d __A)
{
  return (__m128i) __builtin_ia32_cvtpd2dq128_mask ((__v2df) __A,
      (__v4si) __W, (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_cvtpd_epi32 (__mmask8 __U, __m128d __A)
{
  return (__m128i) __builtin_ia32_cvtpd2dq128_mask ((__v2df) __A,
      (__v4si) _mm_setzero_si128 (), (__mmask8) __U);
}

extern __inline __m256d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvtepi32_pd (__m256d __W, __mmask8 __U, __m128i __A)
{
  return (__m256d) __builtin_ia32_cvtdq2pd256_mask ((__v4si) __A,
      (__v4df) __W, (__mmask8) __U);
}

extern __inline __m256d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_cvtepi32_pd (__mmask8 __U, __m128i __A)
{
  return (__m256d) __builtin_ia32_cvtdq2pd256_mask ((__v4si) __A,
      (__v4df) _mm256_setzero_pd (), (__mmask8) __U);
}

extern __inline __m128d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtepi32_pd (__m128d __W, __mmask8 __U, __m128i __A)
{
  return (__m128d) __builtin_ia32_cvtdq2pd128_mask ((__v4si) __A,
      (__v2df) __W, (__mmask8) __U);
}

extern __inline __m128d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_cvtepi32_pd (__mmask8 __U, __m128i __A)
{
  return (__m128d) __builtin_ia32_cvtdq2pd128_mask ((__v4si) __A,
      (__v2df) _mm_setzero_pd (), (__mmask8) __U);
}

extern __inline __m256d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_cvtepu32_pd (__m128i __A)
{
  return (__m256d) __builtin_ia32_cvtudq2pd256_mask ((__v4si) __A,
      (__v4df) _mm256_setzero_pd (), (__mmask8) -1);
}

extern __inline __m256d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvtepu32_pd (__m256d __W, __mmask8 __U, __m128i __A)
{
  return (__m256d) __builtin_ia32_cvtudq2pd256_mask ((__v4si) __A,
      (__v4df) __W, (__mmask8) __U);
}

extern __inline __m256d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_cvtepu32_pd (__mmask8 __U, __m128i __A)
{
  return (__m256d) __builtin_ia32_cvtudq2pd256_mask ((__v4si) __A,
      (__v4df) _mm256_setzero_pd (), (__mmask8) __U);
}

extern __inline __m128d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtepu32_pd (__m128i __A)
{
  return (__m128d) __builtin_ia32_cvtudq2pd128_mask ((__v4si) __A,
      (__v2df) _mm_setzero_pd (), (__mmask8) -1);
}

extern __inline __m128d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtepu32_pd (__m128d __W, __mmask8 __U, __m128i __A)
{
  return (__m128d) __builtin_ia32_cvtudq2pd128_mask ((__v4si) __A,
      (__v2df) __W, (__mmask8) __U);
}

extern __inline __m128d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_cvtepu32_pd (__mmask8 __U, __m128i __A)
{
  return (__m128d) __builtin_ia32_cvtudq2pd128_mask ((__v4si) __A,
      (__v2df) _mm_setzero_pd (), (__mmask8) __U);
}

extern __inline __m256
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvtepi32_ps (__m256 __W, __mmask8 __U, __m256i __A)
{
  return (__m256) __builtin_ia32_cvtdq2ps256_mask ((__v8si) __A,
      (__v8sf) __W, (__mmask8) __U);
}

extern __inline __m256
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_cvtepi32_ps (__mmask16 __U, __m256i __A)
{
  return (__m256) __builtin_ia32_cvtdq2ps256_mask ((__v8si) __A,
      (__v8sf) _mm256_setzero_ps (), (__mmask8) __U);
}

extern __inline __m128
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtepi32_ps (__m128 __W, __mmask8 __U, __m128i __A)
{
  return (__m128) __builtin_ia32_cvtdq2ps128_mask ((__v4si) __A,
      (__v4sf) __W, (__mmask8) __U);
}

extern __inline __m128
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_cvtepi32_ps (__mmask16 __U, __m128i __A)
{
  return (__m128) __builtin_ia32_cvtdq2ps128_mask ((__v4si) __A,
      (__v4sf) _mm_setzero_ps (), (__mmask8) __U);
}

extern __inline __m256
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_cvtepu32_ps (__m256i __A)
{
  return (__m256) __builtin_ia32_cvtudq2ps256_mask ((__v8si) __A,
      (__v8sf) _mm256_setzero_ps (), (__mmask8) -1);
}

extern __inline __m256
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvtepu32_ps (__m256 __W, __mmask8 __U, __m256i __A)
{
  return (__m256) __builtin_ia32_cvtudq2ps256_mask ((__v8si) __A,
      (__v8sf) __W, (__mmask8) __U);
}

extern __inline __m256
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_cvtepu32_ps (__mmask8 __U, __m256i __A)
{
  return (__m256) __builtin_ia32_cvtudq2ps256_mask ((__v8si) __A,
      (__v8sf) _mm256_setzero_ps (), (__mmask8) __U);
}

extern __inline __m128
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtepu32_ps (__m128i __A)
{
  return (__m128) __builtin_ia32_cvtudq2ps128_mask ((__v4si) __A,
      (__v4sf) _mm_setzero_ps (), (__mmask8) -1);
}

extern __inline __m128
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtepu32_ps (__m128 __W, __mmask8 __U, __m128i __A)
{
  return (__m128) __builtin_ia32_cvtudq2ps128_mask ((__v4si) __A,
      (__v4sf) __W, (__mmask8) __U);
}

extern __inline __m128
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_cvtepu32_ps (__mmask8 __U, __m128i __A)
{
  return (__m128) __builtin_ia32_cvtudq2ps128_mask ((__v4si) __A,
      (__v4sf) _mm_setzero_ps (), (__mmask8) __U);
}

extern __inline __m256d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvtps_pd (__m256d __W, __mmask8 __U, __m128 __A)
{
  return (__m256d) __builtin_ia32_cvtps2pd256_mask ((__v4sf) __A,
      (__v4df) __W, (__mmask8) __U);
}

extern __inline __m256d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_cvtps_pd (__mmask8 __U, __m128 __A)
{
  return (__m256d) __builtin_ia32_cvtps2pd256_mask ((__v4sf) __A,
      (__v4df) _mm256_setzero_pd (), (__mmask8) __U);
}

extern __inline __m128d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtps_pd (__m128d __W, __mmask8 __U, __m128 __A)
{
  return (__m128d) __builtin_ia32_cvtps2pd128_mask ((__v4sf) __A,
      (__v2df) __W, (__mmask8) __U);
}

extern __inline __m128d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_cvtps_pd (__mmask8 __U, __m128 __A)
{
  return (__m128d) __builtin_ia32_cvtps2pd128_mask ((__v4sf) __A,
      (__v2df) _mm_setzero_pd (), (__mmask8) __U);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtepi32_epi8 (__m128i __A)
{
  return (__m128i) __builtin_ia32_pmovdb128_mask ((__v4si) __A,
      (__v16qi) _mm_undefined_si128 (), (__mmask8) -1);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtepi32_storeu_epi8 (void * __P, __mmask8 __M, __m128i __A)
{
  __builtin_ia32_pmovdb128mem_mask ((__v16qi *) __P, (__v4si) __A, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtepi32_epi8 (__m128i __O, __mmask8 __M, __m128i __A)
{
  return (__m128i) __builtin_ia32_pmovdb128_mask ((__v4si) __A,
      (__v16qi) __O, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_cvtepi32_epi8 (__mmask8 __M, __m128i __A)
{
  return (__m128i) __builtin_ia32_pmovdb128_mask ((__v4si) __A,
      (__v16qi) _mm_setzero_si128 (), __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_cvtepi32_epi8 (__m256i __A)
{
  return (__m128i) __builtin_ia32_pmovdb256_mask ((__v8si) __A,
      (__v16qi) _mm_undefined_si128 (), (__mmask8) -1);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvtepi32_epi8 (__m128i __O, __mmask8 __M, __m256i __A)
{
  return (__m128i) __builtin_ia32_pmovdb256_mask ((__v8si) __A,
      (__v16qi) __O, __M);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvtepi32_storeu_epi8 (void * __P, __mmask8 __M, __m256i __A)
{
  __builtin_ia32_pmovdb256mem_mask ((__v16qi *) __P, (__v8si) __A, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_cvtepi32_epi8 (__mmask8 __M, __m256i __A)
{
  return (__m128i) __builtin_ia32_pmovdb256_mask ((__v8si) __A,
      (__v16qi) _mm_setzero_si128 (), __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtsepi32_epi8 (__m128i __A)
{
  return (__m128i) __builtin_ia32_pmovsdb128_mask ((__v4si) __A,
      (__v16qi) _mm_undefined_si128 (), (__mmask8) -1);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtsepi32_storeu_epi8 (void * __P, __mmask8 __M, __m128i __A)
{
  __builtin_ia32_pmovsdb128mem_mask ((__v16qi *) __P, (__v4si) __A, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtsepi32_epi8 (__m128i __O, __mmask8 __M, __m128i __A)
{
  return (__m128i) __builtin_ia32_pmovsdb128_mask ((__v4si) __A,
      (__v16qi) __O, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_cvtsepi32_epi8 (__mmask8 __M, __m128i __A)
{
  return (__m128i) __builtin_ia32_pmovsdb128_mask ((__v4si) __A,
      (__v16qi) _mm_setzero_si128 (), __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_cvtsepi32_epi8 (__m256i __A)
{
  return (__m128i) __builtin_ia32_pmovsdb256_mask ((__v8si) __A,
      (__v16qi) _mm_undefined_si128 (), (__mmask8) -1);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvtsepi32_storeu_epi8 (void * __P, __mmask8 __M, __m256i __A)
{
  __builtin_ia32_pmovsdb256mem_mask ((__v16qi *) __P, (__v8si) __A, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvtsepi32_epi8 (__m128i __O, __mmask8 __M, __m256i __A)
{
  return (__m128i) __builtin_ia32_pmovsdb256_mask ((__v8si) __A,
      (__v16qi) __O, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_cvtsepi32_epi8 (__mmask8 __M, __m256i __A)
{
  return (__m128i) __builtin_ia32_pmovsdb256_mask ((__v8si) __A,
      (__v16qi) _mm_setzero_si128 (), __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtusepi32_epi8 (__m128i __A)
{
  return (__m128i) __builtin_ia32_pmovusdb128_mask ((__v4si) __A,
      (__v16qi) _mm_undefined_si128 (), (__mmask8) -1);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtusepi32_storeu_epi8 (void * __P, __mmask8 __M, __m128i __A)
{
  __builtin_ia32_pmovusdb128mem_mask ((__v16qi *) __P, (__v4si) __A, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtusepi32_epi8 (__m128i __O, __mmask8 __M, __m128i __A)
{
  return (__m128i) __builtin_ia32_pmovusdb128_mask ((__v4si) __A,
      (__v16qi) __O, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_cvtusepi32_epi8 (__mmask8 __M, __m128i __A)
{
  return (__m128i) __builtin_ia32_pmovusdb128_mask ((__v4si) __A,
      (__v16qi) _mm_setzero_si128 (), __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_cvtusepi32_epi8 (__m256i __A)
{
  return (__m128i) __builtin_ia32_pmovusdb256_mask ((__v8si) __A,
      (__v16qi) _mm_undefined_si128 (), (__mmask8) -1);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvtusepi32_storeu_epi8 (void * __P, __mmask8 __M, __m256i __A)
{
  __builtin_ia32_pmovusdb256mem_mask ((__v16qi *) __P, (__v8si) __A, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvtusepi32_epi8 (__m128i __O, __mmask8 __M, __m256i __A)
{
  return (__m128i) __builtin_ia32_pmovusdb256_mask ((__v8si) __A,
      (__v16qi) __O, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_cvtusepi32_epi8 (__mmask8 __M, __m256i __A)
{
  return (__m128i) __builtin_ia32_pmovusdb256_mask ((__v8si) __A,
      (__v16qi) _mm_setzero_si128 (), __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtepi32_epi16 (__m128i __A)
{
  return (__m128i) __builtin_ia32_pmovdw128_mask ((__v4si) __A,
      (__v8hi) _mm_setzero_si128 (), (__mmask8) -1);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtepi32_storeu_epi16 (void * __P, __mmask8 __M, __m128i __A)
{
  __builtin_ia32_pmovdw128mem_mask ((__v8hi *) __P, (__v4si) __A, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtepi32_epi16 (__m128i __O, __mmask8 __M, __m128i __A)
{
  return (__m128i) __builtin_ia32_pmovdw128_mask ((__v4si) __A,
      (__v8hi) __O, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_cvtepi32_epi16 (__mmask8 __M, __m128i __A)
{
  return (__m128i) __builtin_ia32_pmovdw128_mask ((__v4si) __A,
      (__v8hi) _mm_setzero_si128 (), __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_cvtepi32_epi16 (__m256i __A)
{
  return (__m128i) __builtin_ia32_pmovdw256_mask ((__v8si) __A,
      (__v8hi) _mm_setzero_si128 (), (__mmask8) -1);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvtepi32_storeu_epi16 (void * __P, __mmask8 __M, __m256i __A)
{
  __builtin_ia32_pmovdw256mem_mask ((__v8hi *) __P, (__v8si) __A, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvtepi32_epi16 (__m128i __O, __mmask8 __M, __m256i __A)
{
  return (__m128i) __builtin_ia32_pmovdw256_mask ((__v8si) __A,
      (__v8hi) __O, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_cvtepi32_epi16 (__mmask8 __M, __m256i __A)
{
  return (__m128i) __builtin_ia32_pmovdw256_mask ((__v8si) __A,
      (__v8hi) _mm_setzero_si128 (), __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtsepi32_epi16 (__m128i __A)
{
  return (__m128i) __builtin_ia32_pmovsdw128_mask ((__v4si) __A,
      (__v8hi) _mm_setzero_si128 (), (__mmask8) -1);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtsepi32_storeu_epi16 (void * __P, __mmask8 __M, __m128i __A)
{
  __builtin_ia32_pmovsdw128mem_mask ((__v8hi *) __P, (__v4si) __A, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtsepi32_epi16 (__m128i __O, __mmask8 __M, __m128i __A)
{
  return (__m128i) __builtin_ia32_pmovsdw128_mask ((__v4si) __A,
      (__v8hi) __O, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_cvtsepi32_epi16 (__mmask8 __M, __m128i __A)
{
  return (__m128i) __builtin_ia32_pmovsdw128_mask ((__v4si) __A,
      (__v8hi) _mm_setzero_si128 (), __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_cvtsepi32_epi16 (__m256i __A)
{
  return (__m128i) __builtin_ia32_pmovsdw256_mask ((__v8si) __A,
      (__v8hi) _mm_undefined_si128 (), (__mmask8) -1);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvtsepi32_storeu_epi16 (void * __P, __mmask8 __M, __m256i __A)
{
  __builtin_ia32_pmovsdw256mem_mask ((__v8hi *) __P, (__v8si) __A, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvtsepi32_epi16 (__m128i __O, __mmask8 __M, __m256i __A)
{
  return (__m128i) __builtin_ia32_pmovsdw256_mask ((__v8si) __A,
      (__v8hi) __O, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_cvtsepi32_epi16 (__mmask8 __M, __m256i __A)
{
  return (__m128i) __builtin_ia32_pmovsdw256_mask ((__v8si) __A,
      (__v8hi) _mm_setzero_si128 (), __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtusepi32_epi16 (__m128i __A)
{
  return (__m128i) __builtin_ia32_pmovusdw128_mask ((__v4si) __A,
      (__v8hi) _mm_undefined_si128 (), (__mmask8) -1);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtusepi32_storeu_epi16 (void * __P, __mmask8 __M, __m128i __A)
{
  __builtin_ia32_pmovusdw128mem_mask ((__v8hi *) __P, (__v4si) __A, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtusepi32_epi16 (__m128i __O, __mmask8 __M, __m128i __A)
{
  return (__m128i) __builtin_ia32_pmovusdw128_mask ((__v4si) __A,
      (__v8hi) __O, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_cvtusepi32_epi16 (__mmask8 __M, __m128i __A)
{
  return (__m128i) __builtin_ia32_pmovusdw128_mask ((__v4si) __A,
      (__v8hi) _mm_setzero_si128 (), __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_cvtusepi32_epi16 (__m256i __A)
{
  return (__m128i) __builtin_ia32_pmovusdw256_mask ((__v8si) __A,
      (__v8hi) _mm_undefined_si128 (), (__mmask8) -1);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvtusepi32_storeu_epi16 (void * __P, __mmask8 __M, __m256i __A)
{
  __builtin_ia32_pmovusdw256mem_mask ((__v8hi *) __P, (__v8si) __A, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvtusepi32_epi16 (__m128i __O, __mmask8 __M, __m256i __A)
{
  return (__m128i) __builtin_ia32_pmovusdw256_mask ((__v8si) __A,
      (__v8hi) __O, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_cvtusepi32_epi16 (__mmask8 __M, __m256i __A)
{
  return (__m128i) __builtin_ia32_pmovusdw256_mask ((__v8si) __A,
      (__v8hi) _mm_setzero_si128 (), __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtepi64_epi8 (__m128i __A)
{
  return (__m128i) __builtin_ia32_pmovqb128_mask ((__v2di) __A,
      (__v16qi) _mm_undefined_si128 (), (__mmask8) -1);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtepi64_storeu_epi8 (void * __P, __mmask8 __M, __m128i __A)
{
  __builtin_ia32_pmovqb128mem_mask ((__v16qi *) __P, (__v2di) __A, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtepi64_epi8 (__m128i __O, __mmask8 __M, __m128i __A)
{
  return (__m128i) __builtin_ia32_pmovqb128_mask ((__v2di) __A,
      (__v16qi) __O, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_cvtepi64_epi8 (__mmask8 __M, __m128i __A)
{
  return (__m128i) __builtin_ia32_pmovqb128_mask ((__v2di) __A,
      (__v16qi) _mm_setzero_si128 (), __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_cvtepi64_epi8 (__m256i __A)
{
  return (__m128i) __builtin_ia32_pmovqb256_mask ((__v4di) __A,
      (__v16qi) _mm_undefined_si128 (), (__mmask8) -1);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvtepi64_storeu_epi8 (void * __P, __mmask8 __M, __m256i __A)
{
  __builtin_ia32_pmovqb256mem_mask ((__v16qi *) __P, (__v4di) __A, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvtepi64_epi8 (__m128i __O, __mmask8 __M, __m256i __A)
{
  return (__m128i) __builtin_ia32_pmovqb256_mask ((__v4di) __A,
      (__v16qi) __O, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_cvtepi64_epi8 (__mmask8 __M, __m256i __A)
{
  return (__m128i) __builtin_ia32_pmovqb256_mask ((__v4di) __A,
      (__v16qi) _mm_setzero_si128 (), __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtsepi64_epi8 (__m128i __A)
{
  return (__m128i) __builtin_ia32_pmovsqb128_mask ((__v2di) __A,
      (__v16qi) _mm_undefined_si128 (), (__mmask8) -1);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtsepi64_storeu_epi8 (void * __P, __mmask8 __M, __m128i __A)
{
  __builtin_ia32_pmovsqb128mem_mask ((__v16qi *) __P, (__v2di) __A, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtsepi64_epi8 (__m128i __O, __mmask8 __M, __m128i __A)
{
  return (__m128i) __builtin_ia32_pmovsqb128_mask ((__v2di) __A,
      (__v16qi) __O, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskz_cvtsepi64_epi8 (__mmask8 __M, __m128i __A)
{
  return (__m128i) __builtin_ia32_pmovsqb128_mask ((__v2di) __A,
      (__v16qi) _mm_setzero_si128 (), __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_cvtsepi64_epi8 (__m256i __A)
{
  return (__m128i) __builtin_ia32_pmovsqb256_mask ((__v4di) __A,
      (__v16qi) _mm_undefined_si128 (), (__mmask8) -1);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvtsepi64_storeu_epi8 (void * __P, __mmask8 __M, __m256i __A)
{
  __builtin_ia32_pmovsqb256mem_mask ((__v16qi *) __P, (__v4di) __A, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_mask_cvtsepi64_epi8 (__m128i __O, __mmask8 __M, __m256i __A)
{
  return (__m128i) __builtin_ia32_pmovsqb256_mask ((__v4di) __A,
      (__v16qi) __O, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm256_maskz_cvtsepi64_epi8 (__mmask8 __M, __m256i __A)
{
  return (__m128i) __builtin_ia32_pmovsqb256_mask ((__v4di) __A,
      (__v16qi) _mm_setzero_si128 (), __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtusepi64_epi8 (__m128i __A)
{
  return (__m128i) __builtin_ia32_pmovusqb128_mask ((__v2di) __A,
      (__v16qi) _mm_undefined_si128 (), (__mmask8) -1);
}

extern __inline void
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtusepi64_storeu_epi8 (void * __P, __mmask8 __M, __m128i __A)
{
  __builtin_ia32_pmovusqb128mem_mask ((__v16qi *) __P, (__v2di) __A, __M);
}

extern __inline __m128i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm_mask_cvtusepi64_epi8 (__m128i __O, __mmask8 __M, __m128i __A)
{
  return (__m128i) __builtin_ia32_pmovusqb128_mask ((__v2di) __A,
      (__v16qi) __O, __M);
}

2035 extern __inline __m128i
2036 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2037 _mm_maskz_cvtusepi64_epi8 (__mmask8 __M, __m128i __A)
2039 return (__m128i) __builtin_ia32_pmovusqb128_mask ((__v2di) __A,
2040 (__v16qi)
2041 _mm_setzero_si128 (),
2042 __M);
2045 extern __inline __m128i
2046 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2047 _mm256_cvtusepi64_epi8 (__m256i __A)
2049 return (__m128i) __builtin_ia32_pmovusqb256_mask ((__v4di) __A,
2050 (__v16qi)
2051 _mm_undefined_si128 (),
2052 (__mmask8) -1);
2055 extern __inline void
2056 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2057 _mm256_mask_cvtusepi64_storeu_epi8 (void * __P, __mmask8 __M, __m256i __A)
2059 __builtin_ia32_pmovusqb256mem_mask ((__v16qi *) __P, (__v4di) __A, __M);
2062 extern __inline __m128i
2063 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2064 _mm256_mask_cvtusepi64_epi8 (__m128i __O, __mmask8 __M, __m256i __A)
2066 return (__m128i) __builtin_ia32_pmovusqb256_mask ((__v4di) __A,
2067 (__v16qi) __O,
2068 __M);
2071 extern __inline __m128i
2072 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2073 _mm256_maskz_cvtusepi64_epi8 (__mmask8 __M, __m256i __A)
2075 return (__m128i) __builtin_ia32_pmovusqb256_mask ((__v4di) __A,
2076 (__v16qi)
2077 _mm_setzero_si128 (),
2078 __M);
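/* Illustrative usage sketch (not part of the original header): the three
   64-to-8-bit narrowing flavours differ only in how out-of-range values are
   handled (plain truncation, signed saturation, unsigned saturation), and
   each also exists as a masked truncating store.  Assumes -mavx512vl;
   narrow_q_to_b is a hypothetical helper name.  */
#include <immintrin.h>
#include <string.h>

void
narrow_q_to_b (const long long *src, unsigned char dst[12])
{
  __m256i q = _mm256_loadu_si256 ((const __m256i *) src);
  __m128i ssat = _mm256_cvtsepi64_epi8 (q);   /* clamp to [-128, 127] */
  __m128i usat = _mm256_cvtusepi64_epi8 (q);  /* clamp to [0, 255]    */
  /* The four converted bytes sit in the low 32 bits of each result.  */
  memcpy (dst + 4, &ssat, 4);
  memcpy (dst + 8, &usat, 4);
  /* Plain truncation, memory form: only the bytes whose mask bit is set
     (here all four) are written, to dst[0..3].  */
  _mm256_mask_cvtepi64_storeu_epi8 (dst, 0x0F, q);
}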
2081 extern __inline __m128i
2082 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2083 _mm_cvtepi64_epi16 (__m128i __A)
2085 return (__m128i) __builtin_ia32_pmovqw128_mask ((__v2di) __A,
2086 (__v8hi)
2087 _mm_undefined_si128 (),
2088 (__mmask8) -1);
2091 extern __inline void
2092 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2093 _mm_mask_cvtepi64_storeu_epi16 (void * __P, __mmask8 __M, __m128i __A)
2095 __builtin_ia32_pmovqw128mem_mask ((__v8hi *) __P, (__v2di) __A, __M);
2098 extern __inline __m128i
2099 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2100 _mm_mask_cvtepi64_epi16 (__m128i __O, __mmask8 __M, __m128i __A)
2102 return (__m128i) __builtin_ia32_pmovqw128_mask ((__v2di) __A,
2103 (__v8hi) __O,
2104 __M);
2107 extern __inline __m128i
2108 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2109 _mm_maskz_cvtepi64_epi16 (__mmask8 __M, __m128i __A)
2111 return (__m128i) __builtin_ia32_pmovqw128_mask ((__v2di) __A,
2112 (__v8hi)
2113 _mm_setzero_si128 (),
2114 __M);
2117 extern __inline __m128i
2118 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2119 _mm256_cvtepi64_epi16 (__m256i __A)
2121 return (__m128i) __builtin_ia32_pmovqw256_mask ((__v4di) __A,
2122 (__v8hi)
2123 _mm_undefined_si128 (),
2124 (__mmask8) -1);
2127 extern __inline void
2128 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2129 _mm256_mask_cvtepi64_storeu_epi16 (void * __P, __mmask8 __M, __m256i __A)
2131 __builtin_ia32_pmovqw256mem_mask ((__v8hi *) __P, (__v4di) __A, __M);
2134 extern __inline __m128i
2135 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2136 _mm256_mask_cvtepi64_epi16 (__m128i __O, __mmask8 __M, __m256i __A)
2138 return (__m128i) __builtin_ia32_pmovqw256_mask ((__v4di) __A,
2139 (__v8hi) __O, __M);
2142 extern __inline __m128i
2143 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2144 _mm256_maskz_cvtepi64_epi16 (__mmask8 __M, __m256i __A)
2146 return (__m128i) __builtin_ia32_pmovqw256_mask ((__v4di) __A,
2147 (__v8hi)
2148 _mm_setzero_si128 (),
2149 __M);
2152 extern __inline __m128i
2153 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2154 _mm_cvtsepi64_epi16 (__m128i __A)
2156 return (__m128i) __builtin_ia32_pmovsqw128_mask ((__v2di) __A,
2157 (__v8hi)
2158 _mm_undefined_si128 (),
2159 (__mmask8) -1);
2162 extern __inline void
2163 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2164 _mm_mask_cvtsepi64_storeu_epi16 (void * __P, __mmask8 __M, __m128i __A)
2166 __builtin_ia32_pmovsqw128mem_mask ((__v8hi *) __P, (__v2di) __A, __M);
2169 extern __inline __m128i
2170 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2171 _mm_mask_cvtsepi64_epi16 (__m128i __O, __mmask8 __M, __m128i __A)
2173 return (__m128i) __builtin_ia32_pmovsqw128_mask ((__v2di) __A,
2174 (__v8hi) __O, __M);
2177 extern __inline __m128i
2178 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2179 _mm_maskz_cvtsepi64_epi16 (__mmask8 __M, __m128i __A)
2181 return (__m128i) __builtin_ia32_pmovsqw128_mask ((__v2di) __A,
2182 (__v8hi)
2183 _mm_setzero_si128 (),
2184 __M);
2187 extern __inline __m128i
2188 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2189 _mm256_cvtsepi64_epi16 (__m256i __A)
2191 return (__m128i) __builtin_ia32_pmovsqw256_mask ((__v4di) __A,
2192 (__v8hi)
2193 _mm_undefined_si128 (),
2194 (__mmask8) -1);
2197 extern __inline void
2198 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2199 _mm256_mask_cvtsepi64_storeu_epi16 (void * __P, __mmask8 __M, __m256i __A)
2201 __builtin_ia32_pmovsqw256mem_mask ((__v8hi *) __P, (__v4di) __A, __M);
2204 extern __inline __m128i
2205 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2206 _mm256_mask_cvtsepi64_epi16 (__m128i __O, __mmask8 __M, __m256i __A)
2208 return (__m128i) __builtin_ia32_pmovsqw256_mask ((__v4di) __A,
2209 (__v8hi) __O, __M);
2212 extern __inline __m128i
2213 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2214 _mm256_maskz_cvtsepi64_epi16 (__mmask8 __M, __m256i __A)
2216 return (__m128i) __builtin_ia32_pmovsqw256_mask ((__v4di) __A,
2217 (__v8hi)
2218 _mm_setzero_si128 (),
2219 __M);
2222 extern __inline __m128i
2223 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2224 _mm_cvtusepi64_epi16 (__m128i __A)
2226 return (__m128i) __builtin_ia32_pmovusqw128_mask ((__v2di) __A,
2227 (__v8hi)
2228 _mm_undefined_si128 (),
2229 (__mmask8) -1);
2232 extern __inline void
2233 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2234 _mm_mask_cvtusepi64_storeu_epi16 (void * __P, __mmask8 __M, __m128i __A)
2236 __builtin_ia32_pmovusqw128mem_mask ((__v8hi *) __P, (__v2di) __A, __M);
2239 extern __inline __m128i
2240 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2241 _mm_mask_cvtusepi64_epi16 (__m128i __O, __mmask8 __M, __m128i __A)
2243 return (__m128i) __builtin_ia32_pmovusqw128_mask ((__v2di) __A,
2244 (__v8hi) __O, __M);
2247 extern __inline __m128i
2248 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2249 _mm_maskz_cvtusepi64_epi16 (__mmask8 __M, __m128i __A)
2251 return (__m128i) __builtin_ia32_pmovusqw128_mask ((__v2di) __A,
2252 (__v8hi)
2253 _mm_setzero_si128 (),
2254 __M);
2257 extern __inline __m128i
2258 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2259 _mm256_cvtusepi64_epi16 (__m256i __A)
2261 return (__m128i) __builtin_ia32_pmovusqw256_mask ((__v4di) __A,
2262 (__v8hi)
2263 _mm_undefined_si128 (),
2264 (__mmask8) -1);
2267 extern __inline void
2268 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2269 _mm256_mask_cvtusepi64_storeu_epi16 (void * __P, __mmask8 __M, __m256i __A)
2271 __builtin_ia32_pmovusqw256mem_mask ((__v8hi *) __P, (__v4di) __A, __M);
2274 extern __inline __m128i
2275 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2276 _mm256_mask_cvtusepi64_epi16 (__m128i __O, __mmask8 __M, __m256i __A)
2278 return (__m128i) __builtin_ia32_pmovusqw256_mask ((__v4di) __A,
2279 (__v8hi) __O, __M);
2282 extern __inline __m128i
2283 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2284 _mm256_maskz_cvtusepi64_epi16 (__mmask8 __M, __m256i __A)
2286 return (__m128i) __builtin_ia32_pmovusqw256_mask ((__v4di) __A,
2287 (__v8hi)
2288 _mm_setzero_si128 (),
2289 __M);
2292 extern __inline __m128i
2293 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2294 _mm_cvtepi64_epi32 (__m128i __A)
2296 return (__m128i) __builtin_ia32_pmovqd128_mask ((__v2di) __A,
2297 (__v4si)
2298 _mm_undefined_si128 (),
2299 (__mmask8) -1);
2302 extern __inline void
2303 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2304 _mm_mask_cvtepi64_storeu_epi32 (void * __P, __mmask8 __M, __m128i __A)
2306 __builtin_ia32_pmovqd128mem_mask ((__v4si *) __P, (__v2di) __A, __M);
2309 extern __inline __m128i
2310 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2311 _mm_mask_cvtepi64_epi32 (__m128i __O, __mmask8 __M, __m128i __A)
2313 return (__m128i) __builtin_ia32_pmovqd128_mask ((__v2di) __A,
2314 (__v4si) __O, __M);
2317 extern __inline __m128i
2318 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2319 _mm_maskz_cvtepi64_epi32 (__mmask8 __M, __m128i __A)
2321 return (__m128i) __builtin_ia32_pmovqd128_mask ((__v2di) __A,
2322 (__v4si)
2323 _mm_setzero_si128 (),
2324 __M);
2327 extern __inline __m128i
2328 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2329 _mm256_cvtepi64_epi32 (__m256i __A)
2331 return (__m128i) __builtin_ia32_pmovqd256_mask ((__v4di) __A,
2332 (__v4si)
2333 _mm_undefined_si128 (),
2334 (__mmask8) -1);
2337 extern __inline void
2338 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2339 _mm256_mask_cvtepi64_storeu_epi32 (void * __P, __mmask8 __M, __m256i __A)
2341 __builtin_ia32_pmovqd256mem_mask ((__v4si *) __P, (__v4di) __A, __M);
2344 extern __inline __m128i
2345 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2346 _mm256_mask_cvtepi64_epi32 (__m128i __O, __mmask8 __M, __m256i __A)
2348 return (__m128i) __builtin_ia32_pmovqd256_mask ((__v4di) __A,
2349 (__v4si) __O, __M);
2352 extern __inline __m128i
2353 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2354 _mm256_maskz_cvtepi64_epi32 (__mmask8 __M, __m256i __A)
2356 return (__m128i) __builtin_ia32_pmovqd256_mask ((__v4di) __A,
2357 (__v4si)
2358 _mm_setzero_si128 (),
2359 __M);
2362 extern __inline __m128i
2363 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2364 _mm_cvtsepi64_epi32 (__m128i __A)
2366 return (__m128i) __builtin_ia32_pmovsqd128_mask ((__v2di) __A,
2367 (__v4si)
2368 _mm_undefined_si128 (),
2369 (__mmask8) -1);
2372 extern __inline void
2373 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2374 _mm_mask_cvtsepi64_storeu_epi32 (void * __P, __mmask8 __M, __m128i __A)
2376 __builtin_ia32_pmovsqd128mem_mask ((__v4si *) __P, (__v2di) __A, __M);
2379 extern __inline __m128i
2380 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2381 _mm_mask_cvtsepi64_epi32 (__m128i __O, __mmask8 __M, __m128i __A)
2383 return (__m128i) __builtin_ia32_pmovsqd128_mask ((__v2di) __A,
2384 (__v4si) __O, __M);
2387 extern __inline __m128i
2388 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2389 _mm_maskz_cvtsepi64_epi32 (__mmask8 __M, __m128i __A)
2391 return (__m128i) __builtin_ia32_pmovsqd128_mask ((__v2di) __A,
2392 (__v4si)
2393 _mm_setzero_si128 (),
2394 __M);
2397 extern __inline __m128i
2398 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2399 _mm256_cvtsepi64_epi32 (__m256i __A)
2401 return (__m128i) __builtin_ia32_pmovsqd256_mask ((__v4di) __A,
2402 (__v4si)
2403 _mm_undefined_si128 (),
2404 (__mmask8) -1);
2407 extern __inline void
2408 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2409 _mm256_mask_cvtsepi64_storeu_epi32 (void * __P, __mmask8 __M, __m256i __A)
2411 __builtin_ia32_pmovsqd256mem_mask ((__v4si *) __P, (__v4di) __A, __M);
2414 extern __inline __m128i
2415 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2416 _mm256_mask_cvtsepi64_epi32 (__m128i __O, __mmask8 __M, __m256i __A)
2418 return (__m128i) __builtin_ia32_pmovsqd256_mask ((__v4di) __A,
2419 (__v4si) __O,
2420 __M);
2423 extern __inline __m128i
2424 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2425 _mm256_maskz_cvtsepi64_epi32 (__mmask8 __M, __m256i __A)
2427 return (__m128i) __builtin_ia32_pmovsqd256_mask ((__v4di) __A,
2428 (__v4si)
2429 _mm_setzero_si128 (),
2430 __M);
2433 extern __inline __m128i
2434 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2435 _mm_cvtusepi64_epi32 (__m128i __A)
2437 return (__m128i) __builtin_ia32_pmovusqd128_mask ((__v2di) __A,
2438 (__v4si)
2439 _mm_undefined_si128 (),
2440 (__mmask8) -1);
2443 extern __inline void
2444 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2445 _mm_mask_cvtusepi64_storeu_epi32 (void * __P, __mmask8 __M, __m128i __A)
2447 __builtin_ia32_pmovusqd128mem_mask ((__v4si *) __P, (__v2di) __A, __M);
2450 extern __inline __m128i
2451 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2452 _mm_mask_cvtusepi64_epi32 (__m128i __O, __mmask8 __M, __m128i __A)
2454 return (__m128i) __builtin_ia32_pmovusqd128_mask ((__v2di) __A,
2455 (__v4si) __O, __M);
2458 extern __inline __m128i
2459 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2460 _mm_maskz_cvtusepi64_epi32 (__mmask8 __M, __m128i __A)
2462 return (__m128i) __builtin_ia32_pmovusqd128_mask ((__v2di) __A,
2463 (__v4si)
2464 _mm_setzero_si128 (),
2465 __M);
2468 extern __inline __m128i
2469 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2470 _mm256_cvtusepi64_epi32 (__m256i __A)
2472 return (__m128i) __builtin_ia32_pmovusqd256_mask ((__v4di) __A,
2473 (__v4si)
2474 _mm_undefined_si128 (),
2475 (__mmask8) -1);
2478 extern __inline void
2479 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2480 _mm256_mask_cvtusepi64_storeu_epi32 (void * __P, __mmask8 __M, __m256i __A)
2482 __builtin_ia32_pmovusqd256mem_mask ((__v4si *) __P, (__v4di) __A, __M);
2485 extern __inline __m128i
2486 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2487 _mm256_mask_cvtusepi64_epi32 (__m128i __O, __mmask8 __M, __m256i __A)
2489 return (__m128i) __builtin_ia32_pmovusqd256_mask ((__v4di) __A,
2490 (__v4si) __O, __M);
2493 extern __inline __m128i
2494 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2495 _mm256_maskz_cvtusepi64_epi32 (__mmask8 __M, __m256i __A)
2497 return (__m128i) __builtin_ia32_pmovusqd256_mask ((__v4di) __A,
2498 (__v4si)
2499 _mm_setzero_si128 (),
2500 __M);
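/* Illustrative usage sketch (not part of the original header): packing four
   64-bit lanes down to 32 bits.  The saturating form clamps values outside
   the int32 range instead of silently wrapping, and the maskz variant zeroes
   unwanted lanes in the same instruction.  Assumes -mavx512vl; pack_q_to_d
   is a hypothetical helper name.  */
#include <immintrin.h>

__m128i
pack_q_to_d (__m256i counters, __mmask8 live)
{
  return _mm256_maskz_cvtsepi64_epi32 (live, counters);
}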
2503 extern __inline __m256
2504 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2505 _mm256_mask_broadcastss_ps (__m256 __O, __mmask8 __M, __m128 __A)
2507 return (__m256) __builtin_ia32_broadcastss256_mask ((__v4sf) __A,
2508 (__v8sf) __O,
2509 __M);
2512 extern __inline __m256
2513 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2514 _mm256_maskz_broadcastss_ps (__mmask8 __M, __m128 __A)
2516 return (__m256) __builtin_ia32_broadcastss256_mask ((__v4sf) __A,
2517 (__v8sf)
2518 _mm256_setzero_ps (),
2519 __M);
2522 extern __inline __m128
2523 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2524 _mm_mask_broadcastss_ps (__m128 __O, __mmask8 __M, __m128 __A)
2526 return (__m128) __builtin_ia32_broadcastss128_mask ((__v4sf) __A,
2527 (__v4sf) __O,
2528 __M);
2531 extern __inline __m128
2532 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2533 _mm_maskz_broadcastss_ps (__mmask8 __M, __m128 __A)
2535 return (__m128) __builtin_ia32_broadcastss128_mask ((__v4sf) __A,
2536 (__v4sf)
2537 _mm_setzero_ps (),
2538 __M);
2541 extern __inline __m256d
2542 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2543 _mm256_mask_broadcastsd_pd (__m256d __O, __mmask8 __M, __m128d __A)
2545 return (__m256d) __builtin_ia32_broadcastsd256_mask ((__v2df) __A,
2546 (__v4df) __O,
2547 __M);
2550 extern __inline __m256d
2551 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2552 _mm256_maskz_broadcastsd_pd (__mmask8 __M, __m128d __A)
2554 return (__m256d) __builtin_ia32_broadcastsd256_mask ((__v2df) __A,
2555 (__v4df)
2556 _mm256_setzero_pd (),
2557 __M);
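/* Illustrative usage sketch (not part of the original header): the masked
   broadcast forms splat a scalar into selected lanes while leaving the
   other lanes of the destination untouched.  Assumes -mavx512vl;
   refresh_rows is a hypothetical helper name.  */
#include <immintrin.h>

__m256
refresh_rows (__m256 acc, __mmask8 rows, float v)
{
  /* Broadcast V into the lanes selected by ROWS, keep ACC elsewhere.  */
  return _mm256_mask_broadcastss_ps (acc, rows, _mm_set_ss (v));
}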
2560 extern __inline __m256i
2561 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2562 _mm256_mask_broadcastd_epi32 (__m256i __O, __mmask8 __M, __m128i __A)
2564 return (__m256i) __builtin_ia32_pbroadcastd256_mask ((__v4si) __A,
2565 (__v8si) __O,
2566 __M);
2569 extern __inline __m256i
2570 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2571 _mm256_maskz_broadcastd_epi32 (__mmask8 __M, __m128i __A)
2573 return (__m256i) __builtin_ia32_pbroadcastd256_mask ((__v4si) __A,
2574 (__v8si)
2575 _mm256_setzero_si256 (),
2576 __M);
2579 extern __inline __m256i
2580 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2581 _mm256_mask_set1_epi32 (__m256i __O, __mmask8 __M, int __A)
2583 return (__m256i) __builtin_ia32_pbroadcastd256_gpr_mask (__A, (__v8si) __O,
2584 __M);
2587 extern __inline __m256i
2588 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2589 _mm256_maskz_set1_epi32 (__mmask8 __M, int __A)
2591 return (__m256i) __builtin_ia32_pbroadcastd256_gpr_mask (__A,
2592 (__v8si)
2593 _mm256_setzero_si256 (),
2594 __M);
2597 extern __inline __m128i
2598 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2599 _mm_mask_broadcastd_epi32 (__m128i __O, __mmask8 __M, __m128i __A)
2601 return (__m128i) __builtin_ia32_pbroadcastd128_mask ((__v4si) __A,
2602 (__v4si) __O,
2603 __M);
2606 extern __inline __m128i
2607 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2608 _mm_maskz_broadcastd_epi32 (__mmask8 __M, __m128i __A)
2610 return (__m128i) __builtin_ia32_pbroadcastd128_mask ((__v4si) __A,
2611 (__v4si)
2612 _mm_setzero_si128 (),
2613 __M);
2616 extern __inline __m128i
2617 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2618 _mm_mask_set1_epi32 (__m128i __O, __mmask8 __M, int __A)
2620 return (__m128i) __builtin_ia32_pbroadcastd128_gpr_mask (__A, (__v4si) __O,
2621 __M);
2624 extern __inline __m128i
2625 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2626 _mm_maskz_set1_epi32 (__mmask8 __M, int __A)
2628 return (__m128i)
2629 __builtin_ia32_pbroadcastd128_gpr_mask (__A,
2630 (__v4si) _mm_setzero_si128 (),
2631 __M);
2634 extern __inline __m256i
2635 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2636 _mm256_mask_broadcastq_epi64 (__m256i __O, __mmask8 __M, __m128i __A)
2638 return (__m256i) __builtin_ia32_pbroadcastq256_mask ((__v2di) __A,
2639 (__v4di) __O,
2640 __M);
2643 extern __inline __m256i
2644 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2645 _mm256_maskz_broadcastq_epi64 (__mmask8 __M, __m128i __A)
2647 return (__m256i) __builtin_ia32_pbroadcastq256_mask ((__v2di) __A,
2648 (__v4di)
2649 _mm256_setzero_si256 (),
2650 __M);
2653 extern __inline __m256i
2654 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2655 _mm256_mask_set1_epi64 (__m256i __O, __mmask8 __M, long long __A)
2657 return (__m256i) __builtin_ia32_pbroadcastq256_gpr_mask (__A, (__v4di) __O,
2658 __M);
2661 extern __inline __m256i
2662 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2663 _mm256_maskz_set1_epi64 (__mmask8 __M, long long __A)
2665 return (__m256i) __builtin_ia32_pbroadcastq256_gpr_mask (__A,
2666 (__v4di)
2667 _mm256_setzero_si256 (),
2668 __M);
2671 extern __inline __m128i
2672 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2673 _mm_mask_broadcastq_epi64 (__m128i __O, __mmask8 __M, __m128i __A)
2675 return (__m128i) __builtin_ia32_pbroadcastq128_mask ((__v2di) __A,
2676 (__v2di) __O,
2677 __M);
2680 extern __inline __m128i
2681 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2682 _mm_maskz_broadcastq_epi64 (__mmask8 __M, __m128i __A)
2684 return (__m128i) __builtin_ia32_pbroadcastq128_mask ((__v2di) __A,
2685 (__v2di)
2686 _mm_setzero_si128 (),
2687 __M);
2690 extern __inline __m128i
2691 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2692 _mm_mask_set1_epi64 (__m128i __O, __mmask8 __M, long long __A)
2694 return (__m128i) __builtin_ia32_pbroadcastq128_gpr_mask (__A, (__v2di) __O,
2695 __M);
2698 extern __inline __m128i
2699 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2700 _mm_maskz_set1_epi64 (__mmask8 __M, long long __A)
2702 return (__m128i)
2703 __builtin_ia32_pbroadcastq128_gpr_mask (__A,
2704 (__v2di) _mm_setzero_si128 (),
2705 __M);
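/* Illustrative usage sketch (not part of the original header): the set1
   forms broadcast directly from a general-purpose register, which is handy
   for overwriting selected lanes with a sentinel value.  Assumes
   -mavx512vl; mark_missing is a hypothetical helper name.  */
#include <immintrin.h>

__m256i
mark_missing (__m256i data, __mmask8 missing)
{
  /* Lanes flagged in MISSING become -1; the rest keep DATA.  */
  return _mm256_mask_set1_epi32 (data, missing, -1);
}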
2708 extern __inline __m256
2709 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2710 _mm256_broadcast_f32x4 (__m128 __A)
2712 return (__m256) __builtin_ia32_broadcastf32x4_256_mask ((__v4sf) __A,
2713 (__v8sf) _mm256_undefined_ps (),
2714 (__mmask8) -1);
2717 extern __inline __m256
2718 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2719 _mm256_mask_broadcast_f32x4 (__m256 __O, __mmask8 __M, __m128 __A)
2721 return (__m256) __builtin_ia32_broadcastf32x4_256_mask ((__v4sf) __A,
2722 (__v8sf) __O,
2723 __M);
2726 extern __inline __m256
2727 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2728 _mm256_maskz_broadcast_f32x4 (__mmask8 __M, __m128 __A)
2730 return (__m256) __builtin_ia32_broadcastf32x4_256_mask ((__v4sf) __A,
2731 (__v8sf)
2732 _mm256_setzero_ps (),
2733 __M);
2736 extern __inline __m256i
2737 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2738 _mm256_broadcast_i32x4 (__m128i __A)
2740 return (__m256i) __builtin_ia32_broadcasti32x4_256_mask ((__v4si)
2741 __A,
2742 (__v8si) _mm256_undefined_si256 (),
2743 (__mmask8) -1);
2746 extern __inline __m256i
2747 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2748 _mm256_mask_broadcast_i32x4 (__m256i __O, __mmask8 __M, __m128i __A)
2750 return (__m256i) __builtin_ia32_broadcasti32x4_256_mask ((__v4si)
2751 __A,
2752 (__v8si)
2753 __O, __M);
2756 extern __inline __m256i
2757 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2758 _mm256_maskz_broadcast_i32x4 (__mmask8 __M, __m128i __A)
2760 return (__m256i) __builtin_ia32_broadcasti32x4_256_mask ((__v4si)
2761 __A,
2762 (__v8si)
2763 _mm256_setzero_si256 (),
2764 __M);
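/* Illustrative usage sketch (not part of the original header): broadcasting
   one 128-bit block into both halves of a 256-bit vector, e.g. to apply the
   same four coefficients to two rows at once.  Assumes -mavx512vl;
   dup_coeffs is a hypothetical helper name.  */
#include <immintrin.h>

__m256
dup_coeffs (const float c[4])
{
  /* Result is { c[0], c[1], c[2], c[3], c[0], c[1], c[2], c[3] }.  */
  return _mm256_broadcast_f32x4 (_mm_loadu_ps (c));
}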
2767 extern __inline __m256i
2768 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2769 _mm256_mask_cvtepi8_epi32 (__m256i __W, __mmask8 __U, __m128i __A)
2771 return (__m256i) __builtin_ia32_pmovsxbd256_mask ((__v16qi) __A,
2772 (__v8si) __W,
2773 (__mmask8) __U);
2776 extern __inline __m256i
2777 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2778 _mm256_maskz_cvtepi8_epi32 (__mmask8 __U, __m128i __A)
2780 return (__m256i) __builtin_ia32_pmovsxbd256_mask ((__v16qi) __A,
2781 (__v8si)
2782 _mm256_setzero_si256 (),
2783 (__mmask8) __U);
2786 extern __inline __m128i
2787 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2788 _mm_mask_cvtepi8_epi32 (__m128i __W, __mmask8 __U, __m128i __A)
2790 return (__m128i) __builtin_ia32_pmovsxbd128_mask ((__v16qi) __A,
2791 (__v4si) __W,
2792 (__mmask8) __U);
2795 extern __inline __m128i
2796 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2797 _mm_maskz_cvtepi8_epi32 (__mmask8 __U, __m128i __A)
2799 return (__m128i) __builtin_ia32_pmovsxbd128_mask ((__v16qi) __A,
2800 (__v4si)
2801 _mm_setzero_si128 (),
2802 (__mmask8) __U);
2805 extern __inline __m256i
2806 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2807 _mm256_mask_cvtepi8_epi64 (__m256i __W, __mmask8 __U, __m128i __A)
2809 return (__m256i) __builtin_ia32_pmovsxbq256_mask ((__v16qi) __A,
2810 (__v4di) __W,
2811 (__mmask8) __U);
2814 extern __inline __m256i
2815 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2816 _mm256_maskz_cvtepi8_epi64 (__mmask8 __U, __m128i __A)
2818 return (__m256i) __builtin_ia32_pmovsxbq256_mask ((__v16qi) __A,
2819 (__v4di)
2820 _mm256_setzero_si256 (),
2821 (__mmask8) __U);
2824 extern __inline __m128i
2825 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2826 _mm_mask_cvtepi8_epi64 (__m128i __W, __mmask8 __U, __m128i __A)
2828 return (__m128i) __builtin_ia32_pmovsxbq128_mask ((__v16qi) __A,
2829 (__v2di) __W,
2830 (__mmask8) __U);
2833 extern __inline __m128i
2834 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2835 _mm_maskz_cvtepi8_epi64 (__mmask8 __U, __m128i __A)
2837 return (__m128i) __builtin_ia32_pmovsxbq128_mask ((__v16qi) __A,
2838 (__v2di)
2839 _mm_setzero_si128 (),
2840 (__mmask8) __U);
2843 extern __inline __m256i
2844 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2845 _mm256_mask_cvtepi16_epi32 (__m256i __W, __mmask8 __U, __m128i __A)
2847 return (__m256i) __builtin_ia32_pmovsxwd256_mask ((__v8hi) __A,
2848 (__v8si) __W,
2849 (__mmask8) __U);
2852 extern __inline __m256i
2853 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2854 _mm256_maskz_cvtepi16_epi32 (__mmask8 __U, __m128i __A)
2856 return (__m256i) __builtin_ia32_pmovsxwd256_mask ((__v8hi) __A,
2857 (__v8si)
2858 _mm256_setzero_si256 (),
2859 (__mmask8) __U);
2862 extern __inline __m128i
2863 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2864 _mm_mask_cvtepi16_epi32 (__m128i __W, __mmask8 __U, __m128i __A)
2866 return (__m128i) __builtin_ia32_pmovsxwd128_mask ((__v8hi) __A,
2867 (__v4si) __W,
2868 (__mmask8) __U);
2871 extern __inline __m128i
2872 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2873 _mm_maskz_cvtepi16_epi32 (__mmask8 __U, __m128i __A)
2875 return (__m128i) __builtin_ia32_pmovsxwd128_mask ((__v8hi) __A,
2876 (__v4si)
2877 _mm_setzero_si128 (),
2878 (__mmask8) __U);
2881 extern __inline __m256i
2882 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2883 _mm256_mask_cvtepi16_epi64 (__m256i __W, __mmask8 __U, __m128i __A)
2885 return (__m256i) __builtin_ia32_pmovsxwq256_mask ((__v8hi) __A,
2886 (__v4di) __W,
2887 (__mmask8) __U);
2890 extern __inline __m256i
2891 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2892 _mm256_maskz_cvtepi16_epi64 (__mmask8 __U, __m128i __A)
2894 return (__m256i) __builtin_ia32_pmovsxwq256_mask ((__v8hi) __A,
2895 (__v4di)
2896 _mm256_setzero_si256 (),
2897 (__mmask8) __U);
2900 extern __inline __m128i
2901 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2902 _mm_mask_cvtepi16_epi64 (__m128i __W, __mmask8 __U, __m128i __A)
2904 return (__m128i) __builtin_ia32_pmovsxwq128_mask ((__v8hi) __A,
2905 (__v2di) __W,
2906 (__mmask8) __U);
2909 extern __inline __m128i
2910 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2911 _mm_maskz_cvtepi16_epi64 (__mmask8 __U, __m128i __A)
2913 return (__m128i) __builtin_ia32_pmovsxwq128_mask ((__v8hi) __A,
2914 (__v2di)
2915 _mm_setzero_si128 (),
2916 (__mmask8) __U);
2919 extern __inline __m256i
2920 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2921 _mm256_mask_cvtepi32_epi64 (__m256i __W, __mmask8 __U, __m128i __X)
2923 return (__m256i) __builtin_ia32_pmovsxdq256_mask ((__v4si) __X,
2924 (__v4di) __W,
2925 (__mmask8) __U);
2928 extern __inline __m256i
2929 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2930 _mm256_maskz_cvtepi32_epi64 (__mmask8 __U, __m128i __X)
2932 return (__m256i) __builtin_ia32_pmovsxdq256_mask ((__v4si) __X,
2933 (__v4di)
2934 _mm256_setzero_si256 (),
2935 (__mmask8) __U);
2938 extern __inline __m128i
2939 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2940 _mm_mask_cvtepi32_epi64 (__m128i __W, __mmask8 __U, __m128i __X)
2942 return (__m128i) __builtin_ia32_pmovsxdq128_mask ((__v4si) __X,
2943 (__v2di) __W,
2944 (__mmask8) __U);
2947 extern __inline __m128i
2948 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2949 _mm_maskz_cvtepi32_epi64 (__mmask8 __U, __m128i __X)
2951 return (__m128i) __builtin_ia32_pmovsxdq128_mask ((__v4si) __X,
2952 (__v2di)
2953 _mm_setzero_si128 (),
2954 (__mmask8) __U);
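/* Illustrative usage sketch (not part of the original header): widening the
   low eight signed bytes of a vector to 32 bits while zeroing the lanes at
   and beyond a run-time element count.  Eight bytes are read from P
   regardless of VALID.  Assumes -mavx512vl; widen_s8 is a hypothetical
   helper name.  */
#include <immintrin.h>

__m256i
widen_s8 (const signed char *p, unsigned int valid /* 0..8 */)
{
  __m128i bytes = _mm_loadl_epi64 ((const __m128i *) p);  /* low 8 bytes */
  __mmask8 m = (__mmask8) ((1u << valid) - 1);
  /* Sign extension; lanes >= VALID are zeroed by the maskz form.  */
  return _mm256_maskz_cvtepi8_epi32 (m, bytes);
}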
2957 extern __inline __m256i
2958 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2959 _mm256_mask_cvtepu8_epi32 (__m256i __W, __mmask8 __U, __m128i __A)
2961 return (__m256i) __builtin_ia32_pmovzxbd256_mask ((__v16qi) __A,
2962 (__v8si) __W,
2963 (__mmask8) __U);
2966 extern __inline __m256i
2967 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2968 _mm256_maskz_cvtepu8_epi32 (__mmask8 __U, __m128i __A)
2970 return (__m256i) __builtin_ia32_pmovzxbd256_mask ((__v16qi) __A,
2971 (__v8si)
2972 _mm256_setzero_si256 (),
2973 (__mmask8) __U);
2976 extern __inline __m128i
2977 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2978 _mm_mask_cvtepu8_epi32 (__m128i __W, __mmask8 __U, __m128i __A)
2980 return (__m128i) __builtin_ia32_pmovzxbd128_mask ((__v16qi) __A,
2981 (__v4si) __W,
2982 (__mmask8) __U);
2985 extern __inline __m128i
2986 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2987 _mm_maskz_cvtepu8_epi32 (__mmask8 __U, __m128i __A)
2989 return (__m128i) __builtin_ia32_pmovzxbd128_mask ((__v16qi) __A,
2990 (__v4si)
2991 _mm_setzero_si128 (),
2992 (__mmask8) __U);
2995 extern __inline __m256i
2996 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
2997 _mm256_mask_cvtepu8_epi64 (__m256i __W, __mmask8 __U, __m128i __A)
2999 return (__m256i) __builtin_ia32_pmovzxbq256_mask ((__v16qi) __A,
3000 (__v4di) __W,
3001 (__mmask8) __U);
3004 extern __inline __m256i
3005 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3006 _mm256_maskz_cvtepu8_epi64 (__mmask8 __U, __m128i __A)
3008 return (__m256i) __builtin_ia32_pmovzxbq256_mask ((__v16qi) __A,
3009 (__v4di)
3010 _mm256_setzero_si256 (),
3011 (__mmask8) __U);
3014 extern __inline __m128i
3015 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3016 _mm_mask_cvtepu8_epi64 (__m128i __W, __mmask8 __U, __m128i __A)
3018 return (__m128i) __builtin_ia32_pmovzxbq128_mask ((__v16qi) __A,
3019 (__v2di) __W,
3020 (__mmask8) __U);
3023 extern __inline __m128i
3024 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3025 _mm_maskz_cvtepu8_epi64 (__mmask8 __U, __m128i __A)
3027 return (__m128i) __builtin_ia32_pmovzxbq128_mask ((__v16qi) __A,
3028 (__v2di)
3029 _mm_setzero_si128 (),
3030 (__mmask8) __U);
3033 extern __inline __m256i
3034 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3035 _mm256_mask_cvtepu16_epi32 (__m256i __W, __mmask8 __U, __m128i __A)
3037 return (__m256i) __builtin_ia32_pmovzxwd256_mask ((__v8hi) __A,
3038 (__v8si) __W,
3039 (__mmask8) __U);
3042 extern __inline __m256i
3043 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3044 _mm256_maskz_cvtepu16_epi32 (__mmask8 __U, __m128i __A)
3046 return (__m256i) __builtin_ia32_pmovzxwd256_mask ((__v8hi) __A,
3047 (__v8si)
3048 _mm256_setzero_si256 (),
3049 (__mmask8) __U);
3052 extern __inline __m128i
3053 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3054 _mm_mask_cvtepu16_epi32 (__m128i __W, __mmask8 __U, __m128i __A)
3056 return (__m128i) __builtin_ia32_pmovzxwd128_mask ((__v8hi) __A,
3057 (__v4si) __W,
3058 (__mmask8) __U);
3061 extern __inline __m128i
3062 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3063 _mm_maskz_cvtepu16_epi32 (__mmask8 __U, __m128i __A)
3065 return (__m128i) __builtin_ia32_pmovzxwd128_mask ((__v8hi) __A,
3066 (__v4si)
3067 _mm_setzero_si128 (),
3068 (__mmask8) __U);
3071 extern __inline __m256i
3072 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3073 _mm256_mask_cvtepu16_epi64 (__m256i __W, __mmask8 __U, __m128i __A)
3075 return (__m256i) __builtin_ia32_pmovzxwq256_mask ((__v8hi) __A,
3076 (__v4di) __W,
3077 (__mmask8) __U);
3080 extern __inline __m256i
3081 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3082 _mm256_maskz_cvtepu16_epi64 (__mmask8 __U, __m128i __A)
3084 return (__m256i) __builtin_ia32_pmovzxwq256_mask ((__v8hi) __A,
3085 (__v4di)
3086 _mm256_setzero_si256 (),
3087 (__mmask8) __U);
3090 extern __inline __m128i
3091 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3092 _mm_mask_cvtepu16_epi64 (__m128i __W, __mmask8 __U, __m128i __A)
3094 return (__m128i) __builtin_ia32_pmovzxwq128_mask ((__v8hi) __A,
3095 (__v2di) __W,
3096 (__mmask8) __U);
3099 extern __inline __m128i
3100 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3101 _mm_maskz_cvtepu16_epi64 (__mmask8 __U, __m128i __A)
3103 return (__m128i) __builtin_ia32_pmovzxwq128_mask ((__v8hi) __A,
3104 (__v2di)
3105 _mm_setzero_si128 (),
3106 (__mmask8) __U);
3109 extern __inline __m256i
3110 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3111 _mm256_mask_cvtepu32_epi64 (__m256i __W, __mmask8 __U, __m128i __X)
3113 return (__m256i) __builtin_ia32_pmovzxdq256_mask ((__v4si) __X,
3114 (__v4di) __W,
3115 (__mmask8) __U);
3118 extern __inline __m256i
3119 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3120 _mm256_maskz_cvtepu32_epi64 (__mmask8 __U, __m128i __X)
3122 return (__m256i) __builtin_ia32_pmovzxdq256_mask ((__v4si) __X,
3123 (__v4di)
3124 _mm256_setzero_si256 (),
3125 (__mmask8) __U);
3128 extern __inline __m128i
3129 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3130 _mm_mask_cvtepu32_epi64 (__m128i __W, __mmask8 __U, __m128i __X)
3132 return (__m128i) __builtin_ia32_pmovzxdq128_mask ((__v4si) __X,
3133 (__v2di) __W,
3134 (__mmask8) __U);
3137 extern __inline __m128i
3138 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3139 _mm_maskz_cvtepu32_epi64 (__mmask8 __U, __m128i __X)
3141 return (__m128i) __builtin_ia32_pmovzxdq128_mask ((__v4si) __X,
3142 (__v2di)
3143 _mm_setzero_si128 (),
3144 (__mmask8) __U);
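/* Illustrative usage sketch (not part of the original header):
   zero-extending eight unsigned 16-bit samples to 32 bits before doing
   wider arithmetic, with unwanted lanes cleared via the maskz form.
   Assumes -mavx512vl; widen_u16 is a hypothetical helper name.  */
#include <immintrin.h>

__m256i
widen_u16 (const unsigned short *p, __mmask8 keep)
{
  __m128i h = _mm_loadu_si128 ((const __m128i *) p);
  return _mm256_maskz_cvtepu16_epi32 (keep, h);
}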
3147 extern __inline __m256d
3148 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3149 _mm256_rcp14_pd (__m256d __A)
3151 return (__m256d) __builtin_ia32_rcp14pd256_mask ((__v4df) __A,
3152 (__v4df)
3153 _mm256_setzero_pd (),
3154 (__mmask8) -1);
3157 extern __inline __m256d
3158 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3159 _mm256_mask_rcp14_pd (__m256d __W, __mmask8 __U, __m256d __A)
3161 return (__m256d) __builtin_ia32_rcp14pd256_mask ((__v4df) __A,
3162 (__v4df) __W,
3163 (__mmask8) __U);
3166 extern __inline __m256d
3167 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3168 _mm256_maskz_rcp14_pd (__mmask8 __U, __m256d __A)
3170 return (__m256d) __builtin_ia32_rcp14pd256_mask ((__v4df) __A,
3171 (__v4df)
3172 _mm256_setzero_pd (),
3173 (__mmask8) __U);
3176 extern __inline __m128d
3177 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3178 _mm_rcp14_pd (__m128d __A)
3180 return (__m128d) __builtin_ia32_rcp14pd128_mask ((__v2df) __A,
3181 (__v2df)
3182 _mm_setzero_pd (),
3183 (__mmask8) -1);
3186 extern __inline __m128d
3187 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3188 _mm_mask_rcp14_pd (__m128d __W, __mmask8 __U, __m128d __A)
3190 return (__m128d) __builtin_ia32_rcp14pd128_mask ((__v2df) __A,
3191 (__v2df) __W,
3192 (__mmask8) __U);
3195 extern __inline __m128d
3196 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3197 _mm_maskz_rcp14_pd (__mmask8 __U, __m128d __A)
3199 return (__m128d) __builtin_ia32_rcp14pd128_mask ((__v2df) __A,
3200 (__v2df)
3201 _mm_setzero_pd (),
3202 (__mmask8) __U);
3205 extern __inline __m256
3206 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3207 _mm256_rcp14_ps (__m256 __A)
3209 return (__m256) __builtin_ia32_rcp14ps256_mask ((__v8sf) __A,
3210 (__v8sf)
3211 _mm256_setzero_ps (),
3212 (__mmask8) -1);
3215 extern __inline __m256
3216 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3217 _mm256_mask_rcp14_ps (__m256 __W, __mmask8 __U, __m256 __A)
3219 return (__m256) __builtin_ia32_rcp14ps256_mask ((__v8sf) __A,
3220 (__v8sf) __W,
3221 (__mmask8) __U);
3224 extern __inline __m256
3225 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3226 _mm256_maskz_rcp14_ps (__mmask8 __U, __m256 __A)
3228 return (__m256) __builtin_ia32_rcp14ps256_mask ((__v8sf) __A,
3229 (__v8sf)
3230 _mm256_setzero_ps (),
3231 (__mmask8) __U);
3234 extern __inline __m128
3235 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3236 _mm_rcp14_ps (__m128 __A)
3238 return (__m128) __builtin_ia32_rcp14ps128_mask ((__v4sf) __A,
3239 (__v4sf)
3240 _mm_setzero_ps (),
3241 (__mmask8) -1);
3244 extern __inline __m128
3245 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3246 _mm_mask_rcp14_ps (__m128 __W, __mmask8 __U, __m128 __A)
3248 return (__m128) __builtin_ia32_rcp14ps128_mask ((__v4sf) __A,
3249 (__v4sf) __W,
3250 (__mmask8) __U);
3253 extern __inline __m128
3254 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3255 _mm_maskz_rcp14_ps (__mmask8 __U, __m128 __A)
3257 return (__m128) __builtin_ia32_rcp14ps128_mask ((__v4sf) __A,
3258 (__v4sf)
3259 _mm_setzero_ps (),
3260 (__mmask8) __U);
3263 extern __inline __m256d
3264 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3265 _mm256_rsqrt14_pd (__m256d __A)
3267 return (__m256d) __builtin_ia32_rsqrt14pd256_mask ((__v4df) __A,
3268 (__v4df)
3269 _mm256_setzero_pd (),
3270 (__mmask8) -1);
3273 extern __inline __m256d
3274 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3275 _mm256_mask_rsqrt14_pd (__m256d __W, __mmask8 __U, __m256d __A)
3277 return (__m256d) __builtin_ia32_rsqrt14pd256_mask ((__v4df) __A,
3278 (__v4df) __W,
3279 (__mmask8) __U);
3282 extern __inline __m256d
3283 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3284 _mm256_maskz_rsqrt14_pd (__mmask8 __U, __m256d __A)
3286 return (__m256d) __builtin_ia32_rsqrt14pd256_mask ((__v4df) __A,
3287 (__v4df)
3288 _mm256_setzero_pd (),
3289 (__mmask8) __U);
3292 extern __inline __m128d
3293 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3294 _mm_rsqrt14_pd (__m128d __A)
3296 return (__m128d) __builtin_ia32_rsqrt14pd128_mask ((__v2df) __A,
3297 (__v2df)
3298 _mm_setzero_pd (),
3299 (__mmask8) -1);
3302 extern __inline __m128d
3303 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3304 _mm_mask_rsqrt14_pd (__m128d __W, __mmask8 __U, __m128d __A)
3306 return (__m128d) __builtin_ia32_rsqrt14pd128_mask ((__v2df) __A,
3307 (__v2df) __W,
3308 (__mmask8) __U);
3311 extern __inline __m128d
3312 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3313 _mm_maskz_rsqrt14_pd (__mmask8 __U, __m128d __A)
3315 return (__m128d) __builtin_ia32_rsqrt14pd128_mask ((__v2df) __A,
3316 (__v2df)
3317 _mm_setzero_pd (),
3318 (__mmask8) __U);
3321 extern __inline __m256
3322 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3323 _mm256_rsqrt14_ps (__m256 __A)
3325 return (__m256) __builtin_ia32_rsqrt14ps256_mask ((__v8sf) __A,
3326 (__v8sf)
3327 _mm256_setzero_ps (),
3328 (__mmask8) -1);
3331 extern __inline __m256
3332 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3333 _mm256_mask_rsqrt14_ps (__m256 __W, __mmask8 __U, __m256 __A)
3335 return (__m256) __builtin_ia32_rsqrt14ps256_mask ((__v8sf) __A,
3336 (__v8sf) __W,
3337 (__mmask8) __U);
3340 extern __inline __m256
3341 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3342 _mm256_maskz_rsqrt14_ps (__mmask8 __U, __m256 __A)
3344 return (__m256) __builtin_ia32_rsqrt14ps256_mask ((__v8sf) __A,
3345 (__v8sf)
3346 _mm256_setzero_ps (),
3347 (__mmask8) __U);
3350 extern __inline __m128
3351 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3352 _mm_rsqrt14_ps (__m128 __A)
3354 return (__m128) __builtin_ia32_rsqrt14ps128_mask ((__v4sf) __A,
3355 (__v4sf)
3356 _mm_setzero_ps (),
3357 (__mmask8) -1);
3360 extern __inline __m128
3361 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3362 _mm_mask_rsqrt14_ps (__m128 __W, __mmask8 __U, __m128 __A)
3364 return (__m128) __builtin_ia32_rsqrt14ps128_mask ((__v4sf) __A,
3365 (__v4sf) __W,
3366 (__mmask8) __U);
3369 extern __inline __m128
3370 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3371 _mm_maskz_rsqrt14_ps (__mmask8 __U, __m128 __A)
3373 return (__m128) __builtin_ia32_rsqrt14ps128_mask ((__v4sf) __A,
3374 (__v4sf)
3375 _mm_setzero_ps (),
3376 (__mmask8) __U);
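/* Illustrative usage sketch (not part of the original header): rcp14 and
   rsqrt14 return approximations with a relative error of at most 2^-14, so
   they are usually refined with one Newton-Raphson step when more accuracy
   is needed.  Assumes -mavx512vl; fast_rsqrt is a hypothetical helper.  */
#include <immintrin.h>

__m256
fast_rsqrt (__m256 x)
{
  __m256 y = _mm256_rsqrt14_ps (x);              /* ~14-bit estimate */
  /* One Newton-Raphson step: y = y * (1.5 - 0.5 * x * y * y).  */
  __m256 half_x = _mm256_mul_ps (x, _mm256_set1_ps (0.5f));
  __m256 t = _mm256_sub_ps (_mm256_set1_ps (1.5f),
                            _mm256_mul_ps (half_x, _mm256_mul_ps (y, y)));
  return _mm256_mul_ps (y, t);
}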
3379 extern __inline __m256d
3380 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3381 _mm256_mask_sqrt_pd (__m256d __W, __mmask8 __U, __m256d __A)
3383 return (__m256d) __builtin_ia32_sqrtpd256_mask ((__v4df) __A,
3384 (__v4df) __W,
3385 (__mmask8) __U);
3388 extern __inline __m256d
3389 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3390 _mm256_maskz_sqrt_pd (__mmask8 __U, __m256d __A)
3392 return (__m256d) __builtin_ia32_sqrtpd256_mask ((__v4df) __A,
3393 (__v4df)
3394 _mm256_setzero_pd (),
3395 (__mmask8) __U);
3398 extern __inline __m128d
3399 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3400 _mm_mask_sqrt_pd (__m128d __W, __mmask8 __U, __m128d __A)
3402 return (__m128d) __builtin_ia32_sqrtpd128_mask ((__v2df) __A,
3403 (__v2df) __W,
3404 (__mmask8) __U);
3407 extern __inline __m128d
3408 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3409 _mm_maskz_sqrt_pd (__mmask8 __U, __m128d __A)
3411 return (__m128d) __builtin_ia32_sqrtpd128_mask ((__v2df) __A,
3412 (__v2df)
3413 _mm_setzero_pd (),
3414 (__mmask8) __U);
3417 extern __inline __m256
3418 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3419 _mm256_mask_sqrt_ps (__m256 __W, __mmask8 __U, __m256 __A)
3421 return (__m256) __builtin_ia32_sqrtps256_mask ((__v8sf) __A,
3422 (__v8sf) __W,
3423 (__mmask8) __U);
3426 extern __inline __m256
3427 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3428 _mm256_maskz_sqrt_ps (__mmask8 __U, __m256 __A)
3430 return (__m256) __builtin_ia32_sqrtps256_mask ((__v8sf) __A,
3431 (__v8sf)
3432 _mm256_setzero_ps (),
3433 (__mmask8) __U);
3436 extern __inline __m128
3437 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3438 _mm_mask_sqrt_ps (__m128 __W, __mmask8 __U, __m128 __A)
3440 return (__m128) __builtin_ia32_sqrtps128_mask ((__v4sf) __A,
3441 (__v4sf) __W,
3442 (__mmask8) __U);
3445 extern __inline __m128
3446 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3447 _mm_maskz_sqrt_ps (__mmask8 __U, __m128 __A)
3449 return (__m128) __builtin_ia32_sqrtps128_mask ((__v4sf) __A,
3450 (__v4sf)
3451 _mm_setzero_ps (),
3452 (__mmask8) __U);
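/* Illustrative usage sketch (not part of the original header): taking the
   square root only where the input is non-negative, so the masked-off lanes
   come out as zero instead of NaN.  Assumes -mavx512vl; safe_sqrt is a
   hypothetical helper name.  */
#include <immintrin.h>

__m256d
safe_sqrt (__m256d x)
{
  __mmask8 nonneg = _mm256_cmp_pd_mask (x, _mm256_setzero_pd (), _CMP_GE_OQ);
  return _mm256_maskz_sqrt_pd (nonneg, x);
}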
3455 extern __inline __m256i
3456 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3457 _mm256_mask_add_epi32 (__m256i __W, __mmask8 __U, __m256i __A,
3458 __m256i __B)
3460 return (__m256i) __builtin_ia32_paddd256_mask ((__v8si) __A,
3461 (__v8si) __B,
3462 (__v8si) __W,
3463 (__mmask8) __U);
3466 extern __inline __m256i
3467 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3468 _mm256_maskz_add_epi32 (__mmask8 __U, __m256i __A, __m256i __B)
3470 return (__m256i) __builtin_ia32_paddd256_mask ((__v8si) __A,
3471 (__v8si) __B,
3472 (__v8si)
3473 _mm256_setzero_si256 (),
3474 (__mmask8) __U);
3477 extern __inline __m256i
3478 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3479 _mm256_mask_add_epi64 (__m256i __W, __mmask8 __U, __m256i __A,
3480 __m256i __B)
3482 return (__m256i) __builtin_ia32_paddq256_mask ((__v4di) __A,
3483 (__v4di) __B,
3484 (__v4di) __W,
3485 (__mmask8) __U);
3488 extern __inline __m256i
3489 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3490 _mm256_maskz_add_epi64 (__mmask8 __U, __m256i __A, __m256i __B)
3492 return (__m256i) __builtin_ia32_paddq256_mask ((__v4di) __A,
3493 (__v4di) __B,
3494 (__v4di)
3495 _mm256_setzero_si256 (),
3496 (__mmask8) __U);
3499 extern __inline __m256i
3500 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3501 _mm256_mask_sub_epi32 (__m256i __W, __mmask8 __U, __m256i __A,
3502 __m256i __B)
3504 return (__m256i) __builtin_ia32_psubd256_mask ((__v8si) __A,
3505 (__v8si) __B,
3506 (__v8si) __W,
3507 (__mmask8) __U);
3510 extern __inline __m256i
3511 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3512 _mm256_maskz_sub_epi32 (__mmask8 __U, __m256i __A, __m256i __B)
3514 return (__m256i) __builtin_ia32_psubd256_mask ((__v8si) __A,
3515 (__v8si) __B,
3516 (__v8si)
3517 _mm256_setzero_si256 (),
3518 (__mmask8) __U);
3521 extern __inline __m256i
3522 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3523 _mm256_mask_sub_epi64 (__m256i __W, __mmask8 __U, __m256i __A,
3524 __m256i __B)
3526 return (__m256i) __builtin_ia32_psubq256_mask ((__v4di) __A,
3527 (__v4di) __B,
3528 (__v4di) __W,
3529 (__mmask8) __U);
3532 extern __inline __m256i
3533 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3534 _mm256_maskz_sub_epi64 (__mmask8 __U, __m256i __A, __m256i __B)
3536 return (__m256i) __builtin_ia32_psubq256_mask ((__v4di) __A,
3537 (__v4di) __B,
3538 (__v4di)
3539 _mm256_setzero_si256 (),
3540 (__mmask8) __U);
3543 extern __inline __m128i
3544 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3545 _mm_mask_add_epi32 (__m128i __W, __mmask8 __U, __m128i __A,
3546 __m128i __B)
3548 return (__m128i) __builtin_ia32_paddd128_mask ((__v4si) __A,
3549 (__v4si) __B,
3550 (__v4si) __W,
3551 (__mmask8) __U);
3554 extern __inline __m128i
3555 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3556 _mm_maskz_add_epi32 (__mmask8 __U, __m128i __A, __m128i __B)
3558 return (__m128i) __builtin_ia32_paddd128_mask ((__v4si) __A,
3559 (__v4si) __B,
3560 (__v4si)
3561 _mm_setzero_si128 (),
3562 (__mmask8) __U);
3565 extern __inline __m128i
3566 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3567 _mm_mask_add_epi64 (__m128i __W, __mmask8 __U, __m128i __A,
3568 __m128i __B)
3570 return (__m128i) __builtin_ia32_paddq128_mask ((__v2di) __A,
3571 (__v2di) __B,
3572 (__v2di) __W,
3573 (__mmask8) __U);
3576 extern __inline __m128i
3577 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3578 _mm_maskz_add_epi64 (__mmask8 __U, __m128i __A, __m128i __B)
3580 return (__m128i) __builtin_ia32_paddq128_mask ((__v2di) __A,
3581 (__v2di) __B,
3582 (__v2di)
3583 _mm_setzero_si128 (),
3584 (__mmask8) __U);
3587 extern __inline __m128i
3588 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3589 _mm_mask_sub_epi32 (__m128i __W, __mmask8 __U, __m128i __A,
3590 __m128i __B)
3592 return (__m128i) __builtin_ia32_psubd128_mask ((__v4si) __A,
3593 (__v4si) __B,
3594 (__v4si) __W,
3595 (__mmask8) __U);
3598 extern __inline __m128i
3599 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3600 _mm_maskz_sub_epi32 (__mmask8 __U, __m128i __A, __m128i __B)
3602 return (__m128i) __builtin_ia32_psubd128_mask ((__v4si) __A,
3603 (__v4si) __B,
3604 (__v4si)
3605 _mm_setzero_si128 (),
3606 (__mmask8) __U);
3609 extern __inline __m128i
3610 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3611 _mm_mask_sub_epi64 (__m128i __W, __mmask8 __U, __m128i __A,
3612 __m128i __B)
3614 return (__m128i) __builtin_ia32_psubq128_mask ((__v2di) __A,
3615 (__v2di) __B,
3616 (__v2di) __W,
3617 (__mmask8) __U);
3620 extern __inline __m128i
3621 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3622 _mm_maskz_sub_epi64 (__mmask8 __U, __m128i __A, __m128i __B)
3624 return (__m128i) __builtin_ia32_psubq128_mask ((__v2di) __A,
3625 (__v2di) __B,
3626 (__v2di)
3627 _mm_setzero_si128 (),
3628 (__mmask8) __U);
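/* Illustrative usage sketch (not part of the original header): a conditional
   accumulate - only the lanes whose mask bit is set are updated, the others
   pass through unchanged, which removes the need for a separate blend after
   the addition.  Assumes -mavx512vl; accumulate_if is a hypothetical helper
   name.  */
#include <immintrin.h>

__m256i
accumulate_if (__m256i acc, __mmask8 cond, __m256i delta)
{
  /* acc[i] + delta[i] where bit i of COND is set, acc[i] otherwise.  */
  return _mm256_mask_add_epi32 (acc, cond, acc, delta);
}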
3631 extern __inline __m256
3632 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3633 _mm256_getexp_ps (__m256 __A)
3635 return (__m256) __builtin_ia32_getexpps256_mask ((__v8sf) __A,
3636 (__v8sf)
3637 _mm256_setzero_ps (),
3638 (__mmask8) -1);
3641 extern __inline __m256
3642 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3643 _mm256_mask_getexp_ps (__m256 __W, __mmask8 __U, __m256 __A)
3645 return (__m256) __builtin_ia32_getexpps256_mask ((__v8sf) __A,
3646 (__v8sf) __W,
3647 (__mmask8) __U);
3650 extern __inline __m256
3651 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3652 _mm256_maskz_getexp_ps (__mmask8 __U, __m256 __A)
3654 return (__m256) __builtin_ia32_getexpps256_mask ((__v8sf) __A,
3655 (__v8sf)
3656 _mm256_setzero_ps (),
3657 (__mmask8) __U);
3660 extern __inline __m256d
3661 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3662 _mm256_getexp_pd (__m256d __A)
3664 return (__m256d) __builtin_ia32_getexppd256_mask ((__v4df) __A,
3665 (__v4df)
3666 _mm256_setzero_pd (),
3667 (__mmask8) -1);
3670 extern __inline __m256d
3671 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3672 _mm256_mask_getexp_pd (__m256d __W, __mmask8 __U, __m256d __A)
3674 return (__m256d) __builtin_ia32_getexppd256_mask ((__v4df) __A,
3675 (__v4df) __W,
3676 (__mmask8) __U);
3679 extern __inline __m256d
3680 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3681 _mm256_maskz_getexp_pd (__mmask8 __U, __m256d __A)
3683 return (__m256d) __builtin_ia32_getexppd256_mask ((__v4df) __A,
3684 (__v4df)
3685 _mm256_setzero_pd (),
3686 (__mmask8) __U);
3689 extern __inline __m128
3690 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3691 _mm_getexp_ps (__m128 __A)
3693 return (__m128) __builtin_ia32_getexpps128_mask ((__v4sf) __A,
3694 (__v4sf)
3695 _mm_setzero_ps (),
3696 (__mmask8) -1);
3699 extern __inline __m128
3700 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3701 _mm_mask_getexp_ps (__m128 __W, __mmask8 __U, __m128 __A)
3703 return (__m128) __builtin_ia32_getexpps128_mask ((__v4sf) __A,
3704 (__v4sf) __W,
3705 (__mmask8) __U);
3708 extern __inline __m128
3709 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3710 _mm_maskz_getexp_ps (__mmask8 __U, __m128 __A)
3712 return (__m128) __builtin_ia32_getexpps128_mask ((__v4sf) __A,
3713 (__v4sf)
3714 _mm_setzero_ps (),
3715 (__mmask8) __U);
3718 extern __inline __m128d
3719 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3720 _mm_getexp_pd (__m128d __A)
3722 return (__m128d) __builtin_ia32_getexppd128_mask ((__v2df) __A,
3723 (__v2df)
3724 _mm_setzero_pd (),
3725 (__mmask8) -1);
3728 extern __inline __m128d
3729 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3730 _mm_mask_getexp_pd (__m128d __W, __mmask8 __U, __m128d __A)
3732 return (__m128d) __builtin_ia32_getexppd128_mask ((__v2df) __A,
3733 (__v2df) __W,
3734 (__mmask8) __U);
3737 extern __inline __m128d
3738 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3739 _mm_maskz_getexp_pd (__mmask8 __U, __m128d __A)
3741 return (__m128d) __builtin_ia32_getexppd128_mask ((__v2df) __A,
3742 (__v2df)
3743 _mm_setzero_pd (),
3744 (__mmask8) __U);
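/* Illustrative usage sketch (not part of the original header): getexp
   extracts floor(log2(|x|)) - the unbiased exponent - as a floating-point
   value, e.g. 8.0 -> 3.0 and 0.75 -> -1.0.  It pairs naturally with the
   scalef intrinsics further down.  Assumes -mavx512vl; exponent_of is a
   hypothetical helper name.  */
#include <immintrin.h>

__m256d
exponent_of (__m256d x)
{
  return _mm256_getexp_pd (x);
}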
3747 extern __inline __m256i
3748 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3749 _mm256_mask_srl_epi32 (__m256i __W, __mmask8 __U, __m256i __A,
3750 __m128i __B)
3752 return (__m256i) __builtin_ia32_psrld256_mask ((__v8si) __A,
3753 (__v4si) __B,
3754 (__v8si) __W,
3755 (__mmask8) __U);
3758 extern __inline __m256i
3759 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3760 _mm256_maskz_srl_epi32 (__mmask8 __U, __m256i __A, __m128i __B)
3762 return (__m256i) __builtin_ia32_psrld256_mask ((__v8si) __A,
3763 (__v4si) __B,
3764 (__v8si)
3765 _mm256_setzero_si256 (),
3766 (__mmask8) __U);
3769 extern __inline __m128i
3770 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3771 _mm_mask_srl_epi32 (__m128i __W, __mmask8 __U, __m128i __A,
3772 __m128i __B)
3774 return (__m128i) __builtin_ia32_psrld128_mask ((__v4si) __A,
3775 (__v4si) __B,
3776 (__v4si) __W,
3777 (__mmask8) __U);
3780 extern __inline __m128i
3781 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3782 _mm_maskz_srl_epi32 (__mmask8 __U, __m128i __A, __m128i __B)
3784 return (__m128i) __builtin_ia32_psrld128_mask ((__v4si) __A,
3785 (__v4si) __B,
3786 (__v4si)
3787 _mm_setzero_si128 (),
3788 (__mmask8) __U);
3791 extern __inline __m256i
3792 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3793 _mm256_mask_srl_epi64 (__m256i __W, __mmask8 __U, __m256i __A,
3794 __m128i __B)
3796 return (__m256i) __builtin_ia32_psrlq256_mask ((__v4di) __A,
3797 (__v2di) __B,
3798 (__v4di) __W,
3799 (__mmask8) __U);
3802 extern __inline __m256i
3803 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3804 _mm256_maskz_srl_epi64 (__mmask8 __U, __m256i __A, __m128i __B)
3806 return (__m256i) __builtin_ia32_psrlq256_mask ((__v4di) __A,
3807 (__v2di) __B,
3808 (__v4di)
3809 _mm256_setzero_si256 (),
3810 (__mmask8) __U);
3813 extern __inline __m128i
3814 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3815 _mm_mask_srl_epi64 (__m128i __W, __mmask8 __U, __m128i __A,
3816 __m128i __B)
3818 return (__m128i) __builtin_ia32_psrlq128_mask ((__v2di) __A,
3819 (__v2di) __B,
3820 (__v2di) __W,
3821 (__mmask8) __U);
3824 extern __inline __m128i
3825 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3826 _mm_maskz_srl_epi64 (__mmask8 __U, __m128i __A, __m128i __B)
3828 return (__m128i) __builtin_ia32_psrlq128_mask ((__v2di) __A,
3829 (__v2di) __B,
3830 (__v2di)
3831 _mm_setzero_si128 (),
3832 (__mmask8) __U);
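/* Illustrative usage sketch (not part of the original header): the srl forms
   take a single shift count from the low 64 bits of an __m128i and apply it
   to every lane; with the mask form, lanes whose bit is clear keep their old
   value.  Assumes -mavx512vl; shift_some is a hypothetical helper name.  */
#include <immintrin.h>

__m256i
shift_some (__m256i v, __mmask8 lanes, unsigned int count)
{
  __m128i c = _mm_cvtsi32_si128 ((int) count);  /* count in low 64 bits */
  return _mm256_mask_srl_epi32 (v, lanes, v, c);
}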
3835 extern __inline __m256i
3836 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3837 _mm256_mask_and_epi32 (__m256i __W, __mmask8 __U, __m256i __A,
3838 __m256i __B)
3840 return (__m256i) __builtin_ia32_pandd256_mask ((__v8si) __A,
3841 (__v8si) __B,
3842 (__v8si) __W,
3843 (__mmask8) __U);
3846 extern __inline __m256i
3847 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3848 _mm256_maskz_and_epi32 (__mmask8 __U, __m256i __A, __m256i __B)
3850 return (__m256i) __builtin_ia32_pandd256_mask ((__v8si) __A,
3851 (__v8si) __B,
3852 (__v8si)
3853 _mm256_setzero_si256 (),
3854 (__mmask8) __U);
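/* vscalefpd/vscalefps: each result element is __A * 2^floor(__B),
   e.g. _mm256_scalef_pd applied to 3.0 and 4.0 in every lane gives
   48.0 in every lane.  */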
3857 extern __inline __m256d
3858 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3859 _mm256_scalef_pd (__m256d __A, __m256d __B)
3861 return (__m256d) __builtin_ia32_scalefpd256_mask ((__v4df) __A,
3862 (__v4df) __B,
3863 (__v4df)
3864 _mm256_setzero_pd (),
3865 (__mmask8) -1);
3868 extern __inline __m256d
3869 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3870 _mm256_mask_scalef_pd (__m256d __W, __mmask8 __U, __m256d __A,
3871 __m256d __B)
3873 return (__m256d) __builtin_ia32_scalefpd256_mask ((__v4df) __A,
3874 (__v4df) __B,
3875 (__v4df) __W,
3876 (__mmask8) __U);
3879 extern __inline __m256d
3880 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3881 _mm256_maskz_scalef_pd (__mmask8 __U, __m256d __A, __m256d __B)
3883 return (__m256d) __builtin_ia32_scalefpd256_mask ((__v4df) __A,
3884 (__v4df) __B,
3885 (__v4df)
3886 _mm256_setzero_pd (),
3887 (__mmask8) __U);
3890 extern __inline __m256
3891 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3892 _mm256_scalef_ps (__m256 __A, __m256 __B)
3894 return (__m256) __builtin_ia32_scalefps256_mask ((__v8sf) __A,
3895 (__v8sf) __B,
3896 (__v8sf)
3897 _mm256_setzero_ps (),
3898 (__mmask8) -1);
3901 extern __inline __m256
3902 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3903 _mm256_mask_scalef_ps (__m256 __W, __mmask8 __U, __m256 __A,
3904 __m256 __B)
3906 return (__m256) __builtin_ia32_scalefps256_mask ((__v8sf) __A,
3907 (__v8sf) __B,
3908 (__v8sf) __W,
3909 (__mmask8) __U);
3912 extern __inline __m256
3913 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3914 _mm256_maskz_scalef_ps (__mmask8 __U, __m256 __A, __m256 __B)
3916 return (__m256) __builtin_ia32_scalefps256_mask ((__v8sf) __A,
3917 (__v8sf) __B,
3918 (__v8sf)
3919 _mm256_setzero_ps (),
3920 (__mmask8) __U);
3923 extern __inline __m128d
3924 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3925 _mm_scalef_pd (__m128d __A, __m128d __B)
3927 return (__m128d) __builtin_ia32_scalefpd128_mask ((__v2df) __A,
3928 (__v2df) __B,
3929 (__v2df)
3930 _mm_setzero_pd (),
3931 (__mmask8) -1);
3934 extern __inline __m128d
3935 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3936 _mm_mask_scalef_pd (__m128d __W, __mmask8 __U, __m128d __A,
3937 __m128d __B)
3939 return (__m128d) __builtin_ia32_scalefpd128_mask ((__v2df) __A,
3940 (__v2df) __B,
3941 (__v2df) __W,
3942 (__mmask8) __U);
3945 extern __inline __m128d
3946 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3947 _mm_maskz_scalef_pd (__mmask8 __U, __m128d __A, __m128d __B)
3949 return (__m128d) __builtin_ia32_scalefpd128_mask ((__v2df) __A,
3950 (__v2df) __B,
3951 (__v2df)
3952 _mm_setzero_pd (),
3953 (__mmask8) __U);
3956 extern __inline __m128
3957 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3958 _mm_scalef_ps (__m128 __A, __m128 __B)
3960 return (__m128) __builtin_ia32_scalefps128_mask ((__v4sf) __A,
3961 (__v4sf) __B,
3962 (__v4sf)
3963 _mm_setzero_ps (),
3964 (__mmask8) -1);
3967 extern __inline __m128
3968 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3969 _mm_mask_scalef_ps (__m128 __W, __mmask8 __U, __m128 __A, __m128 __B)
3971 return (__m128) __builtin_ia32_scalefps128_mask ((__v4sf) __A,
3972 (__v4sf) __B,
3973 (__v4sf) __W,
3974 (__mmask8) __U);
3977 extern __inline __m128
3978 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3979 _mm_maskz_scalef_ps (__mmask8 __U, __m128 __A, __m128 __B)
3981 return (__m128) __builtin_ia32_scalefps128_mask ((__v4sf) __A,
3982 (__v4sf) __B,
3983 (__v4sf)
3984 _mm_setzero_ps (),
3985 (__mmask8) __U);
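/* Masked fused multiply-add, __A * __B + __C.  The _mask_ forms take
   the lanes not selected by __U from __A, the _mask3_ forms take them
   from __C, and the _maskz_ forms zero them.  */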
3988 extern __inline __m256d
3989 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
3990 _mm256_mask_fmadd_pd (__m256d __A, __mmask8 __U, __m256d __B,
3991 __m256d __C)
3993 return (__m256d) __builtin_ia32_vfmaddpd256_mask ((__v4df) __A,
3994 (__v4df) __B,
3995 (__v4df) __C,
3996 (__mmask8) __U);
3999 extern __inline __m256d
4000 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4001 _mm256_mask3_fmadd_pd (__m256d __A, __m256d __B, __m256d __C,
4002 __mmask8 __U)
4004 return (__m256d) __builtin_ia32_vfmaddpd256_mask3 ((__v4df) __A,
4005 (__v4df) __B,
4006 (__v4df) __C,
4007 (__mmask8) __U);
4010 extern __inline __m256d
4011 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4012 _mm256_maskz_fmadd_pd (__mmask8 __U, __m256d __A, __m256d __B,
4013 __m256d __C)
4015 return (__m256d) __builtin_ia32_vfmaddpd256_maskz ((__v4df) __A,
4016 (__v4df) __B,
4017 (__v4df) __C,
4018 (__mmask8) __U);
4021 extern __inline __m128d
4022 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4023 _mm_mask_fmadd_pd (__m128d __A, __mmask8 __U, __m128d __B, __m128d __C)
4025 return (__m128d) __builtin_ia32_vfmaddpd128_mask ((__v2df) __A,
4026 (__v2df) __B,
4027 (__v2df) __C,
4028 (__mmask8) __U);
4031 extern __inline __m128d
4032 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4033 _mm_mask3_fmadd_pd (__m128d __A, __m128d __B, __m128d __C,
4034 __mmask8 __U)
4036 return (__m128d) __builtin_ia32_vfmaddpd128_mask3 ((__v2df) __A,
4037 (__v2df) __B,
4038 (__v2df) __C,
4039 (__mmask8) __U);
4042 extern __inline __m128d
4043 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4044 _mm_maskz_fmadd_pd (__mmask8 __U, __m128d __A, __m128d __B,
4045 __m128d __C)
4047 return (__m128d) __builtin_ia32_vfmaddpd128_maskz ((__v2df) __A,
4048 (__v2df) __B,
4049 (__v2df) __C,
4050 (__mmask8) __U);
4053 extern __inline __m256
4054 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4055 _mm256_mask_fmadd_ps (__m256 __A, __mmask8 __U, __m256 __B, __m256 __C)
4057 return (__m256) __builtin_ia32_vfmaddps256_mask ((__v8sf) __A,
4058 (__v8sf) __B,
4059 (__v8sf) __C,
4060 (__mmask8) __U);
4063 extern __inline __m256
4064 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4065 _mm256_mask3_fmadd_ps (__m256 __A, __m256 __B, __m256 __C,
4066 __mmask8 __U)
4068 return (__m256) __builtin_ia32_vfmaddps256_mask3 ((__v8sf) __A,
4069 (__v8sf) __B,
4070 (__v8sf) __C,
4071 (__mmask8) __U);
4074 extern __inline __m256
4075 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4076 _mm256_maskz_fmadd_ps (__mmask8 __U, __m256 __A, __m256 __B,
4077 __m256 __C)
4079 return (__m256) __builtin_ia32_vfmaddps256_maskz ((__v8sf) __A,
4080 (__v8sf) __B,
4081 (__v8sf) __C,
4082 (__mmask8) __U);
4085 extern __inline __m128
4086 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4087 _mm_mask_fmadd_ps (__m128 __A, __mmask8 __U, __m128 __B, __m128 __C)
4089 return (__m128) __builtin_ia32_vfmaddps128_mask ((__v4sf) __A,
4090 (__v4sf) __B,
4091 (__v4sf) __C,
4092 (__mmask8) __U);
4095 extern __inline __m128
4096 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4097 _mm_mask3_fmadd_ps (__m128 __A, __m128 __B, __m128 __C, __mmask8 __U)
4099 return (__m128) __builtin_ia32_vfmaddps128_mask3 ((__v4sf) __A,
4100 (__v4sf) __B,
4101 (__v4sf) __C,
4102 (__mmask8) __U);
4105 extern __inline __m128
4106 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4107 _mm_maskz_fmadd_ps (__mmask8 __U, __m128 __A, __m128 __B, __m128 __C)
4109 return (__m128) __builtin_ia32_vfmaddps128_maskz ((__v4sf) __A,
4110 (__v4sf) __B,
4111 (__v4sf) __C,
4112 (__mmask8) __U);
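/* Masked fused multiply-subtract, __A * __B - __C, with the same
   masking conventions as fmadd above.  */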
4115 extern __inline __m256d
4116 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4117 _mm256_mask_fmsub_pd (__m256d __A, __mmask8 __U, __m256d __B,
4118 __m256d __C)
4120 return (__m256d) __builtin_ia32_vfmaddpd256_mask ((__v4df) __A,
4121 (__v4df) __B,
4122 -(__v4df) __C,
4123 (__mmask8) __U);
4126 extern __inline __m256d
4127 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4128 _mm256_mask3_fmsub_pd (__m256d __A, __m256d __B, __m256d __C,
4129 __mmask8 __U)
4131 return (__m256d) __builtin_ia32_vfmsubpd256_mask3 ((__v4df) __A,
4132 (__v4df) __B,
4133 (__v4df) __C,
4134 (__mmask8) __U);
4137 extern __inline __m256d
4138 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4139 _mm256_maskz_fmsub_pd (__mmask8 __U, __m256d __A, __m256d __B,
4140 __m256d __C)
4142 return (__m256d) __builtin_ia32_vfmaddpd256_maskz ((__v4df) __A,
4143 (__v4df) __B,
4144 -(__v4df) __C,
4145 (__mmask8) __U);
4148 extern __inline __m128d
4149 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4150 _mm_mask_fmsub_pd (__m128d __A, __mmask8 __U, __m128d __B, __m128d __C)
4152 return (__m128d) __builtin_ia32_vfmaddpd128_mask ((__v2df) __A,
4153 (__v2df) __B,
4154 -(__v2df) __C,
4155 (__mmask8) __U);
4158 extern __inline __m128d
4159 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4160 _mm_mask3_fmsub_pd (__m128d __A, __m128d __B, __m128d __C,
4161 __mmask8 __U)
4163 return (__m128d) __builtin_ia32_vfmsubpd128_mask3 ((__v2df) __A,
4164 (__v2df) __B,
4165 (__v2df) __C,
4166 (__mmask8) __U);
4169 extern __inline __m128d
4170 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4171 _mm_maskz_fmsub_pd (__mmask8 __U, __m128d __A, __m128d __B,
4172 __m128d __C)
4174 return (__m128d) __builtin_ia32_vfmaddpd128_maskz ((__v2df) __A,
4175 (__v2df) __B,
4176 -(__v2df) __C,
4177 (__mmask8) __U);
4180 extern __inline __m256
4181 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4182 _mm256_mask_fmsub_ps (__m256 __A, __mmask8 __U, __m256 __B, __m256 __C)
4184 return (__m256) __builtin_ia32_vfmaddps256_mask ((__v8sf) __A,
4185 (__v8sf) __B,
4186 -(__v8sf) __C,
4187 (__mmask8) __U);
4190 extern __inline __m256
4191 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4192 _mm256_mask3_fmsub_ps (__m256 __A, __m256 __B, __m256 __C,
4193 __mmask8 __U)
4195 return (__m256) __builtin_ia32_vfmsubps256_mask3 ((__v8sf) __A,
4196 (__v8sf) __B,
4197 (__v8sf) __C,
4198 (__mmask8) __U);
4201 extern __inline __m256
4202 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4203 _mm256_maskz_fmsub_ps (__mmask8 __U, __m256 __A, __m256 __B,
4204 __m256 __C)
4206 return (__m256) __builtin_ia32_vfmaddps256_maskz ((__v8sf) __A,
4207 (__v8sf) __B,
4208 -(__v8sf) __C,
4209 (__mmask8) __U);
4212 extern __inline __m128
4213 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4214 _mm_mask_fmsub_ps (__m128 __A, __mmask8 __U, __m128 __B, __m128 __C)
4216 return (__m128) __builtin_ia32_vfmaddps128_mask ((__v4sf) __A,
4217 (__v4sf) __B,
4218 -(__v4sf) __C,
4219 (__mmask8) __U);
4222 extern __inline __m128
4223 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4224 _mm_mask3_fmsub_ps (__m128 __A, __m128 __B, __m128 __C, __mmask8 __U)
4226 return (__m128) __builtin_ia32_vfmsubps128_mask3 ((__v4sf) __A,
4227 (__v4sf) __B,
4228 (__v4sf) __C,
4229 (__mmask8) __U);
4232 extern __inline __m128
4233 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4234 _mm_maskz_fmsub_ps (__mmask8 __U, __m128 __A, __m128 __B, __m128 __C)
4236 return (__m128) __builtin_ia32_vfmaddps128_maskz ((__v4sf) __A,
4237 (__v4sf) __B,
4238 -(__v4sf) __C,
4239 (__mmask8) __U);
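/* Masked fmaddsub: __A * __B - __C in the even-indexed lanes and
   __A * __B + __C in the odd-indexed lanes.  */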
4242 extern __inline __m256d
4243 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4244 _mm256_mask_fmaddsub_pd (__m256d __A, __mmask8 __U, __m256d __B,
4245 __m256d __C)
4247 return (__m256d) __builtin_ia32_vfmaddsubpd256_mask ((__v4df) __A,
4248 (__v4df) __B,
4249 (__v4df) __C,
4250 (__mmask8) __U);
4253 extern __inline __m256d
4254 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4255 _mm256_mask3_fmaddsub_pd (__m256d __A, __m256d __B, __m256d __C,
4256 __mmask8 __U)
4258 return (__m256d) __builtin_ia32_vfmaddsubpd256_mask3 ((__v4df) __A,
4259 (__v4df) __B,
4260 (__v4df) __C,
4261 (__mmask8)
4262 __U);
4265 extern __inline __m256d
4266 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4267 _mm256_maskz_fmaddsub_pd (__mmask8 __U, __m256d __A, __m256d __B,
4268 __m256d __C)
4270 return (__m256d) __builtin_ia32_vfmaddsubpd256_maskz ((__v4df) __A,
4271 (__v4df) __B,
4272 (__v4df) __C,
4273 (__mmask8)
4274 __U);
4277 extern __inline __m128d
4278 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4279 _mm_mask_fmaddsub_pd (__m128d __A, __mmask8 __U, __m128d __B,
4280 __m128d __C)
4282 return (__m128d) __builtin_ia32_vfmaddsubpd128_mask ((__v2df) __A,
4283 (__v2df) __B,
4284 (__v2df) __C,
4285 (__mmask8) __U);
4288 extern __inline __m128d
4289 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4290 _mm_mask3_fmaddsub_pd (__m128d __A, __m128d __B, __m128d __C,
4291 __mmask8 __U)
4293 return (__m128d) __builtin_ia32_vfmaddsubpd128_mask3 ((__v2df) __A,
4294 (__v2df) __B,
4295 (__v2df) __C,
4296 (__mmask8)
4297 __U);
4300 extern __inline __m128d
4301 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4302 _mm_maskz_fmaddsub_pd (__mmask8 __U, __m128d __A, __m128d __B,
4303 __m128d __C)
4305 return (__m128d) __builtin_ia32_vfmaddsubpd128_maskz ((__v2df) __A,
4306 (__v2df) __B,
4307 (__v2df) __C,
4308 (__mmask8)
4309 __U);
4312 extern __inline __m256
4313 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4314 _mm256_mask_fmaddsub_ps (__m256 __A, __mmask8 __U, __m256 __B,
4315 __m256 __C)
4317 return (__m256) __builtin_ia32_vfmaddsubps256_mask ((__v8sf) __A,
4318 (__v8sf) __B,
4319 (__v8sf) __C,
4320 (__mmask8) __U);
4323 extern __inline __m256
4324 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4325 _mm256_mask3_fmaddsub_ps (__m256 __A, __m256 __B, __m256 __C,
4326 __mmask8 __U)
4328 return (__m256) __builtin_ia32_vfmaddsubps256_mask3 ((__v8sf) __A,
4329 (__v8sf) __B,
4330 (__v8sf) __C,
4331 (__mmask8) __U);
4334 extern __inline __m256
4335 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4336 _mm256_maskz_fmaddsub_ps (__mmask8 __U, __m256 __A, __m256 __B,
4337 __m256 __C)
4339 return (__m256) __builtin_ia32_vfmaddsubps256_maskz ((__v8sf) __A,
4340 (__v8sf) __B,
4341 (__v8sf) __C,
4342 (__mmask8) __U);
4345 extern __inline __m128
4346 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4347 _mm_mask_fmaddsub_ps (__m128 __A, __mmask8 __U, __m128 __B, __m128 __C)
4349 return (__m128) __builtin_ia32_vfmaddsubps128_mask ((__v4sf) __A,
4350 (__v4sf) __B,
4351 (__v4sf) __C,
4352 (__mmask8) __U);
4355 extern __inline __m128
4356 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4357 _mm_mask3_fmaddsub_ps (__m128 __A, __m128 __B, __m128 __C,
4358 __mmask8 __U)
4360 return (__m128) __builtin_ia32_vfmaddsubps128_mask3 ((__v4sf) __A,
4361 (__v4sf) __B,
4362 (__v4sf) __C,
4363 (__mmask8) __U);
4366 extern __inline __m128
4367 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4368 _mm_maskz_fmaddsub_ps (__mmask8 __U, __m128 __A, __m128 __B,
4369 __m128 __C)
4371 return (__m128) __builtin_ia32_vfmaddsubps128_maskz ((__v4sf) __A,
4372 (__v4sf) __B,
4373 (__v4sf) __C,
4374 (__mmask8) __U);
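/* Masked fmsubadd: __A * __B + __C in the even-indexed lanes and
   __A * __B - __C in the odd-indexed lanes.  */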
4377 extern __inline __m256d
4378 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4379 _mm256_mask_fmsubadd_pd (__m256d __A, __mmask8 __U, __m256d __B,
4380 __m256d __C)
4382 return (__m256d) __builtin_ia32_vfmaddsubpd256_mask ((__v4df) __A,
4383 (__v4df) __B,
4384 -(__v4df) __C,
4385 (__mmask8) __U);
4388 extern __inline __m256d
4389 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4390 _mm256_mask3_fmsubadd_pd (__m256d __A, __m256d __B, __m256d __C,
4391 __mmask8 __U)
4393 return (__m256d) __builtin_ia32_vfmsubaddpd256_mask3 ((__v4df) __A,
4394 (__v4df) __B,
4395 (__v4df) __C,
4396 (__mmask8)
4397 __U);
4400 extern __inline __m256d
4401 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4402 _mm256_maskz_fmsubadd_pd (__mmask8 __U, __m256d __A, __m256d __B,
4403 __m256d __C)
4405 return (__m256d) __builtin_ia32_vfmaddsubpd256_maskz ((__v4df) __A,
4406 (__v4df) __B,
4407 -(__v4df) __C,
4408 (__mmask8)
4409 __U);
4412 extern __inline __m128d
4413 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4414 _mm_mask_fmsubadd_pd (__m128d __A, __mmask8 __U, __m128d __B,
4415 __m128d __C)
4417 return (__m128d) __builtin_ia32_vfmaddsubpd128_mask ((__v2df) __A,
4418 (__v2df) __B,
4419 -(__v2df) __C,
4420 (__mmask8) __U);
4423 extern __inline __m128d
4424 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4425 _mm_mask3_fmsubadd_pd (__m128d __A, __m128d __B, __m128d __C,
4426 __mmask8 __U)
4428 return (__m128d) __builtin_ia32_vfmsubaddpd128_mask3 ((__v2df) __A,
4429 (__v2df) __B,
4430 (__v2df) __C,
4431 (__mmask8)
4432 __U);
4435 extern __inline __m128d
4436 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4437 _mm_maskz_fmsubadd_pd (__mmask8 __U, __m128d __A, __m128d __B,
4438 __m128d __C)
4440 return (__m128d) __builtin_ia32_vfmaddsubpd128_maskz ((__v2df) __A,
4441 (__v2df) __B,
4442 -(__v2df) __C,
4443 (__mmask8)
4444 __U);
4447 extern __inline __m256
4448 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4449 _mm256_mask_fmsubadd_ps (__m256 __A, __mmask8 __U, __m256 __B,
4450 __m256 __C)
4452 return (__m256) __builtin_ia32_vfmaddsubps256_mask ((__v8sf) __A,
4453 (__v8sf) __B,
4454 -(__v8sf) __C,
4455 (__mmask8) __U);
4458 extern __inline __m256
4459 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4460 _mm256_mask3_fmsubadd_ps (__m256 __A, __m256 __B, __m256 __C,
4461 __mmask8 __U)
4463 return (__m256) __builtin_ia32_vfmsubaddps256_mask3 ((__v8sf) __A,
4464 (__v8sf) __B,
4465 (__v8sf) __C,
4466 (__mmask8) __U);
4469 extern __inline __m256
4470 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4471 _mm256_maskz_fmsubadd_ps (__mmask8 __U, __m256 __A, __m256 __B,
4472 __m256 __C)
4474 return (__m256) __builtin_ia32_vfmaddsubps256_maskz ((__v8sf) __A,
4475 (__v8sf) __B,
4476 -(__v8sf) __C,
4477 (__mmask8) __U);
4480 extern __inline __m128
4481 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4482 _mm_mask_fmsubadd_ps (__m128 __A, __mmask8 __U, __m128 __B, __m128 __C)
4484 return (__m128) __builtin_ia32_vfmaddsubps128_mask ((__v4sf) __A,
4485 (__v4sf) __B,
4486 -(__v4sf) __C,
4487 (__mmask8) __U);
4490 extern __inline __m128
4491 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4492 _mm_mask3_fmsubadd_ps (__m128 __A, __m128 __B, __m128 __C,
4493 __mmask8 __U)
4495 return (__m128) __builtin_ia32_vfmsubaddps128_mask3 ((__v4sf) __A,
4496 (__v4sf) __B,
4497 (__v4sf) __C,
4498 (__mmask8) __U);
4501 extern __inline __m128
4502 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4503 _mm_maskz_fmsubadd_ps (__mmask8 __U, __m128 __A, __m128 __B,
4504 __m128 __C)
4506 return (__m128) __builtin_ia32_vfmaddsubps128_maskz ((__v4sf) __A,
4507 (__v4sf) __B,
4508 -(__v4sf) __C,
4509 (__mmask8) __U);
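/* Masked fnmadd, -(__A * __B) + __C.  */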
4512 extern __inline __m256d
4513 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4514 _mm256_mask_fnmadd_pd (__m256d __A, __mmask8 __U, __m256d __B,
4515 __m256d __C)
4517 return (__m256d) __builtin_ia32_vfnmaddpd256_mask ((__v4df) __A,
4518 (__v4df) __B,
4519 (__v4df) __C,
4520 (__mmask8) __U);
4523 extern __inline __m256d
4524 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4525 _mm256_mask3_fnmadd_pd (__m256d __A, __m256d __B, __m256d __C,
4526 __mmask8 __U)
4528 return (__m256d) __builtin_ia32_vfmaddpd256_mask3 (-(__v4df) __A,
4529 (__v4df) __B,
4530 (__v4df) __C,
4531 (__mmask8) __U);
4534 extern __inline __m256d
4535 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4536 _mm256_maskz_fnmadd_pd (__mmask8 __U, __m256d __A, __m256d __B,
4537 __m256d __C)
4539 return (__m256d) __builtin_ia32_vfmaddpd256_maskz (-(__v4df) __A,
4540 (__v4df) __B,
4541 (__v4df) __C,
4542 (__mmask8) __U);
4545 extern __inline __m128d
4546 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4547 _mm_mask_fnmadd_pd (__m128d __A, __mmask8 __U, __m128d __B,
4548 __m128d __C)
4550 return (__m128d) __builtin_ia32_vfnmaddpd128_mask ((__v2df) __A,
4551 (__v2df) __B,
4552 (__v2df) __C,
4553 (__mmask8) __U);
4556 extern __inline __m128d
4557 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4558 _mm_mask3_fnmadd_pd (__m128d __A, __m128d __B, __m128d __C,
4559 __mmask8 __U)
4561 return (__m128d) __builtin_ia32_vfmaddpd128_mask3 (-(__v2df) __A,
4562 (__v2df) __B,
4563 (__v2df) __C,
4564 (__mmask8) __U);
4567 extern __inline __m128d
4568 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4569 _mm_maskz_fnmadd_pd (__mmask8 __U, __m128d __A, __m128d __B,
4570 __m128d __C)
4572 return (__m128d) __builtin_ia32_vfmaddpd128_maskz (-(__v2df) __A,
4573 (__v2df) __B,
4574 (__v2df) __C,
4575 (__mmask8) __U);
4578 extern __inline __m256
4579 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4580 _mm256_mask_fnmadd_ps (__m256 __A, __mmask8 __U, __m256 __B,
4581 __m256 __C)
4583 return (__m256) __builtin_ia32_vfnmaddps256_mask ((__v8sf) __A,
4584 (__v8sf) __B,
4585 (__v8sf) __C,
4586 (__mmask8) __U);
4589 extern __inline __m256
4590 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4591 _mm256_mask3_fnmadd_ps (__m256 __A, __m256 __B, __m256 __C,
4592 __mmask8 __U)
4594 return (__m256) __builtin_ia32_vfmaddps256_mask3 (-(__v8sf) __A,
4595 (__v8sf) __B,
4596 (__v8sf) __C,
4597 (__mmask8) __U);
4600 extern __inline __m256
4601 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4602 _mm256_maskz_fnmadd_ps (__mmask8 __U, __m256 __A, __m256 __B,
4603 __m256 __C)
4605 return (__m256) __builtin_ia32_vfmaddps256_maskz (-(__v8sf) __A,
4606 (__v8sf) __B,
4607 (__v8sf) __C,
4608 (__mmask8) __U);
4611 extern __inline __m128
4612 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4613 _mm_mask_fnmadd_ps (__m128 __A, __mmask8 __U, __m128 __B, __m128 __C)
4615 return (__m128) __builtin_ia32_vfnmaddps128_mask ((__v4sf) __A,
4616 (__v4sf) __B,
4617 (__v4sf) __C,
4618 (__mmask8) __U);
4621 extern __inline __m128
4622 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4623 _mm_mask3_fnmadd_ps (__m128 __A, __m128 __B, __m128 __C, __mmask8 __U)
4625 return (__m128) __builtin_ia32_vfmaddps128_mask3 (-(__v4sf) __A,
4626 (__v4sf) __B,
4627 (__v4sf) __C,
4628 (__mmask8) __U);
4631 extern __inline __m128
4632 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4633 _mm_maskz_fnmadd_ps (__mmask8 __U, __m128 __A, __m128 __B, __m128 __C)
4635 return (__m128) __builtin_ia32_vfmaddps128_maskz (-(__v4sf) __A,
4636 (__v4sf) __B,
4637 (__v4sf) __C,
4638 (__mmask8) __U);
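/* Masked fnmsub, -(__A * __B) - __C.  */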
4641 extern __inline __m256d
4642 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4643 _mm256_mask_fnmsub_pd (__m256d __A, __mmask8 __U, __m256d __B,
4644 __m256d __C)
4646 return (__m256d) __builtin_ia32_vfnmsubpd256_mask ((__v4df) __A,
4647 (__v4df) __B,
4648 (__v4df) __C,
4649 (__mmask8) __U);
4652 extern __inline __m256d
4653 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4654 _mm256_mask3_fnmsub_pd (__m256d __A, __m256d __B, __m256d __C,
4655 __mmask8 __U)
4657 return (__m256d) __builtin_ia32_vfnmsubpd256_mask3 ((__v4df) __A,
4658 (__v4df) __B,
4659 (__v4df) __C,
4660 (__mmask8) __U);
4663 extern __inline __m256d
4664 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4665 _mm256_maskz_fnmsub_pd (__mmask8 __U, __m256d __A, __m256d __B,
4666 __m256d __C)
4668 return (__m256d) __builtin_ia32_vfmaddpd256_maskz (-(__v4df) __A,
4669 (__v4df) __B,
4670 -(__v4df) __C,
4671 (__mmask8) __U);
4674 extern __inline __m128d
4675 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4676 _mm_mask_fnmsub_pd (__m128d __A, __mmask8 __U, __m128d __B,
4677 __m128d __C)
4679 return (__m128d) __builtin_ia32_vfnmsubpd128_mask ((__v2df) __A,
4680 (__v2df) __B,
4681 (__v2df) __C,
4682 (__mmask8) __U);
4685 extern __inline __m128d
4686 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4687 _mm_mask3_fnmsub_pd (__m128d __A, __m128d __B, __m128d __C,
4688 __mmask8 __U)
4690 return (__m128d) __builtin_ia32_vfnmsubpd128_mask3 ((__v2df) __A,
4691 (__v2df) __B,
4692 (__v2df) __C,
4693 (__mmask8) __U);
4696 extern __inline __m128d
4697 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4698 _mm_maskz_fnmsub_pd (__mmask8 __U, __m128d __A, __m128d __B,
4699 __m128d __C)
4701 return (__m128d) __builtin_ia32_vfmaddpd128_maskz (-(__v2df) __A,
4702 (__v2df) __B,
4703 -(__v2df) __C,
4704 (__mmask8) __U);
4707 extern __inline __m256
4708 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4709 _mm256_mask_fnmsub_ps (__m256 __A, __mmask8 __U, __m256 __B,
4710 __m256 __C)
4712 return (__m256) __builtin_ia32_vfnmsubps256_mask ((__v8sf) __A,
4713 (__v8sf) __B,
4714 (__v8sf) __C,
4715 (__mmask8) __U);
4718 extern __inline __m256
4719 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4720 _mm256_mask3_fnmsub_ps (__m256 __A, __m256 __B, __m256 __C,
4721 __mmask8 __U)
4723 return (__m256) __builtin_ia32_vfnmsubps256_mask3 ((__v8sf) __A,
4724 (__v8sf) __B,
4725 (__v8sf) __C,
4726 (__mmask8) __U);
4729 extern __inline __m256
4730 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4731 _mm256_maskz_fnmsub_ps (__mmask8 __U, __m256 __A, __m256 __B,
4732 __m256 __C)
4734 return (__m256) __builtin_ia32_vfmaddps256_maskz (-(__v8sf) __A,
4735 (__v8sf) __B,
4736 -(__v8sf) __C,
4737 (__mmask8) __U);
4740 extern __inline __m128
4741 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4742 _mm_mask_fnmsub_ps (__m128 __A, __mmask8 __U, __m128 __B, __m128 __C)
4744 return (__m128) __builtin_ia32_vfnmsubps128_mask ((__v4sf) __A,
4745 (__v4sf) __B,
4746 (__v4sf) __C,
4747 (__mmask8) __U);
4750 extern __inline __m128
4751 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4752 _mm_mask3_fnmsub_ps (__m128 __A, __m128 __B, __m128 __C, __mmask8 __U)
4754 return (__m128) __builtin_ia32_vfnmsubps128_mask3 ((__v4sf) __A,
4755 (__v4sf) __B,
4756 (__v4sf) __C,
4757 (__mmask8) __U);
4760 extern __inline __m128
4761 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4762 _mm_maskz_fnmsub_ps (__mmask8 __U, __m128 __A, __m128 __B, __m128 __C)
4764 return (__m128) __builtin_ia32_vfmaddps128_maskz (-(__v4sf) __A,
4765 (__v4sf) __B,
4766 -(__v4sf) __C,
4767 (__mmask8) __U);
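/* Masked 32-bit bitwise logic: AND, ANDNOT (~__A & __B), OR and XOR,
   merging unselected lanes from __W or zeroing them.  */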
4770 extern __inline __m128i
4771 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4772 _mm_mask_and_epi32 (__m128i __W, __mmask8 __U, __m128i __A,
4773 __m128i __B)
4775 return (__m128i) __builtin_ia32_pandd128_mask ((__v4si) __A,
4776 (__v4si) __B,
4777 (__v4si) __W,
4778 (__mmask8) __U);
4781 extern __inline __m128i
4782 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4783 _mm_maskz_and_epi32 (__mmask8 __U, __m128i __A, __m128i __B)
4785 return (__m128i) __builtin_ia32_pandd128_mask ((__v4si) __A,
4786 (__v4si) __B,
4787 (__v4si)
4788 _mm_setzero_si128 (),
4789 (__mmask8) __U);
4792 extern __inline __m256i
4793 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4794 _mm256_mask_andnot_epi32 (__m256i __W, __mmask8 __U, __m256i __A,
4795 __m256i __B)
4797 return (__m256i) __builtin_ia32_pandnd256_mask ((__v8si) __A,
4798 (__v8si) __B,
4799 (__v8si) __W,
4800 (__mmask8) __U);
4803 extern __inline __m256i
4804 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4805 _mm256_maskz_andnot_epi32 (__mmask8 __U, __m256i __A, __m256i __B)
4807 return (__m256i) __builtin_ia32_pandnd256_mask ((__v8si) __A,
4808 (__v8si) __B,
4809 (__v8si)
4810 _mm256_setzero_si256 (),
4811 (__mmask8) __U);
4814 extern __inline __m128i
4815 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4816 _mm_mask_andnot_epi32 (__m128i __W, __mmask8 __U, __m128i __A,
4817 __m128i __B)
4819 return (__m128i) __builtin_ia32_pandnd128_mask ((__v4si) __A,
4820 (__v4si) __B,
4821 (__v4si) __W,
4822 (__mmask8) __U);
4825 extern __inline __m128i
4826 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4827 _mm_maskz_andnot_epi32 (__mmask8 __U, __m128i __A, __m128i __B)
4829 return (__m128i) __builtin_ia32_pandnd128_mask ((__v4si) __A,
4830 (__v4si) __B,
4831 (__v4si)
4832 _mm_setzero_si128 (),
4833 (__mmask8) __U);
4836 extern __inline __m256i
4837 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4838 _mm256_mask_or_epi32 (__m256i __W, __mmask8 __U, __m256i __A,
4839 __m256i __B)
4841 return (__m256i) __builtin_ia32_pord256_mask ((__v8si) __A,
4842 (__v8si) __B,
4843 (__v8si) __W,
4844 (__mmask8) __U);
4847 extern __inline __m256i
4848 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4849 _mm256_maskz_or_epi32 (__mmask8 __U, __m256i __A, __m256i __B)
4851 return (__m256i) __builtin_ia32_pord256_mask ((__v8si) __A,
4852 (__v8si) __B,
4853 (__v8si)
4854 _mm256_setzero_si256 (),
4855 (__mmask8) __U);
4858 extern __inline __m128i
4859 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4860 _mm_mask_or_epi32 (__m128i __W, __mmask8 __U, __m128i __A, __m128i __B)
4862 return (__m128i) __builtin_ia32_pord128_mask ((__v4si) __A,
4863 (__v4si) __B,
4864 (__v4si) __W,
4865 (__mmask8) __U);
4868 extern __inline __m128i
4869 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4870 _mm_maskz_or_epi32 (__mmask8 __U, __m128i __A, __m128i __B)
4872 return (__m128i) __builtin_ia32_pord128_mask ((__v4si) __A,
4873 (__v4si) __B,
4874 (__v4si)
4875 _mm_setzero_si128 (),
4876 (__mmask8) __U);
4879 extern __inline __m256i
4880 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4881 _mm256_mask_xor_epi32 (__m256i __W, __mmask8 __U, __m256i __A,
4882 __m256i __B)
4884 return (__m256i) __builtin_ia32_pxord256_mask ((__v8si) __A,
4885 (__v8si) __B,
4886 (__v8si) __W,
4887 (__mmask8) __U);
4890 extern __inline __m256i
4891 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4892 _mm256_maskz_xor_epi32 (__mmask8 __U, __m256i __A, __m256i __B)
4894 return (__m256i) __builtin_ia32_pxord256_mask ((__v8si) __A,
4895 (__v8si) __B,
4896 (__v8si)
4897 _mm256_setzero_si256 (),
4898 (__mmask8) __U);
4901 extern __inline __m128i
4902 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4903 _mm_mask_xor_epi32 (__m128i __W, __mmask8 __U, __m128i __A,
4904 __m128i __B)
4906 return (__m128i) __builtin_ia32_pxord128_mask ((__v4si) __A,
4907 (__v4si) __B,
4908 (__v4si) __W,
4909 (__mmask8) __U);
4912 extern __inline __m128i
4913 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4914 _mm_maskz_xor_epi32 (__mmask8 __U, __m128i __A, __m128i __B)
4916 return (__m128i) __builtin_ia32_pxord128_mask ((__v4si) __A,
4917 (__v4si) __B,
4918 (__v4si)
4919 _mm_setzero_si128 (),
4920 (__mmask8) __U);
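/* Masked conversions: cvtpd_ps narrows doubles to floats, zeroing any
   result lanes beyond the converted elements; cvtps_epi32 converts to
   signed 32-bit integers using the current rounding mode; cvtps_epu32
   converts to unsigned 32-bit integers.  */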
4923 extern __inline __m128
4924 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4925 _mm_mask_cvtpd_ps (__m128 __W, __mmask8 __U, __m128d __A)
4927 return (__m128) __builtin_ia32_cvtpd2ps_mask ((__v2df) __A,
4928 (__v4sf) __W,
4929 (__mmask8) __U);
4932 extern __inline __m128
4933 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4934 _mm_maskz_cvtpd_ps (__mmask8 __U, __m128d __A)
4936 return (__m128) __builtin_ia32_cvtpd2ps_mask ((__v2df) __A,
4937 (__v4sf)
4938 _mm_setzero_ps (),
4939 (__mmask8) __U);
4942 extern __inline __m128
4943 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4944 _mm256_mask_cvtpd_ps (__m128 __W, __mmask8 __U, __m256d __A)
4946 return (__m128) __builtin_ia32_cvtpd2ps256_mask ((__v4df) __A,
4947 (__v4sf) __W,
4948 (__mmask8) __U);
4951 extern __inline __m128
4952 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4953 _mm256_maskz_cvtpd_ps (__mmask8 __U, __m256d __A)
4955 return (__m128) __builtin_ia32_cvtpd2ps256_mask ((__v4df) __A,
4956 (__v4sf)
4957 _mm_setzero_ps (),
4958 (__mmask8) __U);
4961 extern __inline __m256i
4962 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4963 _mm256_mask_cvtps_epi32 (__m256i __W, __mmask8 __U, __m256 __A)
4965 return (__m256i) __builtin_ia32_cvtps2dq256_mask ((__v8sf) __A,
4966 (__v8si) __W,
4967 (__mmask8) __U);
4970 extern __inline __m256i
4971 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4972 _mm256_maskz_cvtps_epi32 (__mmask8 __U, __m256 __A)
4974 return (__m256i) __builtin_ia32_cvtps2dq256_mask ((__v8sf) __A,
4975 (__v8si)
4976 _mm256_setzero_si256 (),
4977 (__mmask8) __U);
4980 extern __inline __m128i
4981 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4982 _mm_mask_cvtps_epi32 (__m128i __W, __mmask8 __U, __m128 __A)
4984 return (__m128i) __builtin_ia32_cvtps2dq128_mask ((__v4sf) __A,
4985 (__v4si) __W,
4986 (__mmask8) __U);
4989 extern __inline __m128i
4990 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
4991 _mm_maskz_cvtps_epi32 (__mmask8 __U, __m128 __A)
4993 return (__m128i) __builtin_ia32_cvtps2dq128_mask ((__v4sf) __A,
4994 (__v4si)
4995 _mm_setzero_si128 (),
4996 (__mmask8) __U);
4999 extern __inline __m256i
5000 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5001 _mm256_cvtps_epu32 (__m256 __A)
5003 return (__m256i) __builtin_ia32_cvtps2udq256_mask ((__v8sf) __A,
5004 (__v8si)
5005 _mm256_setzero_si256 (),
5006 (__mmask8) -1);
5009 extern __inline __m256i
5010 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5011 _mm256_mask_cvtps_epu32 (__m256i __W, __mmask8 __U, __m256 __A)
5013 return (__m256i) __builtin_ia32_cvtps2udq256_mask ((__v8sf) __A,
5014 (__v8si) __W,
5015 (__mmask8) __U);
5018 extern __inline __m256i
5019 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5020 _mm256_maskz_cvtps_epu32 (__mmask8 __U, __m256 __A)
5022 return (__m256i) __builtin_ia32_cvtps2udq256_mask ((__v8sf) __A,
5023 (__v8si)
5024 _mm256_setzero_si256 (),
5025 (__mmask8) __U);
5028 extern __inline __m128i
5029 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5030 _mm_cvtps_epu32 (__m128 __A)
5032 return (__m128i) __builtin_ia32_cvtps2udq128_mask ((__v4sf) __A,
5033 (__v4si)
5034 _mm_setzero_si128 (),
5035 (__mmask8) -1);
5038 extern __inline __m128i
5039 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5040 _mm_mask_cvtps_epu32 (__m128i __W, __mmask8 __U, __m128 __A)
5042 return (__m128i) __builtin_ia32_cvtps2udq128_mask ((__v4sf) __A,
5043 (__v4si) __W,
5044 (__mmask8) __U);
5047 extern __inline __m128i
5048 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5049 _mm_maskz_cvtps_epu32 (__mmask8 __U, __m128 __A)
5051 return (__m128i) __builtin_ia32_cvtps2udq128_mask ((__v4sf) __A,
5052 (__v4si)
5053 _mm_setzero_si128 (),
5054 (__mmask8) __U);
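/* Masked duplicate shuffles: movedup_pd duplicates each even-indexed
   double into the lane above it, movehdup_ps duplicates the
   odd-indexed floats and moveldup_ps the even-indexed ones.  */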
5057 extern __inline __m256d
5058 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5059 _mm256_mask_movedup_pd (__m256d __W, __mmask8 __U, __m256d __A)
5061 return (__m256d) __builtin_ia32_movddup256_mask ((__v4df) __A,
5062 (__v4df) __W,
5063 (__mmask8) __U);
5066 extern __inline __m256d
5067 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5068 _mm256_maskz_movedup_pd (__mmask8 __U, __m256d __A)
5070 return (__m256d) __builtin_ia32_movddup256_mask ((__v4df) __A,
5071 (__v4df)
5072 _mm256_setzero_pd (),
5073 (__mmask8) __U);
5076 extern __inline __m128d
5077 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5078 _mm_mask_movedup_pd (__m128d __W, __mmask8 __U, __m128d __A)
5080 return (__m128d) __builtin_ia32_movddup128_mask ((__v2df) __A,
5081 (__v2df) __W,
5082 (__mmask8) __U);
5085 extern __inline __m128d
5086 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5087 _mm_maskz_movedup_pd (__mmask8 __U, __m128d __A)
5089 return (__m128d) __builtin_ia32_movddup128_mask ((__v2df) __A,
5090 (__v2df)
5091 _mm_setzero_pd (),
5092 (__mmask8) __U);
5095 extern __inline __m256
5096 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5097 _mm256_mask_movehdup_ps (__m256 __W, __mmask8 __U, __m256 __A)
5099 return (__m256) __builtin_ia32_movshdup256_mask ((__v8sf) __A,
5100 (__v8sf) __W,
5101 (__mmask8) __U);
5104 extern __inline __m256
5105 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5106 _mm256_maskz_movehdup_ps (__mmask8 __U, __m256 __A)
5108 return (__m256) __builtin_ia32_movshdup256_mask ((__v8sf) __A,
5109 (__v8sf)
5110 _mm256_setzero_ps (),
5111 (__mmask8) __U);
5114 extern __inline __m128
5115 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5116 _mm_mask_movehdup_ps (__m128 __W, __mmask8 __U, __m128 __A)
5118 return (__m128) __builtin_ia32_movshdup128_mask ((__v4sf) __A,
5119 (__v4sf) __W,
5120 (__mmask8) __U);
5123 extern __inline __m128
5124 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5125 _mm_maskz_movehdup_ps (__mmask8 __U, __m128 __A)
5127 return (__m128) __builtin_ia32_movshdup128_mask ((__v4sf) __A,
5128 (__v4sf)
5129 _mm_setzero_ps (),
5130 (__mmask8) __U);
5133 extern __inline __m256
5134 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5135 _mm256_mask_moveldup_ps (__m256 __W, __mmask8 __U, __m256 __A)
5137 return (__m256) __builtin_ia32_movsldup256_mask ((__v8sf) __A,
5138 (__v8sf) __W,
5139 (__mmask8) __U);
5142 extern __inline __m256
5143 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5144 _mm256_maskz_moveldup_ps (__mmask8 __U, __m256 __A)
5146 return (__m256) __builtin_ia32_movsldup256_mask ((__v8sf) __A,
5147 (__v8sf)
5148 _mm256_setzero_ps (),
5149 (__mmask8) __U);
5152 extern __inline __m128
5153 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5154 _mm_mask_moveldup_ps (__m128 __W, __mmask8 __U, __m128 __A)
5156 return (__m128) __builtin_ia32_movsldup128_mask ((__v4sf) __A,
5157 (__v4sf) __W,
5158 (__mmask8) __U);
5161 extern __inline __m128
5162 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5163 _mm_maskz_moveldup_ps (__mmask8 __U, __m128 __A)
5165 return (__m128) __builtin_ia32_movsldup128_mask ((__v4sf) __A,
5166 (__v4sf)
5167 _mm_setzero_ps (),
5168 (__mmask8) __U);
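/* Masked interleaves: unpackhi merges the high halves of each 128-bit
   lane of __A and __B, unpacklo the low halves, for 32-bit and 64-bit
   elements.  */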
5171 extern __inline __m128i
5172 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5173 _mm_mask_unpackhi_epi32 (__m128i __W, __mmask8 __U, __m128i __A,
5174 __m128i __B)
5176 return (__m128i) __builtin_ia32_punpckhdq128_mask ((__v4si) __A,
5177 (__v4si) __B,
5178 (__v4si) __W,
5179 (__mmask8) __U);
5182 extern __inline __m128i
5183 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5184 _mm_maskz_unpackhi_epi32 (__mmask8 __U, __m128i __A, __m128i __B)
5186 return (__m128i) __builtin_ia32_punpckhdq128_mask ((__v4si) __A,
5187 (__v4si) __B,
5188 (__v4si)
5189 _mm_setzero_si128 (),
5190 (__mmask8) __U);
5193 extern __inline __m256i
5194 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5195 _mm256_mask_unpackhi_epi32 (__m256i __W, __mmask8 __U, __m256i __A,
5196 __m256i __B)
5198 return (__m256i) __builtin_ia32_punpckhdq256_mask ((__v8si) __A,
5199 (__v8si) __B,
5200 (__v8si) __W,
5201 (__mmask8) __U);
5204 extern __inline __m256i
5205 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5206 _mm256_maskz_unpackhi_epi32 (__mmask8 __U, __m256i __A, __m256i __B)
5208 return (__m256i) __builtin_ia32_punpckhdq256_mask ((__v8si) __A,
5209 (__v8si) __B,
5210 (__v8si)
5211 _mm256_setzero_si256 (),
5212 (__mmask8) __U);
5215 extern __inline __m128i
5216 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5217 _mm_mask_unpackhi_epi64 (__m128i __W, __mmask8 __U, __m128i __A,
5218 __m128i __B)
5220 return (__m128i) __builtin_ia32_punpckhqdq128_mask ((__v2di) __A,
5221 (__v2di) __B,
5222 (__v2di) __W,
5223 (__mmask8) __U);
5226 extern __inline __m128i
5227 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5228 _mm_maskz_unpackhi_epi64 (__mmask8 __U, __m128i __A, __m128i __B)
5230 return (__m128i) __builtin_ia32_punpckhqdq128_mask ((__v2di) __A,
5231 (__v2di) __B,
5232 (__v2di)
5233 _mm_setzero_si128 (),
5234 (__mmask8) __U);
5237 extern __inline __m256i
5238 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5239 _mm256_mask_unpackhi_epi64 (__m256i __W, __mmask8 __U, __m256i __A,
5240 __m256i __B)
5242 return (__m256i) __builtin_ia32_punpckhqdq256_mask ((__v4di) __A,
5243 (__v4di) __B,
5244 (__v4di) __W,
5245 (__mmask8) __U);
5248 extern __inline __m256i
5249 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5250 _mm256_maskz_unpackhi_epi64 (__mmask8 __U, __m256i __A, __m256i __B)
5252 return (__m256i) __builtin_ia32_punpckhqdq256_mask ((__v4di) __A,
5253 (__v4di) __B,
5254 (__v4di)
5255 _mm256_setzero_si256 (),
5256 (__mmask8) __U);
5259 extern __inline __m128i
5260 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5261 _mm_mask_unpacklo_epi32 (__m128i __W, __mmask8 __U, __m128i __A,
5262 __m128i __B)
5264 return (__m128i) __builtin_ia32_punpckldq128_mask ((__v4si) __A,
5265 (__v4si) __B,
5266 (__v4si) __W,
5267 (__mmask8) __U);
5270 extern __inline __m128i
5271 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5272 _mm_maskz_unpacklo_epi32 (__mmask8 __U, __m128i __A, __m128i __B)
5274 return (__m128i) __builtin_ia32_punpckldq128_mask ((__v4si) __A,
5275 (__v4si) __B,
5276 (__v4si)
5277 _mm_setzero_si128 (),
5278 (__mmask8) __U);
5281 extern __inline __m256i
5282 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5283 _mm256_mask_unpacklo_epi32 (__m256i __W, __mmask8 __U, __m256i __A,
5284 __m256i __B)
5286 return (__m256i) __builtin_ia32_punpckldq256_mask ((__v8si) __A,
5287 (__v8si) __B,
5288 (__v8si) __W,
5289 (__mmask8) __U);
5292 extern __inline __m256i
5293 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5294 _mm256_maskz_unpacklo_epi32 (__mmask8 __U, __m256i __A, __m256i __B)
5296 return (__m256i) __builtin_ia32_punpckldq256_mask ((__v8si) __A,
5297 (__v8si) __B,
5298 (__v8si)
5299 _mm256_setzero_si256 (),
5300 (__mmask8) __U);
5303 extern __inline __m128i
5304 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5305 _mm_mask_unpacklo_epi64 (__m128i __W, __mmask8 __U, __m128i __A,
5306 __m128i __B)
5308 return (__m128i) __builtin_ia32_punpcklqdq128_mask ((__v2di) __A,
5309 (__v2di) __B,
5310 (__v2di) __W,
5311 (__mmask8) __U);
5314 extern __inline __m128i
5315 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5316 _mm_maskz_unpacklo_epi64 (__mmask8 __U, __m128i __A, __m128i __B)
5318 return (__m128i) __builtin_ia32_punpcklqdq128_mask ((__v2di) __A,
5319 (__v2di) __B,
5320 (__v2di)
5321 _mm_setzero_si128 (),
5322 (__mmask8) __U);
5325 extern __inline __m256i
5326 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5327 _mm256_mask_unpacklo_epi64 (__m256i __W, __mmask8 __U, __m256i __A,
5328 __m256i __B)
5330 return (__m256i) __builtin_ia32_punpcklqdq256_mask ((__v4di) __A,
5331 (__v4di) __B,
5332 (__v4di) __W,
5333 (__mmask8) __U);
5336 extern __inline __m256i
5337 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5338 _mm256_maskz_unpacklo_epi64 (__mmask8 __U, __m256i __A, __m256i __B)
5340 return (__m256i) __builtin_ia32_punpcklqdq256_mask ((__v4di) __A,
5341 (__v4di) __B,
5342 (__v4di)
5343 _mm256_setzero_si256 (),
5344 (__mmask8) __U);
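/* Comparisons returning a bitmask instead of a vector: bit i of the
   result is set when the comparison holds for element i.  The
   unsigned forms go through the __builtin_ia32_ucmp* builtins with an
   immediate predicate (0 for equal, 6 for greater-than).  */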
5347 extern __inline __mmask8
5348 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5349 _mm_cmpeq_epu32_mask (__m128i __A, __m128i __B)
5351 return (__mmask8) __builtin_ia32_ucmpd128_mask ((__v4si) __A,
5352 (__v4si) __B, 0,
5353 (__mmask8) -1);
5356 extern __inline __mmask8
5357 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5358 _mm_cmpeq_epi32_mask (__m128i __A, __m128i __B)
5360 return (__mmask8) __builtin_ia32_pcmpeqd128_mask ((__v4si) __A,
5361 (__v4si) __B,
5362 (__mmask8) -1);
5365 extern __inline __mmask8
5366 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5367 _mm_mask_cmpeq_epu32_mask (__mmask8 __U, __m128i __A, __m128i __B)
5369 return (__mmask8) __builtin_ia32_ucmpd128_mask ((__v4si) __A,
5370 (__v4si) __B, 0, __U);
5373 extern __inline __mmask8
5374 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5375 _mm_mask_cmpeq_epi32_mask (__mmask8 __U, __m128i __A, __m128i __B)
5377 return (__mmask8) __builtin_ia32_pcmpeqd128_mask ((__v4si) __A,
5378 (__v4si) __B, __U);
5381 extern __inline __mmask8
5382 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5383 _mm256_cmpeq_epu32_mask (__m256i __A, __m256i __B)
5385 return (__mmask8) __builtin_ia32_ucmpd256_mask ((__v8si) __A,
5386 (__v8si) __B, 0,
5387 (__mmask8) -1);
5390 extern __inline __mmask8
5391 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5392 _mm256_cmpeq_epi32_mask (__m256i __A, __m256i __B)
5394 return (__mmask8) __builtin_ia32_pcmpeqd256_mask ((__v8si) __A,
5395 (__v8si) __B,
5396 (__mmask8) -1);
5399 extern __inline __mmask8
5400 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5401 _mm256_mask_cmpeq_epu32_mask (__mmask8 __U, __m256i __A, __m256i __B)
5403 return (__mmask8) __builtin_ia32_ucmpd256_mask ((__v8si) __A,
5404 (__v8si) __B, 0, __U);
5407 extern __inline __mmask8
5408 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5409 _mm256_mask_cmpeq_epi32_mask (__mmask8 __U, __m256i __A, __m256i __B)
5411 return (__mmask8) __builtin_ia32_pcmpeqd256_mask ((__v8si) __A,
5412 (__v8si) __B, __U);
5415 extern __inline __mmask8
5416 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5417 _mm_cmpeq_epu64_mask (__m128i __A, __m128i __B)
5419 return (__mmask8) __builtin_ia32_ucmpq128_mask ((__v2di) __A,
5420 (__v2di) __B, 0,
5421 (__mmask8) -1);
5424 extern __inline __mmask8
5425 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5426 _mm_cmpeq_epi64_mask (__m128i __A, __m128i __B)
5428 return (__mmask8) __builtin_ia32_pcmpeqq128_mask ((__v2di) __A,
5429 (__v2di) __B,
5430 (__mmask8) -1);
5433 extern __inline __mmask8
5434 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5435 _mm_mask_cmpeq_epu64_mask (__mmask8 __U, __m128i __A, __m128i __B)
5437 return (__mmask8) __builtin_ia32_ucmpq128_mask ((__v2di) __A,
5438 (__v2di) __B, 0, __U);
5441 extern __inline __mmask8
5442 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5443 _mm_mask_cmpeq_epi64_mask (__mmask8 __U, __m128i __A, __m128i __B)
5445 return (__mmask8) __builtin_ia32_pcmpeqq128_mask ((__v2di) __A,
5446 (__v2di) __B, __U);
5449 extern __inline __mmask8
5450 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5451 _mm256_cmpeq_epu64_mask (__m256i __A, __m256i __B)
5453 return (__mmask8) __builtin_ia32_ucmpq256_mask ((__v4di) __A,
5454 (__v4di) __B, 0,
5455 (__mmask8) -1);
5458 extern __inline __mmask8
5459 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5460 _mm256_cmpeq_epi64_mask (__m256i __A, __m256i __B)
5462 return (__mmask8) __builtin_ia32_pcmpeqq256_mask ((__v4di) __A,
5463 (__v4di) __B,
5464 (__mmask8) -1);
5467 extern __inline __mmask8
5468 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5469 _mm256_mask_cmpeq_epu64_mask (__mmask8 __U, __m256i __A, __m256i __B)
5471 return (__mmask8) __builtin_ia32_ucmpq256_mask ((__v4di) __A,
5472 (__v4di) __B, 0, __U);
5475 extern __inline __mmask8
5476 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5477 _mm256_mask_cmpeq_epi64_mask (__mmask8 __U, __m256i __A, __m256i __B)
5479 return (__mmask8) __builtin_ia32_pcmpeqq256_mask ((__v4di) __A,
5480 (__v4di) __B, __U);
5483 extern __inline __mmask8
5484 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5485 _mm_cmpgt_epu32_mask (__m128i __A, __m128i __B)
5487 return (__mmask8) __builtin_ia32_ucmpd128_mask ((__v4si) __A,
5488 (__v4si) __B, 6,
5489 (__mmask8) -1);
5492 extern __inline __mmask8
5493 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5494 _mm_cmpgt_epi32_mask (__m128i __A, __m128i __B)
5496 return (__mmask8) __builtin_ia32_pcmpgtd128_mask ((__v4si) __A,
5497 (__v4si) __B,
5498 (__mmask8) -1);
5501 extern __inline __mmask8
5502 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5503 _mm_mask_cmpgt_epu32_mask (__mmask8 __U, __m128i __A, __m128i __B)
5505 return (__mmask8) __builtin_ia32_ucmpd128_mask ((__v4si) __A,
5506 (__v4si) __B, 6, __U);
5509 extern __inline __mmask8
5510 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5511 _mm_mask_cmpgt_epi32_mask (__mmask8 __U, __m128i __A, __m128i __B)
5513 return (__mmask8) __builtin_ia32_pcmpgtd128_mask ((__v4si) __A,
5514 (__v4si) __B, __U);
5517 extern __inline __mmask8
5518 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5519 _mm256_cmpgt_epu32_mask (__m256i __A, __m256i __B)
5521 return (__mmask8) __builtin_ia32_ucmpd256_mask ((__v8si) __A,
5522 (__v8si) __B, 6,
5523 (__mmask8) -1);
5526 extern __inline __mmask8
5527 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5528 _mm256_cmpgt_epi32_mask (__m256i __A, __m256i __B)
5530 return (__mmask8) __builtin_ia32_pcmpgtd256_mask ((__v8si) __A,
5531 (__v8si) __B,
5532 (__mmask8) -1);
5535 extern __inline __mmask8
5536 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5537 _mm256_mask_cmpgt_epu32_mask (__mmask8 __U, __m256i __A, __m256i __B)
5539 return (__mmask8) __builtin_ia32_ucmpd256_mask ((__v8si) __A,
5540 (__v8si) __B, 6, __U);
5543 extern __inline __mmask8
5544 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5545 _mm256_mask_cmpgt_epi32_mask (__mmask8 __U, __m256i __A, __m256i __B)
5547 return (__mmask8) __builtin_ia32_pcmpgtd256_mask ((__v8si) __A,
5548 (__v8si) __B, __U);
5551 extern __inline __mmask8
5552 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5553 _mm_cmpgt_epu64_mask (__m128i __A, __m128i __B)
5555 return (__mmask8) __builtin_ia32_ucmpq128_mask ((__v2di) __A,
5556 (__v2di) __B, 6,
5557 (__mmask8) -1);
5560 extern __inline __mmask8
5561 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5562 _mm_cmpgt_epi64_mask (__m128i __A, __m128i __B)
5564 return (__mmask8) __builtin_ia32_pcmpgtq128_mask ((__v2di) __A,
5565 (__v2di) __B,
5566 (__mmask8) -1);
5569 extern __inline __mmask8
5570 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5571 _mm_mask_cmpgt_epu64_mask (__mmask8 __U, __m128i __A, __m128i __B)
5573 return (__mmask8) __builtin_ia32_ucmpq128_mask ((__v2di) __A,
5574 (__v2di) __B, 6, __U);
5577 extern __inline __mmask8
5578 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5579 _mm_mask_cmpgt_epi64_mask (__mmask8 __U, __m128i __A, __m128i __B)
5581 return (__mmask8) __builtin_ia32_pcmpgtq128_mask ((__v2di) __A,
5582 (__v2di) __B, __U);
5585 extern __inline __mmask8
5586 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5587 _mm256_cmpgt_epu64_mask (__m256i __A, __m256i __B)
5589 return (__mmask8) __builtin_ia32_ucmpq256_mask ((__v4di) __A,
5590 (__v4di) __B, 6,
5591 (__mmask8) -1);
5594 extern __inline __mmask8
5595 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5596 _mm256_cmpgt_epi64_mask (__m256i __A, __m256i __B)
5598 return (__mmask8) __builtin_ia32_pcmpgtq256_mask ((__v4di) __A,
5599 (__v4di) __B,
5600 (__mmask8) -1);
5603 extern __inline __mmask8
5604 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5605 _mm256_mask_cmpgt_epu64_mask (__mmask8 __U, __m256i __A, __m256i __B)
5607 return (__mmask8) __builtin_ia32_ucmpq256_mask ((__v4di) __A,
5608 (__v4di) __B, 6, __U);
5611 extern __inline __mmask8
5612 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5613 _mm256_mask_cmpgt_epi64_mask (__mmask8 __U, __m256i __A, __m256i __B)
5615 return (__mmask8) __builtin_ia32_pcmpgtq256_mask ((__v4di) __A,
5616 (__v4di) __B, __U);
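/* Bitwise tests: test_epi32/64_mask sets mask bit i when
   (__A & __B) has any bit set in element i, testn_epi32/64_mask when
   it has none.  */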
5619 extern __inline __mmask8
5620 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5621 _mm_test_epi32_mask (__m128i __A, __m128i __B)
5623 return (__mmask8) __builtin_ia32_ptestmd128 ((__v4si) __A,
5624 (__v4si) __B,
5625 (__mmask8) -1);
5628 extern __inline __mmask8
5629 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5630 _mm_mask_test_epi32_mask (__mmask8 __U, __m128i __A, __m128i __B)
5632 return (__mmask8) __builtin_ia32_ptestmd128 ((__v4si) __A,
5633 (__v4si) __B, __U);
5636 extern __inline __mmask8
5637 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5638 _mm256_test_epi32_mask (__m256i __A, __m256i __B)
5640 return (__mmask8) __builtin_ia32_ptestmd256 ((__v8si) __A,
5641 (__v8si) __B,
5642 (__mmask8) -1);
5645 extern __inline __mmask8
5646 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5647 _mm256_mask_test_epi32_mask (__mmask8 __U, __m256i __A, __m256i __B)
5649 return (__mmask8) __builtin_ia32_ptestmd256 ((__v8si) __A,
5650 (__v8si) __B, __U);
5653 extern __inline __mmask8
5654 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5655 _mm_test_epi64_mask (__m128i __A, __m128i __B)
5657 return (__mmask8) __builtin_ia32_ptestmq128 ((__v2di) __A,
5658 (__v2di) __B,
5659 (__mmask8) -1);
5662 extern __inline __mmask8
5663 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5664 _mm_mask_test_epi64_mask (__mmask8 __U, __m128i __A, __m128i __B)
5666 return (__mmask8) __builtin_ia32_ptestmq128 ((__v2di) __A,
5667 (__v2di) __B, __U);
5670 extern __inline __mmask8
5671 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5672 _mm256_test_epi64_mask (__m256i __A, __m256i __B)
5674 return (__mmask8) __builtin_ia32_ptestmq256 ((__v4di) __A,
5675 (__v4di) __B,
5676 (__mmask8) -1);
5679 extern __inline __mmask8
5680 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5681 _mm256_mask_test_epi64_mask (__mmask8 __U, __m256i __A, __m256i __B)
5683 return (__mmask8) __builtin_ia32_ptestmq256 ((__v4di) __A,
5684 (__v4di) __B, __U);
5687 extern __inline __mmask8
5688 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5689 _mm_testn_epi32_mask (__m128i __A, __m128i __B)
5691 return (__mmask8) __builtin_ia32_ptestnmd128 ((__v4si) __A,
5692 (__v4si) __B,
5693 (__mmask8) -1);
5696 extern __inline __mmask8
5697 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5698 _mm_mask_testn_epi32_mask (__mmask8 __U, __m128i __A, __m128i __B)
5700 return (__mmask8) __builtin_ia32_ptestnmd128 ((__v4si) __A,
5701 (__v4si) __B, __U);
5704 extern __inline __mmask8
5705 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5706 _mm256_testn_epi32_mask (__m256i __A, __m256i __B)
5708 return (__mmask8) __builtin_ia32_ptestnmd256 ((__v8si) __A,
5709 (__v8si) __B,
5710 (__mmask8) -1);
5713 extern __inline __mmask8
5714 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5715 _mm256_mask_testn_epi32_mask (__mmask8 __U, __m256i __A, __m256i __B)
5717 return (__mmask8) __builtin_ia32_ptestnmd256 ((__v8si) __A,
5718 (__v8si) __B, __U);
5721 extern __inline __mmask8
5722 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5723 _mm_testn_epi64_mask (__m128i __A, __m128i __B)
5725 return (__mmask8) __builtin_ia32_ptestnmq128 ((__v2di) __A,
5726 (__v2di) __B,
5727 (__mmask8) -1);
5730 extern __inline __mmask8
5731 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5732 _mm_mask_testn_epi64_mask (__mmask8 __U, __m128i __A, __m128i __B)
5734 return (__mmask8) __builtin_ia32_ptestnmq128 ((__v2di) __A,
5735 (__v2di) __B, __U);
5738 extern __inline __mmask8
5739 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5740 _mm256_testn_epi64_mask (__m256i __A, __m256i __B)
5742 return (__mmask8) __builtin_ia32_ptestnmq256 ((__v4di) __A,
5743 (__v4di) __B,
5744 (__mmask8) -1);
5747 extern __inline __mmask8
5748 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5749 _mm256_mask_testn_epi64_mask (__mmask8 __U, __m256i __A, __m256i __B)
5751 return (__mmask8) __builtin_ia32_ptestnmq256 ((__v4di) __A,
5752 (__v4di) __B, __U);
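/* Compression: the elements selected by __U are packed towards the
   low end of the result, with the remaining lanes taken from __W or
   zeroed; compressstoreu instead stores just the selected elements
   contiguously to the unaligned address __P (e.g. with __U == 0x5
   only elements 0 and 2 of a __m256d are written, to __P[0] and
   __P[1]).  */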
5755 extern __inline __m256d
5756 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5757 _mm256_mask_compress_pd (__m256d __W, __mmask8 __U, __m256d __A)
5759 return (__m256d) __builtin_ia32_compressdf256_mask ((__v4df) __A,
5760 (__v4df) __W,
5761 (__mmask8) __U);
5764 extern __inline __m256d
5765 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5766 _mm256_maskz_compress_pd (__mmask8 __U, __m256d __A)
5768 return (__m256d) __builtin_ia32_compressdf256_mask ((__v4df) __A,
5769 (__v4df)
5770 _mm256_setzero_pd (),
5771 (__mmask8) __U);
5774 extern __inline void
5775 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5776 _mm256_mask_compressstoreu_pd (void *__P, __mmask8 __U, __m256d __A)
5778 __builtin_ia32_compressstoredf256_mask ((__v4df *) __P,
5779 (__v4df) __A,
5780 (__mmask8) __U);
5783 extern __inline __m128d
5784 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5785 _mm_mask_compress_pd (__m128d __W, __mmask8 __U, __m128d __A)
5787 return (__m128d) __builtin_ia32_compressdf128_mask ((__v2df) __A,
5788 (__v2df) __W,
5789 (__mmask8) __U);
5792 extern __inline __m128d
5793 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5794 _mm_maskz_compress_pd (__mmask8 __U, __m128d __A)
5796 return (__m128d) __builtin_ia32_compressdf128_mask ((__v2df) __A,
5797 (__v2df)
5798 _mm_setzero_pd (),
5799 (__mmask8) __U);
5802 extern __inline void
5803 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5804 _mm_mask_compressstoreu_pd (void *__P, __mmask8 __U, __m128d __A)
5806 __builtin_ia32_compressstoredf128_mask ((__v2df *) __P,
5807 (__v2df) __A,
5808 (__mmask8) __U);
5811 extern __inline __m256
5812 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5813 _mm256_mask_compress_ps (__m256 __W, __mmask8 __U, __m256 __A)
5815 return (__m256) __builtin_ia32_compresssf256_mask ((__v8sf) __A,
5816 (__v8sf) __W,
5817 (__mmask8) __U);
5820 extern __inline __m256
5821 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5822 _mm256_maskz_compress_ps (__mmask8 __U, __m256 __A)
5824 return (__m256) __builtin_ia32_compresssf256_mask ((__v8sf) __A,
5825 (__v8sf)
5826 _mm256_setzero_ps (),
5827 (__mmask8) __U);
5830 extern __inline void
5831 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5832 _mm256_mask_compressstoreu_ps (void *__P, __mmask8 __U, __m256 __A)
5834 __builtin_ia32_compressstoresf256_mask ((__v8sf *) __P,
5835 (__v8sf) __A,
5836 (__mmask8) __U);
5839 extern __inline __m128
5840 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5841 _mm_mask_compress_ps (__m128 __W, __mmask8 __U, __m128 __A)
5843 return (__m128) __builtin_ia32_compresssf128_mask ((__v4sf) __A,
5844 (__v4sf) __W,
5845 (__mmask8) __U);
5848 extern __inline __m128
5849 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5850 _mm_maskz_compress_ps (__mmask8 __U, __m128 __A)
5852 return (__m128) __builtin_ia32_compresssf128_mask ((__v4sf) __A,
5853 (__v4sf)
5854 _mm_setzero_ps (),
5855 (__mmask8) __U);
5858 extern __inline void
5859 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5860 _mm_mask_compressstoreu_ps (void *__P, __mmask8 __U, __m128 __A)
5862 __builtin_ia32_compressstoresf128_mask ((__v4sf *) __P,
5863 (__v4sf) __A,
5864 (__mmask8) __U);
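/* Integer variants of the compress operations (VPCOMPRESSQ and
   VPCOMPRESSD) and their unaligned compress-store forms.  */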
5867 extern __inline __m256i
5868 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5869 _mm256_mask_compress_epi64 (__m256i __W, __mmask8 __U, __m256i __A)
5871 return (__m256i) __builtin_ia32_compressdi256_mask ((__v4di) __A,
5872 (__v4di) __W,
5873 (__mmask8) __U);
5876 extern __inline __m256i
5877 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5878 _mm256_maskz_compress_epi64 (__mmask8 __U, __m256i __A)
5880 return (__m256i) __builtin_ia32_compressdi256_mask ((__v4di) __A,
5881 (__v4di)
5882 _mm256_setzero_si256 (),
5883 (__mmask8) __U);
5886 extern __inline void
5887 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5888 _mm256_mask_compressstoreu_epi64 (void *__P, __mmask8 __U, __m256i __A)
5890 __builtin_ia32_compressstoredi256_mask ((__v4di *) __P,
5891 (__v4di) __A,
5892 (__mmask8) __U);
5895 extern __inline __m128i
5896 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5897 _mm_mask_compress_epi64 (__m128i __W, __mmask8 __U, __m128i __A)
5899 return (__m128i) __builtin_ia32_compressdi128_mask ((__v2di) __A,
5900 (__v2di) __W,
5901 (__mmask8) __U);
5904 extern __inline __m128i
5905 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5906 _mm_maskz_compress_epi64 (__mmask8 __U, __m128i __A)
5908 return (__m128i) __builtin_ia32_compressdi128_mask ((__v2di) __A,
5909 (__v2di)
5910 _mm_setzero_si128 (),
5911 (__mmask8) __U);
5914 extern __inline void
5915 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5916 _mm_mask_compressstoreu_epi64 (void *__P, __mmask8 __U, __m128i __A)
5918 __builtin_ia32_compressstoredi128_mask ((__v2di *) __P,
5919 (__v2di) __A,
5920 (__mmask8) __U);
5923 extern __inline __m256i
5924 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5925 _mm256_mask_compress_epi32 (__m256i __W, __mmask8 __U, __m256i __A)
5927 return (__m256i) __builtin_ia32_compresssi256_mask ((__v8si) __A,
5928 (__v8si) __W,
5929 (__mmask8) __U);
5932 extern __inline __m256i
5933 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5934 _mm256_maskz_compress_epi32 (__mmask8 __U, __m256i __A)
5936 return (__m256i) __builtin_ia32_compresssi256_mask ((__v8si) __A,
5937 (__v8si)
5938 _mm256_setzero_si256 (),
5939 (__mmask8) __U);
5942 extern __inline void
5943 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5944 _mm256_mask_compressstoreu_epi32 (void *__P, __mmask8 __U, __m256i __A)
5946 __builtin_ia32_compressstoresi256_mask ((__v8si *) __P,
5947 (__v8si) __A,
5948 (__mmask8) __U);
5951 extern __inline __m128i
5952 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5953 _mm_mask_compress_epi32 (__m128i __W, __mmask8 __U, __m128i __A)
5955 return (__m128i) __builtin_ia32_compresssi128_mask ((__v4si) __A,
5956 (__v4si) __W,
5957 (__mmask8) __U);
5960 extern __inline __m128i
5961 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5962 _mm_maskz_compress_epi32 (__mmask8 __U, __m128i __A)
5964 return (__m128i) __builtin_ia32_compresssi128_mask ((__v4si) __A,
5965 (__v4si)
5966 _mm_setzero_si128 (),
5967 (__mmask8) __U);
5970 extern __inline void
5971 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5972 _mm_mask_compressstoreu_epi32 (void *__P, __mmask8 __U, __m128i __A)
5974 __builtin_ia32_compressstoresi128_mask ((__v4si *) __P,
5975 (__v4si) __A,
5976 (__mmask8) __U);
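/* Masked expand operations: the inverse of compress.  Consecutive
   elements from the low part of the source (or from unaligned memory
   for the expandloadu forms) are placed into the destination
   positions selected by the writemask; unselected positions are
   merged from __W or zeroed.  */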
5979 extern __inline __m256d
5980 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5981 _mm256_mask_expand_pd (__m256d __W, __mmask8 __U, __m256d __A)
5983 return (__m256d) __builtin_ia32_expanddf256_mask ((__v4df) __A,
5984 (__v4df) __W,
5985 (__mmask8) __U);
5988 extern __inline __m256d
5989 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
5990 _mm256_maskz_expand_pd (__mmask8 __U, __m256d __A)
5992 return (__m256d) __builtin_ia32_expanddf256_maskz ((__v4df) __A,
5993 (__v4df)
5994 _mm256_setzero_pd (),
5995 (__mmask8) __U);
5998 extern __inline __m256d
5999 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6000 _mm256_mask_expandloadu_pd (__m256d __W, __mmask8 __U, void const *__P)
6002 return (__m256d) __builtin_ia32_expandloaddf256_mask ((__v4df *) __P,
6003 (__v4df) __W,
6004 (__mmask8)
6005 __U);
6008 extern __inline __m256d
6009 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6010 _mm256_maskz_expandloadu_pd (__mmask8 __U, void const *__P)
6012 return (__m256d) __builtin_ia32_expandloaddf256_maskz ((__v4df *) __P,
6013 (__v4df)
6014 _mm256_setzero_pd (),
6015 (__mmask8)
6016 __U);
6019 extern __inline __m128d
6020 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6021 _mm_mask_expand_pd (__m128d __W, __mmask8 __U, __m128d __A)
6023 return (__m128d) __builtin_ia32_expanddf128_mask ((__v2df) __A,
6024 (__v2df) __W,
6025 (__mmask8) __U);
6028 extern __inline __m128d
6029 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6030 _mm_maskz_expand_pd (__mmask8 __U, __m128d __A)
6032 return (__m128d) __builtin_ia32_expanddf128_maskz ((__v2df) __A,
6033 (__v2df)
6034 _mm_setzero_pd (),
6035 (__mmask8) __U);
6038 extern __inline __m128d
6039 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6040 _mm_mask_expandloadu_pd (__m128d __W, __mmask8 __U, void const *__P)
6042 return (__m128d) __builtin_ia32_expandloaddf128_mask ((__v2df *) __P,
6043 (__v2df) __W,
6044 (__mmask8)
6045 __U);
6048 extern __inline __m128d
6049 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6050 _mm_maskz_expandloadu_pd (__mmask8 __U, void const *__P)
6052 return (__m128d) __builtin_ia32_expandloaddf128_maskz ((__v2df *) __P,
6053 (__v2df)
6054 _mm_setzero_pd (),
6055 (__mmask8)
6056 __U);
6059 extern __inline __m256
6060 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6061 _mm256_mask_expand_ps (__m256 __W, __mmask8 __U, __m256 __A)
6063 return (__m256) __builtin_ia32_expandsf256_mask ((__v8sf) __A,
6064 (__v8sf) __W,
6065 (__mmask8) __U);
6068 extern __inline __m256
6069 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6070 _mm256_maskz_expand_ps (__mmask8 __U, __m256 __A)
6072 return (__m256) __builtin_ia32_expandsf256_maskz ((__v8sf) __A,
6073 (__v8sf)
6074 _mm256_setzero_ps (),
6075 (__mmask8) __U);
6078 extern __inline __m256
6079 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6080 _mm256_mask_expandloadu_ps (__m256 __W, __mmask8 __U, void const *__P)
6082 return (__m256) __builtin_ia32_expandloadsf256_mask ((__v8sf *) __P,
6083 (__v8sf) __W,
6084 (__mmask8) __U);
6087 extern __inline __m256
6088 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6089 _mm256_maskz_expandloadu_ps (__mmask8 __U, void const *__P)
6091 return (__m256) __builtin_ia32_expandloadsf256_maskz ((__v8sf *) __P,
6092 (__v8sf)
6093 _mm256_setzero_ps (),
6094 (__mmask8)
6095 __U);
6098 extern __inline __m128
6099 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6100 _mm_mask_expand_ps (__m128 __W, __mmask8 __U, __m128 __A)
6102 return (__m128) __builtin_ia32_expandsf128_mask ((__v4sf) __A,
6103 (__v4sf) __W,
6104 (__mmask8) __U);
6107 extern __inline __m128
6108 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6109 _mm_maskz_expand_ps (__mmask8 __U, __m128 __A)
6111 return (__m128) __builtin_ia32_expandsf128_maskz ((__v4sf) __A,
6112 (__v4sf)
6113 _mm_setzero_ps (),
6114 (__mmask8) __U);
6117 extern __inline __m128
6118 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6119 _mm_mask_expandloadu_ps (__m128 __W, __mmask8 __U, void const *__P)
6121 return (__m128) __builtin_ia32_expandloadsf128_mask ((__v4sf *) __P,
6122 (__v4sf) __W,
6123 (__mmask8) __U);
6126 extern __inline __m128
6127 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6128 _mm_maskz_expandloadu_ps (__mmask8 __U, void const *__P)
6130 return (__m128) __builtin_ia32_expandloadsf128_maskz ((__v4sf *) __P,
6131 (__v4sf)
6132 _mm_setzero_ps (),
6133 (__mmask8)
6134 __U);
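/* Integer variants of the expand operations (VPEXPANDQ and
   VPEXPANDD).  */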
6137 extern __inline __m256i
6138 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6139 _mm256_mask_expand_epi64 (__m256i __W, __mmask8 __U, __m256i __A)
6141 return (__m256i) __builtin_ia32_expanddi256_mask ((__v4di) __A,
6142 (__v4di) __W,
6143 (__mmask8) __U);
6146 extern __inline __m256i
6147 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6148 _mm256_maskz_expand_epi64 (__mmask8 __U, __m256i __A)
6150 return (__m256i) __builtin_ia32_expanddi256_maskz ((__v4di) __A,
6151 (__v4di)
6152 _mm256_setzero_si256 (),
6153 (__mmask8) __U);
6156 extern __inline __m256i
6157 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6158 _mm256_mask_expandloadu_epi64 (__m256i __W, __mmask8 __U,
6159 void const *__P)
6161 return (__m256i) __builtin_ia32_expandloaddi256_mask ((__v4di *) __P,
6162 (__v4di) __W,
6163 (__mmask8)
6164 __U);
6167 extern __inline __m256i
6168 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6169 _mm256_maskz_expandloadu_epi64 (__mmask8 __U, void const *__P)
6171 return (__m256i) __builtin_ia32_expandloaddi256_maskz ((__v4di *) __P,
6172 (__v4di)
6173 _mm256_setzero_si256 (),
6174 (__mmask8)
6175 __U);
6178 extern __inline __m128i
6179 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6180 _mm_mask_expand_epi64 (__m128i __W, __mmask8 __U, __m128i __A)
6182 return (__m128i) __builtin_ia32_expanddi128_mask ((__v2di) __A,
6183 (__v2di) __W,
6184 (__mmask8) __U);
6187 extern __inline __m128i
6188 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6189 _mm_maskz_expand_epi64 (__mmask8 __U, __m128i __A)
6191 return (__m128i) __builtin_ia32_expanddi128_maskz ((__v2di) __A,
6192 (__v2di)
6193 _mm_setzero_si128 (),
6194 (__mmask8) __U);
6197 extern __inline __m128i
6198 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6199 _mm_mask_expandloadu_epi64 (__m128i __W, __mmask8 __U, void const *__P)
6201 return (__m128i) __builtin_ia32_expandloaddi128_mask ((__v2di *) __P,
6202 (__v2di) __W,
6203 (__mmask8)
6204 __U);
6207 extern __inline __m128i
6208 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6209 _mm_maskz_expandloadu_epi64 (__mmask8 __U, void const *__P)
6211 return (__m128i) __builtin_ia32_expandloaddi128_maskz ((__v2di *) __P,
6212 (__v2di)
6213 _mm_setzero_si128 (),
6214 (__mmask8)
6215 __U);
6218 extern __inline __m256i
6219 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6220 _mm256_mask_expand_epi32 (__m256i __W, __mmask8 __U, __m256i __A)
6222 return (__m256i) __builtin_ia32_expandsi256_mask ((__v8si) __A,
6223 (__v8si) __W,
6224 (__mmask8) __U);
6227 extern __inline __m256i
6228 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6229 _mm256_maskz_expand_epi32 (__mmask8 __U, __m256i __A)
6231 return (__m256i) __builtin_ia32_expandsi256_maskz ((__v8si) __A,
6232 (__v8si)
6233 _mm256_setzero_si256 (),
6234 (__mmask8) __U);
6237 extern __inline __m256i
6238 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6239 _mm256_mask_expandloadu_epi32 (__m256i __W, __mmask8 __U,
6240 void const *__P)
6242 return (__m256i) __builtin_ia32_expandloadsi256_mask ((__v8si *) __P,
6243 (__v8si) __W,
6244 (__mmask8)
6245 __U);
6248 extern __inline __m256i
6249 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6250 _mm256_maskz_expandloadu_epi32 (__mmask8 __U, void const *__P)
6252 return (__m256i) __builtin_ia32_expandloadsi256_maskz ((__v8si *) __P,
6253 (__v8si)
6254 _mm256_setzero_si256 (),
6255 (__mmask8)
6256 __U);
6259 extern __inline __m128i
6260 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6261 _mm_mask_expand_epi32 (__m128i __W, __mmask8 __U, __m128i __A)
6263 return (__m128i) __builtin_ia32_expandsi128_mask ((__v4si) __A,
6264 (__v4si) __W,
6265 (__mmask8) __U);
6268 extern __inline __m128i
6269 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6270 _mm_maskz_expand_epi32 (__mmask8 __U, __m128i __A)
6272 return (__m128i) __builtin_ia32_expandsi128_maskz ((__v4si) __A,
6273 (__v4si)
6274 _mm_setzero_si128 (),
6275 (__mmask8) __U);
6278 extern __inline __m128i
6279 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6280 _mm_mask_expandloadu_epi32 (__m128i __W, __mmask8 __U, void const *__P)
6282 return (__m128i) __builtin_ia32_expandloadsi128_mask ((__v4si *) __P,
6283 (__v4si) __W,
6284 (__mmask8)
6285 __U);
6288 extern __inline __m128i
6289 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6290 _mm_maskz_expandloadu_epi32 (__mmask8 __U, void const *__P)
6292 return (__m128i) __builtin_ia32_expandloadsi128_maskz ((__v4si *) __P,
6293 (__v4si)
6294 _mm_setzero_si128 (),
6295 (__mmask8)
6296 __U);
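/* Two-source permutes (the VPERMT2 and VPERMI2 instruction forms):
   each destination element is selected from the concatenation of the
   two data sources by the index vector __I; the low index bits pick
   the lane and the next bit picks the source.  The mask2 variants
   keep the index operand as the merge source instead of the first
   data operand.  */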
6299 extern __inline __m256d
6300 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6301 _mm256_permutex2var_pd (__m256d __A, __m256i __I, __m256d __B)
6303 return (__m256d) __builtin_ia32_vpermt2varpd256_mask ((__v4di) __I
6304 /* idx */ ,
6305 (__v4df) __A,
6306 (__v4df) __B,
6307 (__mmask8) -1);
6310 extern __inline __m256d
6311 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6312 _mm256_mask_permutex2var_pd (__m256d __A, __mmask8 __U, __m256i __I,
6313 __m256d __B)
6315 return (__m256d) __builtin_ia32_vpermt2varpd256_mask ((__v4di) __I
6316 /* idx */ ,
6317 (__v4df) __A,
6318 (__v4df) __B,
6319 (__mmask8)
6320 __U);
6323 extern __inline __m256d
6324 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6325 _mm256_mask2_permutex2var_pd (__m256d __A, __m256i __I, __mmask8 __U,
6326 __m256d __B)
6328 return (__m256d) __builtin_ia32_vpermi2varpd256_mask ((__v4df) __A,
6329 (__v4di) __I
6330 /* idx */ ,
6331 (__v4df) __B,
6332 (__mmask8)
6333 __U);
6336 extern __inline __m256d
6337 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6338 _mm256_maskz_permutex2var_pd (__mmask8 __U, __m256d __A, __m256i __I,
6339 __m256d __B)
6341 return (__m256d) __builtin_ia32_vpermt2varpd256_maskz ((__v4di) __I
6342 /* idx */ ,
6343 (__v4df) __A,
6344 (__v4df) __B,
6345 (__mmask8)
6346 __U);
6349 extern __inline __m256
6350 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6351 _mm256_permutex2var_ps (__m256 __A, __m256i __I, __m256 __B)
6353 return (__m256) __builtin_ia32_vpermt2varps256_mask ((__v8si) __I
6354 /* idx */ ,
6355 (__v8sf) __A,
6356 (__v8sf) __B,
6357 (__mmask8) -1);
6360 extern __inline __m256
6361 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6362 _mm256_mask_permutex2var_ps (__m256 __A, __mmask8 __U, __m256i __I,
6363 __m256 __B)
6365 return (__m256) __builtin_ia32_vpermt2varps256_mask ((__v8si) __I
6366 /* idx */ ,
6367 (__v8sf) __A,
6368 (__v8sf) __B,
6369 (__mmask8) __U);
6372 extern __inline __m256
6373 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6374 _mm256_mask2_permutex2var_ps (__m256 __A, __m256i __I, __mmask8 __U,
6375 __m256 __B)
6377 return (__m256) __builtin_ia32_vpermi2varps256_mask ((__v8sf) __A,
6378 (__v8si) __I
6379 /* idx */ ,
6380 (__v8sf) __B,
6381 (__mmask8) __U);
6384 extern __inline __m256
6385 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6386 _mm256_maskz_permutex2var_ps (__mmask8 __U, __m256 __A, __m256i __I,
6387 __m256 __B)
6389 return (__m256) __builtin_ia32_vpermt2varps256_maskz ((__v8si) __I
6390 /* idx */ ,
6391 (__v8sf) __A,
6392 (__v8sf) __B,
6393 (__mmask8)
6394 __U);
6397 extern __inline __m128i
6398 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6399 _mm_permutex2var_epi64 (__m128i __A, __m128i __I, __m128i __B)
6401 return (__m128i) __builtin_ia32_vpermt2varq128_mask ((__v2di) __I
6402 /* idx */ ,
6403 (__v2di) __A,
6404 (__v2di) __B,
6405 (__mmask8) -1);
6408 extern __inline __m128i
6409 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6410 _mm_mask_permutex2var_epi64 (__m128i __A, __mmask8 __U, __m128i __I,
6411 __m128i __B)
6413 return (__m128i) __builtin_ia32_vpermt2varq128_mask ((__v2di) __I
6414 /* idx */ ,
6415 (__v2di) __A,
6416 (__v2di) __B,
6417 (__mmask8) __U);
6420 extern __inline __m128i
6421 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6422 _mm_mask2_permutex2var_epi64 (__m128i __A, __m128i __I, __mmask8 __U,
6423 __m128i __B)
6425 return (__m128i) __builtin_ia32_vpermi2varq128_mask ((__v2di) __A,
6426 (__v2di) __I
6427 /* idx */ ,
6428 (__v2di) __B,
6429 (__mmask8) __U);
6432 extern __inline __m128i
6433 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6434 _mm_maskz_permutex2var_epi64 (__mmask8 __U, __m128i __A, __m128i __I,
6435 __m128i __B)
6437 return (__m128i) __builtin_ia32_vpermt2varq128_maskz ((__v2di) __I
6438 /* idx */ ,
6439 (__v2di) __A,
6440 (__v2di) __B,
6441 (__mmask8)
6442 __U);
6445 extern __inline __m128i
6446 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6447 _mm_permutex2var_epi32 (__m128i __A, __m128i __I, __m128i __B)
6449 return (__m128i) __builtin_ia32_vpermt2vard128_mask ((__v4si) __I
6450 /* idx */ ,
6451 (__v4si) __A,
6452 (__v4si) __B,
6453 (__mmask8) -1);
6456 extern __inline __m128i
6457 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6458 _mm_mask_permutex2var_epi32 (__m128i __A, __mmask8 __U, __m128i __I,
6459 __m128i __B)
6461 return (__m128i) __builtin_ia32_vpermt2vard128_mask ((__v4si) __I
6462 /* idx */ ,
6463 (__v4si) __A,
6464 (__v4si) __B,
6465 (__mmask8) __U);
6468 extern __inline __m128i
6469 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6470 _mm_mask2_permutex2var_epi32 (__m128i __A, __m128i __I, __mmask8 __U,
6471 __m128i __B)
6473 return (__m128i) __builtin_ia32_vpermi2vard128_mask ((__v4si) __A,
6474 (__v4si) __I
6475 /* idx */ ,
6476 (__v4si) __B,
6477 (__mmask8) __U);
6480 extern __inline __m128i
6481 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6482 _mm_maskz_permutex2var_epi32 (__mmask8 __U, __m128i __A, __m128i __I,
6483 __m128i __B)
6485 return (__m128i) __builtin_ia32_vpermt2vard128_maskz ((__v4si) __I
6486 /* idx */ ,
6487 (__v4si) __A,
6488 (__v4si) __B,
6489 (__mmask8)
6490 __U);
6493 extern __inline __m256i
6494 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6495 _mm256_permutex2var_epi64 (__m256i __A, __m256i __I, __m256i __B)
6497 return (__m256i) __builtin_ia32_vpermt2varq256_mask ((__v4di) __I
6498 /* idx */ ,
6499 (__v4di) __A,
6500 (__v4di) __B,
6501 (__mmask8) -1);
6504 extern __inline __m256i
6505 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6506 _mm256_mask_permutex2var_epi64 (__m256i __A, __mmask8 __U, __m256i __I,
6507 __m256i __B)
6509 return (__m256i) __builtin_ia32_vpermt2varq256_mask ((__v4di) __I
6510 /* idx */ ,
6511 (__v4di) __A,
6512 (__v4di) __B,
6513 (__mmask8) __U);
6516 extern __inline __m256i
6517 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6518 _mm256_mask2_permutex2var_epi64 (__m256i __A, __m256i __I,
6519 __mmask8 __U, __m256i __B)
6521 return (__m256i) __builtin_ia32_vpermi2varq256_mask ((__v4di) __A,
6522 (__v4di) __I
6523 /* idx */ ,
6524 (__v4di) __B,
6525 (__mmask8) __U);
6528 extern __inline __m256i
6529 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6530 _mm256_maskz_permutex2var_epi64 (__mmask8 __U, __m256i __A,
6531 __m256i __I, __m256i __B)
6533 return (__m256i) __builtin_ia32_vpermt2varq256_maskz ((__v4di) __I
6534 /* idx */ ,
6535 (__v4di) __A,
6536 (__v4di) __B,
6537 (__mmask8)
6538 __U);
6541 extern __inline __m256i
6542 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6543 _mm256_permutex2var_epi32 (__m256i __A, __m256i __I, __m256i __B)
6545 return (__m256i) __builtin_ia32_vpermt2vard256_mask ((__v8si) __I
6546 /* idx */ ,
6547 (__v8si) __A,
6548 (__v8si) __B,
6549 (__mmask8) -1);
6552 extern __inline __m256i
6553 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6554 _mm256_mask_permutex2var_epi32 (__m256i __A, __mmask8 __U, __m256i __I,
6555 __m256i __B)
6557 return (__m256i) __builtin_ia32_vpermt2vard256_mask ((__v8si) __I
6558 /* idx */ ,
6559 (__v8si) __A,
6560 (__v8si) __B,
6561 (__mmask8) __U);
6564 extern __inline __m256i
6565 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6566 _mm256_mask2_permutex2var_epi32 (__m256i __A, __m256i __I,
6567 __mmask8 __U, __m256i __B)
6569 return (__m256i) __builtin_ia32_vpermi2vard256_mask ((__v8si) __A,
6570 (__v8si) __I
6571 /* idx */ ,
6572 (__v8si) __B,
6573 (__mmask8) __U);
6576 extern __inline __m256i
6577 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6578 _mm256_maskz_permutex2var_epi32 (__mmask8 __U, __m256i __A,
6579 __m256i __I, __m256i __B)
6581 return (__m256i) __builtin_ia32_vpermt2vard256_maskz ((__v8si) __I
6582 /* idx */ ,
6583 (__v8si) __A,
6584 (__v8si) __B,
6585 (__mmask8)
6586 __U);
6589 extern __inline __m128d
6590 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6591 _mm_permutex2var_pd (__m128d __A, __m128i __I, __m128d __B)
6593 return (__m128d) __builtin_ia32_vpermt2varpd128_mask ((__v2di) __I
6594 /* idx */ ,
6595 (__v2df) __A,
6596 (__v2df) __B,
6597 (__mmask8) -1);
6600 extern __inline __m128d
6601 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6602 _mm_mask_permutex2var_pd (__m128d __A, __mmask8 __U, __m128i __I,
6603 __m128d __B)
6605 return (__m128d) __builtin_ia32_vpermt2varpd128_mask ((__v2di) __I
6606 /* idx */ ,
6607 (__v2df) __A,
6608 (__v2df) __B,
6609 (__mmask8)
6610 __U);
6613 extern __inline __m128d
6614 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6615 _mm_mask2_permutex2var_pd (__m128d __A, __m128i __I, __mmask8 __U,
6616 __m128d __B)
6618 return (__m128d) __builtin_ia32_vpermi2varpd128_mask ((__v2df) __A,
6619 (__v2di) __I
6620 /* idx */ ,
6621 (__v2df) __B,
6622 (__mmask8)
6623 __U);
6626 extern __inline __m128d
6627 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6628 _mm_maskz_permutex2var_pd (__mmask8 __U, __m128d __A, __m128i __I,
6629 __m128d __B)
6631 return (__m128d) __builtin_ia32_vpermt2varpd128_maskz ((__v2di) __I
6632 /* idx */ ,
6633 (__v2df) __A,
6634 (__v2df) __B,
6635 (__mmask8)
6636 __U);
6639 extern __inline __m128
6640 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6641 _mm_permutex2var_ps (__m128 __A, __m128i __I, __m128 __B)
6643 return (__m128) __builtin_ia32_vpermt2varps128_mask ((__v4si) __I
6644 /* idx */ ,
6645 (__v4sf) __A,
6646 (__v4sf) __B,
6647 (__mmask8) -1);
6650 extern __inline __m128
6651 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6652 _mm_mask_permutex2var_ps (__m128 __A, __mmask8 __U, __m128i __I,
6653 __m128 __B)
6655 return (__m128) __builtin_ia32_vpermt2varps128_mask ((__v4si) __I
6656 /* idx */ ,
6657 (__v4sf) __A,
6658 (__v4sf) __B,
6659 (__mmask8) __U);
6662 extern __inline __m128
6663 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6664 _mm_mask2_permutex2var_ps (__m128 __A, __m128i __I, __mmask8 __U,
6665 __m128 __B)
6667 return (__m128) __builtin_ia32_vpermi2varps128_mask ((__v4sf) __A,
6668 (__v4si) __I
6669 /* idx */ ,
6670 (__v4sf) __B,
6671 (__mmask8) __U);
6674 extern __inline __m128
6675 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6676 _mm_maskz_permutex2var_ps (__mmask8 __U, __m128 __A, __m128i __I,
6677 __m128 __B)
6679 return (__m128) __builtin_ia32_vpermt2varps128_maskz ((__v4si) __I
6680 /* idx */ ,
6681 (__v4sf) __A,
6682 (__v4sf) __B,
6683 (__mmask8)
6684 __U);
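/* Variable-count 64-bit arithmetic right shift (VPSRAVQ), new with
   AVX-512; the unmasked intrinsic passes an all-ones mask to the
   masked built-in.  */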
6687 extern __inline __m128i
6688 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6689 _mm_srav_epi64 (__m128i __X, __m128i __Y)
6691 return (__m128i) __builtin_ia32_psravq128_mask ((__v2di) __X,
6692 (__v2di) __Y,
6693 (__v2di)
6694 _mm_setzero_si128 (),
6695 (__mmask8) -1);
6698 extern __inline __m128i
6699 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6700 _mm_mask_srav_epi64 (__m128i __W, __mmask8 __U, __m128i __X,
6701 __m128i __Y)
6703 return (__m128i) __builtin_ia32_psravq128_mask ((__v2di) __X,
6704 (__v2di) __Y,
6705 (__v2di) __W,
6706 (__mmask8) __U);
6709 extern __inline __m128i
6710 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6711 _mm_maskz_srav_epi64 (__mmask8 __U, __m128i __X, __m128i __Y)
6713 return (__m128i) __builtin_ia32_psravq128_mask ((__v2di) __X,
6714 (__v2di) __Y,
6715 (__v2di)
6716 _mm_setzero_si128 (),
6717 (__mmask8) __U);
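/* Masked forms of the AVX2 variable-count shifts VPSLLVD/VPSLLVQ,
   VPSRAVD and VPSRLVD/VPSRLVQ for 128-bit and 256-bit vectors.  */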
6720 extern __inline __m256i
6721 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6722 _mm256_mask_sllv_epi32 (__m256i __W, __mmask8 __U, __m256i __X,
6723 __m256i __Y)
6725 return (__m256i) __builtin_ia32_psllv8si_mask ((__v8si) __X,
6726 (__v8si) __Y,
6727 (__v8si) __W,
6728 (__mmask8) __U);
6731 extern __inline __m256i
6732 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6733 _mm256_maskz_sllv_epi32 (__mmask8 __U, __m256i __X, __m256i __Y)
6735 return (__m256i) __builtin_ia32_psllv8si_mask ((__v8si) __X,
6736 (__v8si) __Y,
6737 (__v8si)
6738 _mm256_setzero_si256 (),
6739 (__mmask8) __U);
6742 extern __inline __m128i
6743 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6744 _mm_mask_sllv_epi32 (__m128i __W, __mmask8 __U, __m128i __X,
6745 __m128i __Y)
6747 return (__m128i) __builtin_ia32_psllv4si_mask ((__v4si) __X,
6748 (__v4si) __Y,
6749 (__v4si) __W,
6750 (__mmask8) __U);
6753 extern __inline __m128i
6754 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6755 _mm_maskz_sllv_epi32 (__mmask8 __U, __m128i __X, __m128i __Y)
6757 return (__m128i) __builtin_ia32_psllv4si_mask ((__v4si) __X,
6758 (__v4si) __Y,
6759 (__v4si)
6760 _mm_setzero_si128 (),
6761 (__mmask8) __U);
6764 extern __inline __m256i
6765 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6766 _mm256_mask_sllv_epi64 (__m256i __W, __mmask8 __U, __m256i __X,
6767 __m256i __Y)
6769 return (__m256i) __builtin_ia32_psllv4di_mask ((__v4di) __X,
6770 (__v4di) __Y,
6771 (__v4di) __W,
6772 (__mmask8) __U);
6775 extern __inline __m256i
6776 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6777 _mm256_maskz_sllv_epi64 (__mmask8 __U, __m256i __X, __m256i __Y)
6779 return (__m256i) __builtin_ia32_psllv4di_mask ((__v4di) __X,
6780 (__v4di) __Y,
6781 (__v4di)
6782 _mm256_setzero_si256 (),
6783 (__mmask8) __U);
6786 extern __inline __m128i
6787 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6788 _mm_mask_sllv_epi64 (__m128i __W, __mmask8 __U, __m128i __X,
6789 __m128i __Y)
6791 return (__m128i) __builtin_ia32_psllv2di_mask ((__v2di) __X,
6792 (__v2di) __Y,
6793 (__v2di) __W,
6794 (__mmask8) __U);
6797 extern __inline __m128i
6798 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6799 _mm_maskz_sllv_epi64 (__mmask8 __U, __m128i __X, __m128i __Y)
6801 return (__m128i) __builtin_ia32_psllv2di_mask ((__v2di) __X,
6802 (__v2di) __Y,
6803 (__v2di)
6804 _mm_setzero_si128 (),
6805 (__mmask8) __U);
6808 extern __inline __m256i
6809 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6810 _mm256_mask_srav_epi32 (__m256i __W, __mmask8 __U, __m256i __X,
6811 __m256i __Y)
6813 return (__m256i) __builtin_ia32_psrav8si_mask ((__v8si) __X,
6814 (__v8si) __Y,
6815 (__v8si) __W,
6816 (__mmask8) __U);
6819 extern __inline __m256i
6820 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6821 _mm256_maskz_srav_epi32 (__mmask8 __U, __m256i __X, __m256i __Y)
6823 return (__m256i) __builtin_ia32_psrav8si_mask ((__v8si) __X,
6824 (__v8si) __Y,
6825 (__v8si)
6826 _mm256_setzero_si256 (),
6827 (__mmask8) __U);
6830 extern __inline __m128i
6831 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6832 _mm_mask_srav_epi32 (__m128i __W, __mmask8 __U, __m128i __X,
6833 __m128i __Y)
6835 return (__m128i) __builtin_ia32_psrav4si_mask ((__v4si) __X,
6836 (__v4si) __Y,
6837 (__v4si) __W,
6838 (__mmask8) __U);
6841 extern __inline __m128i
6842 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6843 _mm_maskz_srav_epi32 (__mmask8 __U, __m128i __X, __m128i __Y)
6845 return (__m128i) __builtin_ia32_psrav4si_mask ((__v4si) __X,
6846 (__v4si) __Y,
6847 (__v4si)
6848 _mm_setzero_si128 (),
6849 (__mmask8) __U);
6852 extern __inline __m256i
6853 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6854 _mm256_mask_srlv_epi32 (__m256i __W, __mmask8 __U, __m256i __X,
6855 __m256i __Y)
6857 return (__m256i) __builtin_ia32_psrlv8si_mask ((__v8si) __X,
6858 (__v8si) __Y,
6859 (__v8si) __W,
6860 (__mmask8) __U);
6863 extern __inline __m256i
6864 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6865 _mm256_maskz_srlv_epi32 (__mmask8 __U, __m256i __X, __m256i __Y)
6867 return (__m256i) __builtin_ia32_psrlv8si_mask ((__v8si) __X,
6868 (__v8si) __Y,
6869 (__v8si)
6870 _mm256_setzero_si256 (),
6871 (__mmask8) __U);
6874 extern __inline __m128i
6875 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6876 _mm_mask_srlv_epi32 (__m128i __W, __mmask8 __U, __m128i __X,
6877 __m128i __Y)
6879 return (__m128i) __builtin_ia32_psrlv4si_mask ((__v4si) __X,
6880 (__v4si) __Y,
6881 (__v4si) __W,
6882 (__mmask8) __U);
6885 extern __inline __m128i
6886 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6887 _mm_maskz_srlv_epi32 (__mmask8 __U, __m128i __X, __m128i __Y)
6889 return (__m128i) __builtin_ia32_psrlv4si_mask ((__v4si) __X,
6890 (__v4si) __Y,
6891 (__v4si)
6892 _mm_setzero_si128 (),
6893 (__mmask8) __U);
6896 extern __inline __m256i
6897 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6898 _mm256_mask_srlv_epi64 (__m256i __W, __mmask8 __U, __m256i __X,
6899 __m256i __Y)
6901 return (__m256i) __builtin_ia32_psrlv4di_mask ((__v4di) __X,
6902 (__v4di) __Y,
6903 (__v4di) __W,
6904 (__mmask8) __U);
6907 extern __inline __m256i
6908 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6909 _mm256_maskz_srlv_epi64 (__mmask8 __U, __m256i __X, __m256i __Y)
6911 return (__m256i) __builtin_ia32_psrlv4di_mask ((__v4di) __X,
6912 (__v4di) __Y,
6913 (__v4di)
6914 _mm256_setzero_si256 (),
6915 (__mmask8) __U);
6918 extern __inline __m128i
6919 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6920 _mm_mask_srlv_epi64 (__m128i __W, __mmask8 __U, __m128i __X,
6921 __m128i __Y)
6923 return (__m128i) __builtin_ia32_psrlv2di_mask ((__v2di) __X,
6924 (__v2di) __Y,
6925 (__v2di) __W,
6926 (__mmask8) __U);
6929 extern __inline __m128i
6930 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6931 _mm_maskz_srlv_epi64 (__mmask8 __U, __m128i __X, __m128i __Y)
6933 return (__m128i) __builtin_ia32_psrlv2di_mask ((__v2di) __X,
6934 (__v2di) __Y,
6935 (__v2di)
6936 _mm_setzero_si128 (),
6937 (__mmask8) __U);
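/* Variable-count rotates (VPROLVD/VPROLVQ and VPRORVD/VPRORVQ):
   rotate each element left or right by the per-element count in __B,
   taken modulo the element width.  */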
6940 extern __inline __m256i
6941 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6942 _mm256_rolv_epi32 (__m256i __A, __m256i __B)
6944 return (__m256i) __builtin_ia32_prolvd256_mask ((__v8si) __A,
6945 (__v8si) __B,
6946 (__v8si)
6947 _mm256_setzero_si256 (),
6948 (__mmask8) -1);
6951 extern __inline __m256i
6952 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6953 _mm256_mask_rolv_epi32 (__m256i __W, __mmask8 __U, __m256i __A,
6954 __m256i __B)
6956 return (__m256i) __builtin_ia32_prolvd256_mask ((__v8si) __A,
6957 (__v8si) __B,
6958 (__v8si) __W,
6959 (__mmask8) __U);
6962 extern __inline __m256i
6963 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6964 _mm256_maskz_rolv_epi32 (__mmask8 __U, __m256i __A, __m256i __B)
6966 return (__m256i) __builtin_ia32_prolvd256_mask ((__v8si) __A,
6967 (__v8si) __B,
6968 (__v8si)
6969 _mm256_setzero_si256 (),
6970 (__mmask8) __U);
6973 extern __inline __m128i
6974 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6975 _mm_rolv_epi32 (__m128i __A, __m128i __B)
6977 return (__m128i) __builtin_ia32_prolvd128_mask ((__v4si) __A,
6978 (__v4si) __B,
6979 (__v4si)
6980 _mm_setzero_si128 (),
6981 (__mmask8) -1);
6984 extern __inline __m128i
6985 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6986 _mm_mask_rolv_epi32 (__m128i __W, __mmask8 __U, __m128i __A,
6987 __m128i __B)
6989 return (__m128i) __builtin_ia32_prolvd128_mask ((__v4si) __A,
6990 (__v4si) __B,
6991 (__v4si) __W,
6992 (__mmask8) __U);
6995 extern __inline __m128i
6996 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
6997 _mm_maskz_rolv_epi32 (__mmask8 __U, __m128i __A, __m128i __B)
6999 return (__m128i) __builtin_ia32_prolvd128_mask ((__v4si) __A,
7000 (__v4si) __B,
7001 (__v4si)
7002 _mm_setzero_si128 (),
7003 (__mmask8) __U);
7006 extern __inline __m256i
7007 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7008 _mm256_rorv_epi32 (__m256i __A, __m256i __B)
7010 return (__m256i) __builtin_ia32_prorvd256_mask ((__v8si) __A,
7011 (__v8si) __B,
7012 (__v8si)
7013 _mm256_setzero_si256 (),
7014 (__mmask8) -1);
7017 extern __inline __m256i
7018 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7019 _mm256_mask_rorv_epi32 (__m256i __W, __mmask8 __U, __m256i __A,
7020 __m256i __B)
7022 return (__m256i) __builtin_ia32_prorvd256_mask ((__v8si) __A,
7023 (__v8si) __B,
7024 (__v8si) __W,
7025 (__mmask8) __U);
7028 extern __inline __m256i
7029 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7030 _mm256_maskz_rorv_epi32 (__mmask8 __U, __m256i __A, __m256i __B)
7032 return (__m256i) __builtin_ia32_prorvd256_mask ((__v8si) __A,
7033 (__v8si) __B,
7034 (__v8si)
7035 _mm256_setzero_si256 (),
7036 (__mmask8) __U);
7039 extern __inline __m128i
7040 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7041 _mm_rorv_epi32 (__m128i __A, __m128i __B)
7043 return (__m128i) __builtin_ia32_prorvd128_mask ((__v4si) __A,
7044 (__v4si) __B,
7045 (__v4si)
7046 _mm_setzero_si128 (),
7047 (__mmask8) -1);
7050 extern __inline __m128i
7051 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7052 _mm_mask_rorv_epi32 (__m128i __W, __mmask8 __U, __m128i __A,
7053 __m128i __B)
7055 return (__m128i) __builtin_ia32_prorvd128_mask ((__v4si) __A,
7056 (__v4si) __B,
7057 (__v4si) __W,
7058 (__mmask8) __U);
7061 extern __inline __m128i
7062 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7063 _mm_maskz_rorv_epi32 (__mmask8 __U, __m128i __A, __m128i __B)
7065 return (__m128i) __builtin_ia32_prorvd128_mask ((__v4si) __A,
7066 (__v4si) __B,
7067 (__v4si)
7068 _mm_setzero_si128 (),
7069 (__mmask8) __U);
7072 extern __inline __m256i
7073 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7074 _mm256_rolv_epi64 (__m256i __A, __m256i __B)
7076 return (__m256i) __builtin_ia32_prolvq256_mask ((__v4di) __A,
7077 (__v4di) __B,
7078 (__v4di)
7079 _mm256_setzero_si256 (),
7080 (__mmask8) -1);
7083 extern __inline __m256i
7084 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7085 _mm256_mask_rolv_epi64 (__m256i __W, __mmask8 __U, __m256i __A,
7086 __m256i __B)
7088 return (__m256i) __builtin_ia32_prolvq256_mask ((__v4di) __A,
7089 (__v4di) __B,
7090 (__v4di) __W,
7091 (__mmask8) __U);
7094 extern __inline __m256i
7095 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7096 _mm256_maskz_rolv_epi64 (__mmask8 __U, __m256i __A, __m256i __B)
7098 return (__m256i) __builtin_ia32_prolvq256_mask ((__v4di) __A,
7099 (__v4di) __B,
7100 (__v4di)
7101 _mm256_setzero_si256 (),
7102 (__mmask8) __U);
7105 extern __inline __m128i
7106 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7107 _mm_rolv_epi64 (__m128i __A, __m128i __B)
7109 return (__m128i) __builtin_ia32_prolvq128_mask ((__v2di) __A,
7110 (__v2di) __B,
7111 (__v2di)
7112 _mm_setzero_si128 (),
7113 (__mmask8) -1);
7116 extern __inline __m128i
7117 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7118 _mm_mask_rolv_epi64 (__m128i __W, __mmask8 __U, __m128i __A,
7119 __m128i __B)
7121 return (__m128i) __builtin_ia32_prolvq128_mask ((__v2di) __A,
7122 (__v2di) __B,
7123 (__v2di) __W,
7124 (__mmask8) __U);
7127 extern __inline __m128i
7128 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7129 _mm_maskz_rolv_epi64 (__mmask8 __U, __m128i __A, __m128i __B)
7131 return (__m128i) __builtin_ia32_prolvq128_mask ((__v2di) __A,
7132 (__v2di) __B,
7133 (__v2di)
7134 _mm_setzero_si128 (),
7135 (__mmask8) __U);
7138 extern __inline __m256i
7139 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7140 _mm256_rorv_epi64 (__m256i __A, __m256i __B)
7142 return (__m256i) __builtin_ia32_prorvq256_mask ((__v4di) __A,
7143 (__v4di) __B,
7144 (__v4di)
7145 _mm256_setzero_si256 (),
7146 (__mmask8) -1);
7149 extern __inline __m256i
7150 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7151 _mm256_mask_rorv_epi64 (__m256i __W, __mmask8 __U, __m256i __A,
7152 __m256i __B)
7154 return (__m256i) __builtin_ia32_prorvq256_mask ((__v4di) __A,
7155 (__v4di) __B,
7156 (__v4di) __W,
7157 (__mmask8) __U);
7160 extern __inline __m256i
7161 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7162 _mm256_maskz_rorv_epi64 (__mmask8 __U, __m256i __A, __m256i __B)
7164 return (__m256i) __builtin_ia32_prorvq256_mask ((__v4di) __A,
7165 (__v4di) __B,
7166 (__v4di)
7167 _mm256_setzero_si256 (),
7168 (__mmask8) __U);
7171 extern __inline __m128i
7172 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7173 _mm_rorv_epi64 (__m128i __A, __m128i __B)
7175 return (__m128i) __builtin_ia32_prorvq128_mask ((__v2di) __A,
7176 (__v2di) __B,
7177 (__v2di)
7178 _mm_setzero_si128 (),
7179 (__mmask8) -1);
7182 extern __inline __m128i
7183 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7184 _mm_mask_rorv_epi64 (__m128i __W, __mmask8 __U, __m128i __A,
7185 __m128i __B)
7187 return (__m128i) __builtin_ia32_prorvq128_mask ((__v2di) __A,
7188 (__v2di) __B,
7189 (__v2di) __W,
7190 (__mmask8) __U);
7193 extern __inline __m128i
7194 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7195 _mm_maskz_rorv_epi64 (__mmask8 __U, __m128i __A, __m128i __B)
7197 return (__m128i) __builtin_ia32_prorvq128_mask ((__v2di) __A,
7198 (__v2di) __B,
7199 (__v2di)
7200 _mm_setzero_si128 (),
7201 (__mmask8) __U);
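/* 256-bit counterparts of the variable-count 64-bit arithmetic right
   shift defined above.  */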
7204 extern __inline __m256i
7205 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7206 _mm256_srav_epi64 (__m256i __X, __m256i __Y)
7208 return (__m256i) __builtin_ia32_psravq256_mask ((__v4di) __X,
7209 (__v4di) __Y,
7210 (__v4di)
7211 _mm256_setzero_si256 (),
7212 (__mmask8) -1);
7215 extern __inline __m256i
7216 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7217 _mm256_mask_srav_epi64 (__m256i __W, __mmask8 __U, __m256i __X,
7218 __m256i __Y)
7220 return (__m256i) __builtin_ia32_psravq256_mask ((__v4di) __X,
7221 (__v4di) __Y,
7222 (__v4di) __W,
7223 (__mmask8) __U);
7226 extern __inline __m256i
7227 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7228 _mm256_maskz_srav_epi64 (__mmask8 __U, __m256i __X, __m256i __Y)
7230 return (__m256i) __builtin_ia32_psravq256_mask ((__v4di) __X,
7231 (__v4di) __Y,
7232 (__v4di)
7233 _mm256_setzero_si256 (),
7234 (__mmask8) __U);
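/* Masked 64-bit bitwise logic: VPANDQ, VPANDNQ, VPORQ and VPXORQ.  */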
7237 extern __inline __m256i
7238 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7239 _mm256_mask_and_epi64 (__m256i __W, __mmask8 __U, __m256i __A,
7240 __m256i __B)
7242 return (__m256i) __builtin_ia32_pandq256_mask ((__v4di) __A,
7243 (__v4di) __B,
7244 (__v4di) __W, __U);
7247 extern __inline __m256i
7248 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7249 _mm256_maskz_and_epi64 (__mmask8 __U, __m256i __A, __m256i __B)
7251 return (__m256i) __builtin_ia32_pandq256_mask ((__v4di) __A,
7252 (__v4di) __B,
7253 (__v4di)
7254 _mm256_setzero_si256 (),
7255 __U);
7258 extern __inline __m128i
7259 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7260 _mm_mask_and_epi64 (__m128i __W, __mmask8 __U, __m128i __A,
7261 __m128i __B)
7263 return (__m128i) __builtin_ia32_pandq128_mask ((__v2di) __A,
7264 (__v2di) __B,
7265 (__v2di) __W, __U);
7268 extern __inline __m128i
7269 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7270 _mm_maskz_and_epi64 (__mmask8 __U, __m128i __A, __m128i __B)
7272 return (__m128i) __builtin_ia32_pandq128_mask ((__v2di) __A,
7273 (__v2di) __B,
7274 (__v2di)
7275 _mm_setzero_si128 (),
7276 __U);
7279 extern __inline __m256i
7280 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7281 _mm256_mask_andnot_epi64 (__m256i __W, __mmask8 __U, __m256i __A,
7282 __m256i __B)
7284 return (__m256i) __builtin_ia32_pandnq256_mask ((__v4di) __A,
7285 (__v4di) __B,
7286 (__v4di) __W, __U);
7289 extern __inline __m256i
7290 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7291 _mm256_maskz_andnot_epi64 (__mmask8 __U, __m256i __A, __m256i __B)
7293 return (__m256i) __builtin_ia32_pandnq256_mask ((__v4di) __A,
7294 (__v4di) __B,
7295 (__v4di)
7296 _mm256_setzero_si256 (),
7297 __U);
7300 extern __inline __m128i
7301 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7302 _mm_mask_andnot_epi64 (__m128i __W, __mmask8 __U, __m128i __A,
7303 __m128i __B)
7305 return (__m128i) __builtin_ia32_pandnq128_mask ((__v2di) __A,
7306 (__v2di) __B,
7307 (__v2di) __W, __U);
7310 extern __inline __m128i
7311 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7312 _mm_maskz_andnot_epi64 (__mmask8 __U, __m128i __A, __m128i __B)
7314 return (__m128i) __builtin_ia32_pandnq128_mask ((__v2di) __A,
7315 (__v2di) __B,
7316 (__v2di)
7317 _mm_setzero_si128 (),
7318 __U);
7321 extern __inline __m256i
7322 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7323 _mm256_mask_or_epi64 (__m256i __W, __mmask8 __U, __m256i __A,
7324 __m256i __B)
7326 return (__m256i) __builtin_ia32_porq256_mask ((__v4di) __A,
7327 (__v4di) __B,
7328 (__v4di) __W,
7329 (__mmask8) __U);
7332 extern __inline __m256i
7333 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7334 _mm256_maskz_or_epi64 (__mmask8 __U, __m256i __A, __m256i __B)
7336 return (__m256i) __builtin_ia32_porq256_mask ((__v4di) __A,
7337 (__v4di) __B,
7338 (__v4di)
7339 _mm256_setzero_si256 (),
7340 (__mmask8) __U);
7343 extern __inline __m128i
7344 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7345 _mm_mask_or_epi64 (__m128i __W, __mmask8 __U, __m128i __A, __m128i __B)
7347 return (__m128i) __builtin_ia32_porq128_mask ((__v2di) __A,
7348 (__v2di) __B,
7349 (__v2di) __W,
7350 (__mmask8) __U);
7353 extern __inline __m128i
7354 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7355 _mm_maskz_or_epi64 (__mmask8 __U, __m128i __A, __m128i __B)
7357 return (__m128i) __builtin_ia32_porq128_mask ((__v2di) __A,
7358 (__v2di) __B,
7359 (__v2di)
7360 _mm_setzero_si128 (),
7361 (__mmask8) __U);
7364 extern __inline __m256i
7365 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7366 _mm256_mask_xor_epi64 (__m256i __W, __mmask8 __U, __m256i __A,
7367 __m256i __B)
7369 return (__m256i) __builtin_ia32_pxorq256_mask ((__v4di) __A,
7370 (__v4di) __B,
7371 (__v4di) __W,
7372 (__mmask8) __U);
7375 extern __inline __m256i
7376 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7377 _mm256_maskz_xor_epi64 (__mmask8 __U, __m256i __A, __m256i __B)
7379 return (__m256i) __builtin_ia32_pxorq256_mask ((__v4di) __A,
7380 (__v4di) __B,
7381 (__v4di)
7382 _mm256_setzero_si256 (),
7383 (__mmask8) __U);
7386 extern __inline __m128i
7387 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7388 _mm_mask_xor_epi64 (__m128i __W, __mmask8 __U, __m128i __A,
7389 __m128i __B)
7391 return (__m128i) __builtin_ia32_pxorq128_mask ((__v2di) __A,
7392 (__v2di) __B,
7393 (__v2di) __W,
7394 (__mmask8) __U);
7397 extern __inline __m128i
7398 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7399 _mm_maskz_xor_epi64 (__mmask8 __U, __m128i __A, __m128i __B)
7401 return (__m128i) __builtin_ia32_pxorq128_mask ((__v2di) __A,
7402 (__v2di) __B,
7403 (__v2di)
7404 _mm_setzero_si128 (),
7405 (__mmask8) __U);
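/* Masked forms of the packed floating-point max, min, div and mul
   operations on 128-bit and 256-bit vectors.  */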
7408 extern __inline __m256d
7409 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7410 _mm256_mask_max_pd (__m256d __W, __mmask8 __U, __m256d __A,
7411 __m256d __B)
7413 return (__m256d) __builtin_ia32_maxpd256_mask ((__v4df) __A,
7414 (__v4df) __B,
7415 (__v4df) __W,
7416 (__mmask8) __U);
7419 extern __inline __m256d
7420 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7421 _mm256_maskz_max_pd (__mmask8 __U, __m256d __A, __m256d __B)
7423 return (__m256d) __builtin_ia32_maxpd256_mask ((__v4df) __A,
7424 (__v4df) __B,
7425 (__v4df)
7426 _mm256_setzero_pd (),
7427 (__mmask8) __U);
7430 extern __inline __m256
7431 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7432 _mm256_mask_max_ps (__m256 __W, __mmask8 __U, __m256 __A, __m256 __B)
7434 return (__m256) __builtin_ia32_maxps256_mask ((__v8sf) __A,
7435 (__v8sf) __B,
7436 (__v8sf) __W,
7437 (__mmask8) __U);
7440 extern __inline __m256
7441 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7442 _mm256_maskz_max_ps (__mmask8 __U, __m256 __A, __m256 __B)
7444 return (__m256) __builtin_ia32_maxps256_mask ((__v8sf) __A,
7445 (__v8sf) __B,
7446 (__v8sf)
7447 _mm256_setzero_ps (),
7448 (__mmask8) __U);
7451 extern __inline __m128
7452 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7453 _mm_mask_div_ps (__m128 __W, __mmask8 __U, __m128 __A, __m128 __B)
7455 return (__m128) __builtin_ia32_divps_mask ((__v4sf) __A,
7456 (__v4sf) __B,
7457 (__v4sf) __W,
7458 (__mmask8) __U);
7461 extern __inline __m128
7462 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7463 _mm_maskz_div_ps (__mmask8 __U, __m128 __A, __m128 __B)
7465 return (__m128) __builtin_ia32_divps_mask ((__v4sf) __A,
7466 (__v4sf) __B,
7467 (__v4sf)
7468 _mm_setzero_ps (),
7469 (__mmask8) __U);
7472 extern __inline __m128d
7473 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7474 _mm_mask_div_pd (__m128d __W, __mmask8 __U, __m128d __A, __m128d __B)
7476 return (__m128d) __builtin_ia32_divpd_mask ((__v2df) __A,
7477 (__v2df) __B,
7478 (__v2df) __W,
7479 (__mmask8) __U);
7482 extern __inline __m128d
7483 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7484 _mm_maskz_div_pd (__mmask8 __U, __m128d __A, __m128d __B)
7486 return (__m128d) __builtin_ia32_divpd_mask ((__v2df) __A,
7487 (__v2df) __B,
7488 (__v2df)
7489 _mm_setzero_pd (),
7490 (__mmask8) __U);
7493 extern __inline __m256d
7494 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7495 _mm256_mask_min_pd (__m256d __W, __mmask8 __U, __m256d __A,
7496 __m256d __B)
7498 return (__m256d) __builtin_ia32_minpd256_mask ((__v4df) __A,
7499 (__v4df) __B,
7500 (__v4df) __W,
7501 (__mmask8) __U);
7504 extern __inline __m256d
7505 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7506 _mm256_mask_div_pd (__m256d __W, __mmask8 __U, __m256d __A,
7507 __m256d __B)
7509 return (__m256d) __builtin_ia32_divpd256_mask ((__v4df) __A,
7510 (__v4df) __B,
7511 (__v4df) __W,
7512 (__mmask8) __U);
7515 extern __inline __m256d
7516 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7517 _mm256_maskz_min_pd (__mmask8 __U, __m256d __A, __m256d __B)
7519 return (__m256d) __builtin_ia32_minpd256_mask ((__v4df) __A,
7520 (__v4df) __B,
7521 (__v4df)
7522 _mm256_setzero_pd (),
7523 (__mmask8) __U);
7526 extern __inline __m256
7527 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7528 _mm256_mask_min_ps (__m256 __W, __mmask8 __U, __m256 __A, __m256 __B)
7530 return (__m256) __builtin_ia32_minps256_mask ((__v8sf) __A,
7531 (__v8sf) __B,
7532 (__v8sf) __W,
7533 (__mmask8) __U);
7536 extern __inline __m256d
7537 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7538 _mm256_maskz_div_pd (__mmask8 __U, __m256d __A, __m256d __B)
7540 return (__m256d) __builtin_ia32_divpd256_mask ((__v4df) __A,
7541 (__v4df) __B,
7542 (__v4df)
7543 _mm256_setzero_pd (),
7544 (__mmask8) __U);
7547 extern __inline __m256
7548 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7549 _mm256_mask_div_ps (__m256 __W, __mmask8 __U, __m256 __A, __m256 __B)
7551 return (__m256) __builtin_ia32_divps256_mask ((__v8sf) __A,
7552 (__v8sf) __B,
7553 (__v8sf) __W,
7554 (__mmask8) __U);
7557 extern __inline __m256
7558 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7559 _mm256_maskz_min_ps (__mmask8 __U, __m256 __A, __m256 __B)
7561 return (__m256) __builtin_ia32_minps256_mask ((__v8sf) __A,
7562 (__v8sf) __B,
7563 (__v8sf)
7564 _mm256_setzero_ps (),
7565 (__mmask8) __U);
7568 extern __inline __m256
7569 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7570 _mm256_maskz_div_ps (__mmask8 __U, __m256 __A, __m256 __B)
7572 return (__m256) __builtin_ia32_divps256_mask ((__v8sf) __A,
7573 (__v8sf) __B,
7574 (__v8sf)
7575 _mm256_setzero_ps (),
7576 (__mmask8) __U);
7579 extern __inline __m128
7580 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7581 _mm_mask_min_ps (__m128 __W, __mmask8 __U, __m128 __A, __m128 __B)
7583 return (__m128) __builtin_ia32_minps_mask ((__v4sf) __A,
7584 (__v4sf) __B,
7585 (__v4sf) __W,
7586 (__mmask8) __U);
7589 extern __inline __m128
7590 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7591 _mm_mask_mul_ps (__m128 __W, __mmask8 __U, __m128 __A, __m128 __B)
7593 return (__m128) __builtin_ia32_mulps_mask ((__v4sf) __A,
7594 (__v4sf) __B,
7595 (__v4sf) __W,
7596 (__mmask8) __U);
7599 extern __inline __m128
7600 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7601 _mm_maskz_min_ps (__mmask8 __U, __m128 __A, __m128 __B)
7603 return (__m128) __builtin_ia32_minps_mask ((__v4sf) __A,
7604 (__v4sf) __B,
7605 (__v4sf)
7606 _mm_setzero_ps (),
7607 (__mmask8) __U);
7610 extern __inline __m128
7611 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7612 _mm_maskz_mul_ps (__mmask8 __U, __m128 __A, __m128 __B)
7614 return (__m128) __builtin_ia32_mulps_mask ((__v4sf) __A,
7615 (__v4sf) __B,
7616 (__v4sf)
7617 _mm_setzero_ps (),
7618 (__mmask8) __U);
7621 extern __inline __m128
7622 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7623 _mm_mask_max_ps (__m128 __W, __mmask8 __U, __m128 __A, __m128 __B)
7625 return (__m128) __builtin_ia32_maxps_mask ((__v4sf) __A,
7626 (__v4sf) __B,
7627 (__v4sf) __W,
7628 (__mmask8) __U);
7631 extern __inline __m128
7632 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7633 _mm_maskz_max_ps (__mmask8 __U, __m128 __A, __m128 __B)
7635 return (__m128) __builtin_ia32_maxps_mask ((__v4sf) __A,
7636 (__v4sf) __B,
7637 (__v4sf)
7638 _mm_setzero_ps (),
7639 (__mmask8) __U);
7642 extern __inline __m128d
7643 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7644 _mm_mask_min_pd (__m128d __W, __mmask8 __U, __m128d __A, __m128d __B)
7646 return (__m128d) __builtin_ia32_minpd_mask ((__v2df) __A,
7647 (__v2df) __B,
7648 (__v2df) __W,
7649 (__mmask8) __U);
7652 extern __inline __m128d
7653 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7654 _mm_maskz_min_pd (__mmask8 __U, __m128d __A, __m128d __B)
7656 return (__m128d) __builtin_ia32_minpd_mask ((__v2df) __A,
7657 (__v2df) __B,
7658 (__v2df)
7659 _mm_setzero_pd (),
7660 (__mmask8) __U);
7663 extern __inline __m128d
7664 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7665 _mm_mask_max_pd (__m128d __W, __mmask8 __U, __m128d __A, __m128d __B)
7667 return (__m128d) __builtin_ia32_maxpd_mask ((__v2df) __A,
7668 (__v2df) __B,
7669 (__v2df) __W,
7670 (__mmask8) __U);
7673 extern __inline __m128d
7674 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7675 _mm_maskz_max_pd (__mmask8 __U, __m128d __A, __m128d __B)
7677 return (__m128d) __builtin_ia32_maxpd_mask ((__v2df) __A,
7678 (__v2df) __B,
7679 (__v2df)
7680 _mm_setzero_pd (),
7681 (__mmask8) __U);
7684 extern __inline __m128d
7685 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7686 _mm_mask_mul_pd (__m128d __W, __mmask8 __U, __m128d __A, __m128d __B)
7688 return (__m128d) __builtin_ia32_mulpd_mask ((__v2df) __A,
7689 (__v2df) __B,
7690 (__v2df) __W,
7691 (__mmask8) __U);
7694 extern __inline __m128d
7695 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7696 _mm_maskz_mul_pd (__mmask8 __U, __m128d __A, __m128d __B)
7698 return (__m128d) __builtin_ia32_mulpd_mask ((__v2df) __A,
7699 (__v2df) __B,
7700 (__v2df)
7701 _mm_setzero_pd (),
7702 (__mmask8) __U);
7705 extern __inline __m256
7706 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7707 _mm256_mask_mul_ps (__m256 __W, __mmask8 __U, __m256 __A, __m256 __B)
7709 return (__m256) __builtin_ia32_mulps256_mask ((__v8sf) __A,
7710 (__v8sf) __B,
7711 (__v8sf) __W,
7712 (__mmask8) __U);
7715 extern __inline __m256
7716 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7717 _mm256_maskz_mul_ps (__mmask8 __U, __m256 __A, __m256 __B)
7719 return (__m256) __builtin_ia32_mulps256_mask ((__v8sf) __A,
7720 (__v8sf) __B,
7721 (__v8sf)
7722 _mm256_setzero_ps (),
7723 (__mmask8) __U);
7726 extern __inline __m256d
7727 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7728 _mm256_mask_mul_pd (__m256d __W, __mmask8 __U, __m256d __A,
7729 __m256d __B)
7731 return (__m256d) __builtin_ia32_mulpd256_mask ((__v4df) __A,
7732 (__v4df) __B,
7733 (__v4df) __W,
7734 (__mmask8) __U);
7737 extern __inline __m256d
7738 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7739 _mm256_maskz_mul_pd (__mmask8 __U, __m256d __A, __m256d __B)
7741 return (__m256d) __builtin_ia32_mulpd256_mask ((__v4df) __A,
7742 (__v4df) __B,
7743 (__v4df)
7744 _mm256_setzero_pd (),
7745 (__mmask8) __U);
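/* Editorial usage sketch, not part of the header proper: the _mm*_mask_*
   arithmetic wrappers above merge their result into __W under the write
   mask, while the _mm*_maskz_* forms zero the deselected lanes.  The helper
   name below is hypothetical; it assumes a translation unit compiled with
   -mavx512vl that reaches these intrinsics through <immintrin.h>.  */
static __inline __m256d
__avx512vl_demo_masked_mul_pd (__m256d __a, __m256d __b)
{
  /* Multiply only lanes 0 and 2; lanes 1 and 3 of the result are zeroed.  */
  return _mm256_maskz_mul_pd ((__mmask8) 0x5, __a, __b);
}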
7748 extern __inline __m256i
7749 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7750 _mm256_maskz_max_epi64 (__mmask8 __M, __m256i __A, __m256i __B)
7752 return (__m256i) __builtin_ia32_pmaxsq256_mask ((__v4di) __A,
7753 (__v4di) __B,
7754 (__v4di)
7755 _mm256_setzero_si256 (),
7756 __M);
7759 extern __inline __m256i
7760 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7761 _mm256_mask_max_epi64 (__m256i __W, __mmask8 __M, __m256i __A,
7762 __m256i __B)
7764 return (__m256i) __builtin_ia32_pmaxsq256_mask ((__v4di) __A,
7765 (__v4di) __B,
7766 (__v4di) __W, __M);
7769 extern __inline __m256i
7770 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7771 _mm256_min_epi64 (__m256i __A, __m256i __B)
7773 return (__m256i) __builtin_ia32_pminsq256_mask ((__v4di) __A,
7774 (__v4di) __B,
7775 (__v4di)
7776 _mm256_setzero_si256 (),
7777 (__mmask8) -1);
7780 extern __inline __m256i
7781 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7782 _mm256_mask_min_epi64 (__m256i __W, __mmask8 __M, __m256i __A,
7783 __m256i __B)
7785 return (__m256i) __builtin_ia32_pminsq256_mask ((__v4di) __A,
7786 (__v4di) __B,
7787 (__v4di) __W, __M);
7790 extern __inline __m256i
7791 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7792 _mm256_maskz_min_epi64 (__mmask8 __M, __m256i __A, __m256i __B)
7794 return (__m256i) __builtin_ia32_pminsq256_mask ((__v4di) __A,
7795 (__v4di) __B,
7796 (__v4di)
7797 _mm256_setzero_si256 (),
7798 __M);
7801 extern __inline __m256i
7802 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7803 _mm256_maskz_max_epu64 (__mmask8 __M, __m256i __A, __m256i __B)
7805 return (__m256i) __builtin_ia32_pmaxuq256_mask ((__v4di) __A,
7806 (__v4di) __B,
7807 (__v4di)
7808 _mm256_setzero_si256 (),
7809 __M);
7812 extern __inline __m256i
7813 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7814 _mm256_max_epi64 (__m256i __A, __m256i __B)
7816 return (__m256i) __builtin_ia32_pmaxsq256_mask ((__v4di) __A,
7817 (__v4di) __B,
7818 (__v4di)
7819 _mm256_setzero_si256 (),
7820 (__mmask8) -1);
7823 extern __inline __m256i
7824 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7825 _mm256_max_epu64 (__m256i __A, __m256i __B)
7827 return (__m256i) __builtin_ia32_pmaxuq256_mask ((__v4di) __A,
7828 (__v4di) __B,
7829 (__v4di)
7830 _mm256_setzero_si256 (),
7831 (__mmask8) -1);
7834 extern __inline __m256i
7835 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7836 _mm256_mask_max_epu64 (__m256i __W, __mmask8 __M, __m256i __A,
7837 __m256i __B)
7839 return (__m256i) __builtin_ia32_pmaxuq256_mask ((__v4di) __A,
7840 (__v4di) __B,
7841 (__v4di) __W, __M);
7844 extern __inline __m256i
7845 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7846 _mm256_min_epu64 (__m256i __A, __m256i __B)
7848 return (__m256i) __builtin_ia32_pminuq256_mask ((__v4di) __A,
7849 (__v4di) __B,
7850 (__v4di)
7851 _mm256_setzero_si256 (),
7852 (__mmask8) -1);
7855 extern __inline __m256i
7856 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7857 _mm256_mask_min_epu64 (__m256i __W, __mmask8 __M, __m256i __A,
7858 __m256i __B)
7860 return (__m256i) __builtin_ia32_pminuq256_mask ((__v4di) __A,
7861 (__v4di) __B,
7862 (__v4di) __W, __M);
7865 extern __inline __m256i
7866 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7867 _mm256_maskz_min_epu64 (__mmask8 __M, __m256i __A, __m256i __B)
7869 return (__m256i) __builtin_ia32_pminuq256_mask ((__v4di) __A,
7870 (__v4di) __B,
7871 (__v4di)
7872 _mm256_setzero_si256 (),
7873 __M);
7876 extern __inline __m256i
7877 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7878 _mm256_maskz_max_epi32 (__mmask8 __M, __m256i __A, __m256i __B)
7880 return (__m256i) __builtin_ia32_pmaxsd256_mask ((__v8si) __A,
7881 (__v8si) __B,
7882 (__v8si)
7883 _mm256_setzero_si256 (),
7884 __M);
7887 extern __inline __m256i
7888 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7889 _mm256_mask_max_epi32 (__m256i __W, __mmask8 __M, __m256i __A,
7890 __m256i __B)
7892 return (__m256i) __builtin_ia32_pmaxsd256_mask ((__v8si) __A,
7893 (__v8si) __B,
7894 (__v8si) __W, __M);
7897 extern __inline __m256i
7898 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7899 _mm256_maskz_min_epi32 (__mmask8 __M, __m256i __A, __m256i __B)
7901 return (__m256i) __builtin_ia32_pminsd256_mask ((__v8si) __A,
7902 (__v8si) __B,
7903 (__v8si)
7904 _mm256_setzero_si256 (),
7905 __M);
7908 extern __inline __m256i
7909 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7910 _mm256_mask_min_epi32 (__m256i __W, __mmask8 __M, __m256i __A,
7911 __m256i __B)
7913 return (__m256i) __builtin_ia32_pminsd256_mask ((__v8si) __A,
7914 (__v8si) __B,
7915 (__v8si) __W, __M);
7918 extern __inline __m256i
7919 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7920 _mm256_maskz_max_epu32 (__mmask8 __M, __m256i __A, __m256i __B)
7922 return (__m256i) __builtin_ia32_pmaxud256_mask ((__v8si) __A,
7923 (__v8si) __B,
7924 (__v8si)
7925 _mm256_setzero_si256 (),
7926 __M);
7929 extern __inline __m256i
7930 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7931 _mm256_mask_max_epu32 (__m256i __W, __mmask8 __M, __m256i __A,
7932 __m256i __B)
7934 return (__m256i) __builtin_ia32_pmaxud256_mask ((__v8si) __A,
7935 (__v8si) __B,
7936 (__v8si) __W, __M);
7939 extern __inline __m256i
7940 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7941 _mm256_maskz_min_epu32 (__mmask8 __M, __m256i __A, __m256i __B)
7943 return (__m256i) __builtin_ia32_pminud256_mask ((__v8si) __A,
7944 (__v8si) __B,
7945 (__v8si)
7946 _mm256_setzero_si256 (),
7947 __M);
7950 extern __inline __m256i
7951 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7952 _mm256_mask_min_epu32 (__m256i __W, __mmask8 __M, __m256i __A,
7953 __m256i __B)
7955 return (__m256i) __builtin_ia32_pminud256_mask ((__v8si) __A,
7956 (__v8si) __B,
7957 (__v8si) __W, __M);
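/* Editorial usage sketch (hypothetical helper, same -mavx512vl assumption as
   the earlier sketch): the masked vpmaxs/vpmins wrappers make per-lane
   clamping a single instruction per bound.  */
static __inline __m256i
__avx512vl_demo_relu_epi32 (__mmask8 __m, __m256i __x)
{
  /* Signed per-lane max against zero on the lanes selected by __m; the
     remaining lanes of the result are zeroed.  */
  return _mm256_maskz_max_epi32 (__m, __x, _mm256_setzero_si256 ());
}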
7960 extern __inline __m128i
7961 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7962 _mm_maskz_max_epi64 (__mmask8 __M, __m128i __A, __m128i __B)
7964 return (__m128i) __builtin_ia32_pmaxsq128_mask ((__v2di) __A,
7965 (__v2di) __B,
7966 (__v2di)
7967 _mm_setzero_si128 (),
7968 __M);
7971 extern __inline __m128i
7972 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7973 _mm_mask_max_epi64 (__m128i __W, __mmask8 __M, __m128i __A,
7974 __m128i __B)
7976 return (__m128i) __builtin_ia32_pmaxsq128_mask ((__v2di) __A,
7977 (__v2di) __B,
7978 (__v2di) __W, __M);
7981 extern __inline __m128i
7982 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7983 _mm_min_epi64 (__m128i __A, __m128i __B)
7985 return (__m128i) __builtin_ia32_pminsq128_mask ((__v2di) __A,
7986 (__v2di) __B,
7987 (__v2di)
7988 _mm_setzero_si128 (),
7989 (__mmask8) -1);
7992 extern __inline __m128i
7993 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
7994 _mm_mask_min_epi64 (__m128i __W, __mmask8 __M, __m128i __A,
7995 __m128i __B)
7997 return (__m128i) __builtin_ia32_pminsq128_mask ((__v2di) __A,
7998 (__v2di) __B,
7999 (__v2di) __W, __M);
8002 extern __inline __m128i
8003 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8004 _mm_maskz_min_epi64 (__mmask8 __M, __m128i __A, __m128i __B)
8006 return (__m128i) __builtin_ia32_pminsq128_mask ((__v2di) __A,
8007 (__v2di) __B,
8008 (__v2di)
8009 _mm_setzero_si128 (),
8010 __M);
8013 extern __inline __m128i
8014 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8015 _mm_maskz_max_epu64 (__mmask8 __M, __m128i __A, __m128i __B)
8017 return (__m128i) __builtin_ia32_pmaxuq128_mask ((__v2di) __A,
8018 (__v2di) __B,
8019 (__v2di)
8020 _mm_setzero_si128 (),
8021 __M);
8024 extern __inline __m128i
8025 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8026 _mm_max_epi64 (__m128i __A, __m128i __B)
8028 return (__m128i) __builtin_ia32_pmaxsq128_mask ((__v2di) __A,
8029 (__v2di) __B,
8030 (__v2di)
8031 _mm_setzero_si128 (),
8032 (__mmask8) -1);
8035 extern __inline __m128i
8036 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8037 _mm_max_epu64 (__m128i __A, __m128i __B)
8039 return (__m128i) __builtin_ia32_pmaxuq128_mask ((__v2di) __A,
8040 (__v2di) __B,
8041 (__v2di)
8042 _mm_setzero_si128 (),
8043 (__mmask8) -1);
8046 extern __inline __m128i
8047 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8048 _mm_mask_max_epu64 (__m128i __W, __mmask8 __M, __m128i __A,
8049 __m128i __B)
8051 return (__m128i) __builtin_ia32_pmaxuq128_mask ((__v2di) __A,
8052 (__v2di) __B,
8053 (__v2di) __W, __M);
8056 extern __inline __m128i
8057 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8058 _mm_min_epu64 (__m128i __A, __m128i __B)
8060 return (__m128i) __builtin_ia32_pminuq128_mask ((__v2di) __A,
8061 (__v2di) __B,
8062 (__v2di)
8063 _mm_setzero_si128 (),
8064 (__mmask8) -1);
8067 extern __inline __m128i
8068 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8069 _mm_mask_min_epu64 (__m128i __W, __mmask8 __M, __m128i __A,
8070 __m128i __B)
8072 return (__m128i) __builtin_ia32_pminuq128_mask ((__v2di) __A,
8073 (__v2di) __B,
8074 (__v2di) __W, __M);
8077 extern __inline __m128i
8078 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8079 _mm_maskz_min_epu64 (__mmask8 __M, __m128i __A, __m128i __B)
8081 return (__m128i) __builtin_ia32_pminuq128_mask ((__v2di) __A,
8082 (__v2di) __B,
8083 (__v2di)
8084 _mm_setzero_si128 (),
8085 __M);
8088 extern __inline __m128i
8089 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8090 _mm_maskz_max_epi32 (__mmask8 __M, __m128i __A, __m128i __B)
8092 return (__m128i) __builtin_ia32_pmaxsd128_mask ((__v4si) __A,
8093 (__v4si) __B,
8094 (__v4si)
8095 _mm_setzero_si128 (),
8096 __M);
8099 extern __inline __m128i
8100 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8101 _mm_mask_max_epi32 (__m128i __W, __mmask8 __M, __m128i __A,
8102 __m128i __B)
8104 return (__m128i) __builtin_ia32_pmaxsd128_mask ((__v4si) __A,
8105 (__v4si) __B,
8106 (__v4si) __W, __M);
8109 extern __inline __m128i
8110 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8111 _mm_maskz_min_epi32 (__mmask8 __M, __m128i __A, __m128i __B)
8113 return (__m128i) __builtin_ia32_pminsd128_mask ((__v4si) __A,
8114 (__v4si) __B,
8115 (__v4si)
8116 _mm_setzero_si128 (),
8117 __M);
8120 extern __inline __m128i
8121 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8122 _mm_mask_min_epi32 (__m128i __W, __mmask8 __M, __m128i __A,
8123 __m128i __B)
8125 return (__m128i) __builtin_ia32_pminsd128_mask ((__v4si) __A,
8126 (__v4si) __B,
8127 (__v4si) __W, __M);
8130 extern __inline __m128i
8131 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8132 _mm_maskz_max_epu32 (__mmask8 __M, __m128i __A, __m128i __B)
8134 return (__m128i) __builtin_ia32_pmaxud128_mask ((__v4si) __A,
8135 (__v4si) __B,
8136 (__v4si)
8137 _mm_setzero_si128 (),
8138 __M);
8141 extern __inline __m128i
8142 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8143 _mm_mask_max_epu32 (__m128i __W, __mmask8 __M, __m128i __A,
8144 __m128i __B)
8146 return (__m128i) __builtin_ia32_pmaxud128_mask ((__v4si) __A,
8147 (__v4si) __B,
8148 (__v4si) __W, __M);
8151 extern __inline __m128i
8152 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8153 _mm_maskz_min_epu32 (__mmask8 __M, __m128i __A, __m128i __B)
8155 return (__m128i) __builtin_ia32_pminud128_mask ((__v4si) __A,
8156 (__v4si) __B,
8157 (__v4si)
8158 _mm_setzero_si128 (),
8159 __M);
8162 extern __inline __m128i
8163 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8164 _mm_mask_min_epu32 (__m128i __W, __mmask8 __M, __m128i __A,
8165 __m128i __B)
8167 return (__m128i) __builtin_ia32_pminud128_mask ((__v4si) __A,
8168 (__v4si) __B,
8169 (__v4si) __W, __M);
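/* Editorial usage sketch (hypothetical helper, -mavx512vl assumed): AVX-512VL
   adds 64-bit integer min/max such as vpminuq, which otherwise needs a
   compare-and-blend sequence on SSE/AVX2.  */
static __inline __m128i
__avx512vl_demo_min_epu64 (__m128i __a, __m128i __b)
{
  /* Unsigned 64-bit per-lane minimum.  */
  return _mm_min_epu64 (__a, __b);
}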
8172 #ifndef __AVX512CD__
8173 #pragma GCC push_options
8174 #pragma GCC target("avx512vl,avx512cd")
8175 #define __DISABLE_AVX512VLCD__
8176 #endif
8178 extern __inline __m128i
8179 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8180 _mm_broadcastmb_epi64 (__mmask8 __A)
8182 return (__m128i) __builtin_ia32_broadcastmb128 (__A);
8185 extern __inline __m256i
8186 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8187 _mm256_broadcastmb_epi64 (__mmask8 __A)
8189 return (__m256i) __builtin_ia32_broadcastmb256 (__A);
8192 extern __inline __m128i
8193 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8194 _mm_broadcastmw_epi32 (__mmask16 __A)
8196 return (__m128i) __builtin_ia32_broadcastmw128 (__A);
8199 extern __inline __m256i
8200 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8201 _mm256_broadcastmw_epi32 (__mmask16 __A)
8203 return (__m256i) __builtin_ia32_broadcastmw256 (__A);
8206 extern __inline __m256i
8207 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8208 _mm256_lzcnt_epi32 (__m256i __A)
8210 return (__m256i) __builtin_ia32_vplzcntd_256_mask ((__v8si) __A,
8211 (__v8si)
8212 _mm256_setzero_si256 (),
8213 (__mmask8) -1);
8216 extern __inline __m256i
8217 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8218 _mm256_mask_lzcnt_epi32 (__m256i __W, __mmask8 __U, __m256i __A)
8220 return (__m256i) __builtin_ia32_vplzcntd_256_mask ((__v8si) __A,
8221 (__v8si) __W,
8222 (__mmask8) __U);
8225 extern __inline __m256i
8226 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8227 _mm256_maskz_lzcnt_epi32 (__mmask8 __U, __m256i __A)
8229 return (__m256i) __builtin_ia32_vplzcntd_256_mask ((__v8si) __A,
8230 (__v8si)
8231 _mm256_setzero_si256 (),
8232 (__mmask8) __U);
8235 extern __inline __m256i
8236 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8237 _mm256_lzcnt_epi64 (__m256i __A)
8239 return (__m256i) __builtin_ia32_vplzcntq_256_mask ((__v4di) __A,
8240 (__v4di)
8241 _mm256_setzero_si256 (),
8242 (__mmask8) -1);
8245 extern __inline __m256i
8246 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8247 _mm256_mask_lzcnt_epi64 (__m256i __W, __mmask8 __U, __m256i __A)
8249 return (__m256i) __builtin_ia32_vplzcntq_256_mask ((__v4di) __A,
8250 (__v4di) __W,
8251 (__mmask8) __U);
8254 extern __inline __m256i
8255 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8256 _mm256_maskz_lzcnt_epi64 (__mmask8 __U, __m256i __A)
8258 return (__m256i) __builtin_ia32_vplzcntq_256_mask ((__v4di) __A,
8259 (__v4di)
8260 _mm256_setzero_si256 (),
8261 (__mmask8) __U);
8264 extern __inline __m256i
8265 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8266 _mm256_conflict_epi64 (__m256i __A)
8268 return (__m256i) __builtin_ia32_vpconflictdi_256_mask ((__v4di) __A,
8269 (__v4di)
8270 _mm256_setzero_si256 (),
8271 (__mmask8) -1);
8274 extern __inline __m256i
8275 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8276 _mm256_mask_conflict_epi64 (__m256i __W, __mmask8 __U, __m256i __A)
8278 return (__m256i) __builtin_ia32_vpconflictdi_256_mask ((__v4di) __A,
8279 (__v4di) __W,
8280 (__mmask8)
8281 __U);
8284 extern __inline __m256i
8285 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8286 _mm256_maskz_conflict_epi64 (__mmask8 __U, __m256i __A)
8288 return (__m256i) __builtin_ia32_vpconflictdi_256_mask ((__v4di) __A,
8289 (__v4di)
8290 _mm256_setzero_si256 (),
8291 (__mmask8)
8292 __U);
8295 extern __inline __m256i
8296 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8297 _mm256_conflict_epi32 (__m256i __A)
8299 return (__m256i) __builtin_ia32_vpconflictsi_256_mask ((__v8si) __A,
8300 (__v8si)
8301 _mm256_setzero_si256 (),
8302 (__mmask8) -1);
8305 extern __inline __m256i
8306 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8307 _mm256_mask_conflict_epi32 (__m256i __W, __mmask8 __U, __m256i __A)
8309 return (__m256i) __builtin_ia32_vpconflictsi_256_mask ((__v8si) __A,
8310 (__v8si) __W,
8311 (__mmask8)
8312 __U);
8315 extern __inline __m256i
8316 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8317 _mm256_maskz_conflict_epi32 (__mmask8 __U, __m256i __A)
8319 return (__m256i) __builtin_ia32_vpconflictsi_256_mask ((__v8si) __A,
8320 (__v8si)
8321 _mm256_setzero_si256 (),
8322 (__mmask8)
8323 __U);
8326 extern __inline __m128i
8327 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8328 _mm_lzcnt_epi32 (__m128i __A)
8330 return (__m128i) __builtin_ia32_vplzcntd_128_mask ((__v4si) __A,
8331 (__v4si)
8332 _mm_setzero_si128 (),
8333 (__mmask8) -1);
8336 extern __inline __m128i
8337 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8338 _mm_mask_lzcnt_epi32 (__m128i __W, __mmask8 __U, __m128i __A)
8340 return (__m128i) __builtin_ia32_vplzcntd_128_mask ((__v4si) __A,
8341 (__v4si) __W,
8342 (__mmask8) __U);
8345 extern __inline __m128i
8346 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8347 _mm_maskz_lzcnt_epi32 (__mmask8 __U, __m128i __A)
8349 return (__m128i) __builtin_ia32_vplzcntd_128_mask ((__v4si) __A,
8350 (__v4si)
8351 _mm_setzero_si128 (),
8352 (__mmask8) __U);
8355 extern __inline __m128i
8356 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8357 _mm_lzcnt_epi64 (__m128i __A)
8359 return (__m128i) __builtin_ia32_vplzcntq_128_mask ((__v2di) __A,
8360 (__v2di)
8361 _mm_setzero_si128 (),
8362 (__mmask8) -1);
8365 extern __inline __m128i
8366 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8367 _mm_mask_lzcnt_epi64 (__m128i __W, __mmask8 __U, __m128i __A)
8369 return (__m128i) __builtin_ia32_vplzcntq_128_mask ((__v2di) __A,
8370 (__v2di) __W,
8371 (__mmask8) __U);
8374 extern __inline __m128i
8375 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8376 _mm_maskz_lzcnt_epi64 (__mmask8 __U, __m128i __A)
8378 return (__m128i) __builtin_ia32_vplzcntq_128_mask ((__v2di) __A,
8379 (__v2di)
8380 _mm_setzero_si128 (),
8381 (__mmask8) __U);
8384 extern __inline __m128i
8385 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8386 _mm_conflict_epi64 (__m128i __A)
8388 return (__m128i) __builtin_ia32_vpconflictdi_128_mask ((__v2di) __A,
8389 (__v2di)
8390 _mm_setzero_si128 (),
8391 (__mmask8) -1);
8394 extern __inline __m128i
8395 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8396 _mm_mask_conflict_epi64 (__m128i __W, __mmask8 __U, __m128i __A)
8398 return (__m128i) __builtin_ia32_vpconflictdi_128_mask ((__v2di) __A,
8399 (__v2di) __W,
8400 (__mmask8)
8401 __U);
8404 extern __inline __m128i
8405 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8406 _mm_maskz_conflict_epi64 (__mmask8 __U, __m128i __A)
8408 return (__m128i) __builtin_ia32_vpconflictdi_128_mask ((__v2di) __A,
8409 (__v2di)
8410 _mm_setzero_si128 (),
8411 (__mmask8)
8412 __U);
8415 extern __inline __m128i
8416 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8417 _mm_conflict_epi32 (__m128i __A)
8419 return (__m128i) __builtin_ia32_vpconflictsi_128_mask ((__v4si) __A,
8420 (__v4si)
8421 _mm_setzero_si128 (),
8422 (__mmask8) -1);
8425 extern __inline __m128i
8426 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8427 _mm_mask_conflict_epi32 (__m128i __W, __mmask8 __U, __m128i __A)
8429 return (__m128i) __builtin_ia32_vpconflictsi_128_mask ((__v4si) __A,
8430 (__v4si) __W,
8431 (__mmask8)
8432 __U);
8435 extern __inline __m128i
8436 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8437 _mm_maskz_conflict_epi32 (__mmask8 __U, __m128i __A)
8439 return (__m128i) __builtin_ia32_vpconflictsi_128_mask ((__v4si) __A,
8440 (__v4si)
8441 _mm_setzero_si128 (),
8442 (__mmask8)
8443 __U);
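/* Editorial usage sketch (hypothetical helper; requires -mavx512vl together
   with -mavx512cd, matching the target pragma of this block): vpconflictd
   gives every 32-bit lane a bit set for each earlier lane holding the same
   value, and vplzcntd counts the leading zeros of that bit set, a common
   building block for conflict-free gather/scatter loops.  */
static __inline __m128i
__avx512vlcd_demo_conflict_lzcnt (__m128i __idx)
{
  /* Detect duplicate indices, then turn each conflict set into a lane
     distance via its leading-zero count.  */
  return _mm_lzcnt_epi32 (_mm_conflict_epi32 (__idx));
}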
8446 #ifdef __DISABLE_AVX512VLCD__
8447 #pragma GCC pop_options
8448 #endif
8450 extern __inline __m256d
8451 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8452 _mm256_mask_unpacklo_pd (__m256d __W, __mmask8 __U, __m256d __A,
8453 __m256d __B)
8455 return (__m256d) __builtin_ia32_unpcklpd256_mask ((__v4df) __A,
8456 (__v4df) __B,
8457 (__v4df) __W,
8458 (__mmask8) __U);
8461 extern __inline __m256d
8462 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8463 _mm256_maskz_unpacklo_pd (__mmask8 __U, __m256d __A, __m256d __B)
8465 return (__m256d) __builtin_ia32_unpcklpd256_mask ((__v4df) __A,
8466 (__v4df) __B,
8467 (__v4df)
8468 _mm256_setzero_pd (),
8469 (__mmask8) __U);
8472 extern __inline __m128d
8473 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8474 _mm_mask_unpacklo_pd (__m128d __W, __mmask8 __U, __m128d __A,
8475 __m128d __B)
8477 return (__m128d) __builtin_ia32_unpcklpd128_mask ((__v2df) __A,
8478 (__v2df) __B,
8479 (__v2df) __W,
8480 (__mmask8) __U);
8483 extern __inline __m128d
8484 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8485 _mm_maskz_unpacklo_pd (__mmask8 __U, __m128d __A, __m128d __B)
8487 return (__m128d) __builtin_ia32_unpcklpd128_mask ((__v2df) __A,
8488 (__v2df) __B,
8489 (__v2df)
8490 _mm_setzero_pd (),
8491 (__mmask8) __U);
8494 extern __inline __m256
8495 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8496 _mm256_mask_unpacklo_ps (__m256 __W, __mmask8 __U, __m256 __A,
8497 __m256 __B)
8499 return (__m256) __builtin_ia32_unpcklps256_mask ((__v8sf) __A,
8500 (__v8sf) __B,
8501 (__v8sf) __W,
8502 (__mmask8) __U);
8505 extern __inline __m256d
8506 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8507 _mm256_mask_unpackhi_pd (__m256d __W, __mmask8 __U, __m256d __A,
8508 __m256d __B)
8510 return (__m256d) __builtin_ia32_unpckhpd256_mask ((__v4df) __A,
8511 (__v4df) __B,
8512 (__v4df) __W,
8513 (__mmask8) __U);
8516 extern __inline __m256d
8517 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8518 _mm256_maskz_unpackhi_pd (__mmask8 __U, __m256d __A, __m256d __B)
8520 return (__m256d) __builtin_ia32_unpckhpd256_mask ((__v4df) __A,
8521 (__v4df) __B,
8522 (__v4df)
8523 _mm256_setzero_pd (),
8524 (__mmask8) __U);
8527 extern __inline __m128d
8528 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8529 _mm_mask_unpackhi_pd (__m128d __W, __mmask8 __U, __m128d __A,
8530 __m128d __B)
8532 return (__m128d) __builtin_ia32_unpckhpd128_mask ((__v2df) __A,
8533 (__v2df) __B,
8534 (__v2df) __W,
8535 (__mmask8) __U);
8538 extern __inline __m128d
8539 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8540 _mm_maskz_unpackhi_pd (__mmask8 __U, __m128d __A, __m128d __B)
8542 return (__m128d) __builtin_ia32_unpckhpd128_mask ((__v2df) __A,
8543 (__v2df) __B,
8544 (__v2df)
8545 _mm_setzero_pd (),
8546 (__mmask8) __U);
8549 extern __inline __m256
8550 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8551 _mm256_mask_unpackhi_ps (__m256 __W, __mmask8 __U, __m256 __A,
8552 __m256 __B)
8554 return (__m256) __builtin_ia32_unpckhps256_mask ((__v8sf) __A,
8555 (__v8sf) __B,
8556 (__v8sf) __W,
8557 (__mmask8) __U);
8560 extern __inline __m256
8561 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8562 _mm256_maskz_unpackhi_ps (__mmask8 __U, __m256 __A, __m256 __B)
8564 return (__m256) __builtin_ia32_unpckhps256_mask ((__v8sf) __A,
8565 (__v8sf) __B,
8566 (__v8sf)
8567 _mm256_setzero_ps (),
8568 (__mmask8) __U);
8571 extern __inline __m128
8572 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8573 _mm_mask_unpackhi_ps (__m128 __W, __mmask8 __U, __m128 __A, __m128 __B)
8575 return (__m128) __builtin_ia32_unpckhps128_mask ((__v4sf) __A,
8576 (__v4sf) __B,
8577 (__v4sf) __W,
8578 (__mmask8) __U);
8581 extern __inline __m128
8582 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8583 _mm_maskz_unpackhi_ps (__mmask8 __U, __m128 __A, __m128 __B)
8585 return (__m128) __builtin_ia32_unpckhps128_mask ((__v4sf) __A,
8586 (__v4sf) __B,
8587 (__v4sf)
8588 _mm_setzero_ps (),
8589 (__mmask8) __U);
8592 extern __inline __m128
8593 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8594 _mm_mask_cvtph_ps (__m128 __W, __mmask8 __U, __m128i __A)
8596 return (__m128) __builtin_ia32_vcvtph2ps_mask ((__v8hi) __A,
8597 (__v4sf) __W,
8598 (__mmask8) __U);
8601 extern __inline __m128
8602 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8603 _mm_maskz_cvtph_ps (__mmask8 __U, __m128i __A)
8605 return (__m128) __builtin_ia32_vcvtph2ps_mask ((__v8hi) __A,
8606 (__v4sf)
8607 _mm_setzero_ps (),
8608 (__mmask8) __U);
8611 extern __inline __m256
8612 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8613 _mm256_maskz_unpacklo_ps (__mmask8 __U, __m256 __A, __m256 __B)
8615 return (__m256) __builtin_ia32_unpcklps256_mask ((__v8sf) __A,
8616 (__v8sf) __B,
8617 (__v8sf)
8618 _mm256_setzero_ps (),
8619 (__mmask8) __U);
8622 extern __inline __m256
8623 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8624 _mm256_mask_cvtph_ps (__m256 __W, __mmask8 __U, __m128i __A)
8626 return (__m256) __builtin_ia32_vcvtph2ps256_mask ((__v8hi) __A,
8627 (__v8sf) __W,
8628 (__mmask8) __U);
8631 extern __inline __m256
8632 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8633 _mm256_maskz_cvtph_ps (__mmask8 __U, __m128i __A)
8635 return (__m256) __builtin_ia32_vcvtph2ps256_mask ((__v8hi) __A,
8636 (__v8sf)
8637 _mm256_setzero_ps (),
8638 (__mmask8) __U);
8641 extern __inline __m128
8642 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8643 _mm_mask_unpacklo_ps (__m128 __W, __mmask8 __U, __m128 __A, __m128 __B)
8645 return (__m128) __builtin_ia32_unpcklps128_mask ((__v4sf) __A,
8646 (__v4sf) __B,
8647 (__v4sf) __W,
8648 (__mmask8) __U);
8651 extern __inline __m128
8652 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8653 _mm_maskz_unpacklo_ps (__mmask8 __U, __m128 __A, __m128 __B)
8655 return (__m128) __builtin_ia32_unpcklps128_mask ((__v4sf) __A,
8656 (__v4sf) __B,
8657 (__v4sf)
8658 _mm_setzero_ps (),
8659 (__mmask8) __U);
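/* Editorial usage sketch (hypothetical helper, -mavx512vl and the usual
   <immintrin.h> route assumed): the masked vcvtph2ps wrappers convert packed
   IEEE half-precision values while merging into, or zeroing, the deselected
   float lanes.  */
static __inline __m256
__avx512vl_demo_cvtph (__m256 __src, __mmask8 __m, __m128i __half)
{
  /* Convert eight halves to floats; lanes with a clear mask bit keep the
     corresponding element of __src.  */
  return _mm256_mask_cvtph_ps (__src, __m, __half);
}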
8662 extern __inline __m256i
8663 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8664 _mm256_mask_sra_epi32 (__m256i __W, __mmask8 __U, __m256i __A,
8665 __m128i __B)
8667 return (__m256i) __builtin_ia32_psrad256_mask ((__v8si) __A,
8668 (__v4si) __B,
8669 (__v8si) __W,
8670 (__mmask8) __U);
8673 extern __inline __m256i
8674 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8675 _mm256_maskz_sra_epi32 (__mmask8 __U, __m256i __A, __m128i __B)
8677 return (__m256i) __builtin_ia32_psrad256_mask ((__v8si) __A,
8678 (__v4si) __B,
8679 (__v8si)
8680 _mm256_setzero_si256 (),
8681 (__mmask8) __U);
8684 extern __inline __m128i
8685 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8686 _mm_mask_sra_epi32 (__m128i __W, __mmask8 __U, __m128i __A,
8687 __m128i __B)
8689 return (__m128i) __builtin_ia32_psrad128_mask ((__v4si) __A,
8690 (__v4si) __B,
8691 (__v4si) __W,
8692 (__mmask8) __U);
8695 extern __inline __m128i
8696 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8697 _mm_maskz_sra_epi32 (__mmask8 __U, __m128i __A, __m128i __B)
8699 return (__m128i) __builtin_ia32_psrad128_mask ((__v4si) __A,
8700 (__v4si) __B,
8701 (__v4si)
8702 _mm_setzero_si128 (),
8703 (__mmask8) __U);
8706 extern __inline __m256i
8707 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8708 _mm256_sra_epi64 (__m256i __A, __m128i __B)
8710 return (__m256i) __builtin_ia32_psraq256_mask ((__v4di) __A,
8711 (__v2di) __B,
8712 (__v4di)
8713 _mm256_setzero_si256 (),
8714 (__mmask8) -1);
8717 extern __inline __m256i
8718 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8719 _mm256_mask_sra_epi64 (__m256i __W, __mmask8 __U, __m256i __A,
8720 __m128i __B)
8722 return (__m256i) __builtin_ia32_psraq256_mask ((__v4di) __A,
8723 (__v2di) __B,
8724 (__v4di) __W,
8725 (__mmask8) __U);
8728 extern __inline __m256i
8729 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8730 _mm256_maskz_sra_epi64 (__mmask8 __U, __m256i __A, __m128i __B)
8732 return (__m256i) __builtin_ia32_psraq256_mask ((__v4di) __A,
8733 (__v2di) __B,
8734 (__v4di)
8735 _mm256_setzero_si256 (),
8736 (__mmask8) __U);
8739 extern __inline __m128i
8740 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8741 _mm_sra_epi64 (__m128i __A, __m128i __B)
8743 return (__m128i) __builtin_ia32_psraq128_mask ((__v2di) __A,
8744 (__v2di) __B,
8745 (__v2di)
8746 _mm_setzero_si128 (),
8747 (__mmask8) -1);
8750 extern __inline __m128i
8751 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8752 _mm_mask_sra_epi64 (__m128i __W, __mmask8 __U, __m128i __A,
8753 __m128i __B)
8755 return (__m128i) __builtin_ia32_psraq128_mask ((__v2di) __A,
8756 (__v2di) __B,
8757 (__v2di) __W,
8758 (__mmask8) __U);
8761 extern __inline __m128i
8762 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8763 _mm_maskz_sra_epi64 (__mmask8 __U, __m128i __A, __m128i __B)
8765 return (__m128i) __builtin_ia32_psraq128_mask ((__v2di) __A,
8766 (__v2di) __B,
8767 (__v2di)
8768 _mm_setzero_si128 (),
8769 (__mmask8) __U);
8772 extern __inline __m128i
8773 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8774 _mm_mask_sll_epi32 (__m128i __W, __mmask8 __U, __m128i __A,
8775 __m128i __B)
8777 return (__m128i) __builtin_ia32_pslld128_mask ((__v4si) __A,
8778 (__v4si) __B,
8779 (__v4si) __W,
8780 (__mmask8) __U);
8783 extern __inline __m128i
8784 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8785 _mm_maskz_sll_epi32 (__mmask8 __U, __m128i __A, __m128i __B)
8787 return (__m128i) __builtin_ia32_pslld128_mask ((__v4si) __A,
8788 (__v4si) __B,
8789 (__v4si)
8790 _mm_setzero_si128 (),
8791 (__mmask8) __U);
8794 extern __inline __m128i
8795 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8796 _mm_mask_sll_epi64 (__m128i __W, __mmask8 __U, __m128i __A,
8797 __m128i __B)
8799 return (__m128i) __builtin_ia32_psllq128_mask ((__v2di) __A,
8800 (__v2di) __B,
8801 (__v2di) __W,
8802 (__mmask8) __U);
8805 extern __inline __m128i
8806 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8807 _mm_maskz_sll_epi64 (__mmask8 __U, __m128i __A, __m128i __B)
8809 return (__m128i) __builtin_ia32_psllq128_mask ((__v2di) __A,
8810 (__v2di) __B,
8811 (__v2di)
8812 _mm_setzero_si128 (),
8813 (__mmask8) __U);
8816 extern __inline __m256i
8817 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8818 _mm256_mask_sll_epi32 (__m256i __W, __mmask8 __U, __m256i __A,
8819 __m128i __B)
8821 return (__m256i) __builtin_ia32_pslld256_mask ((__v8si) __A,
8822 (__v4si) __B,
8823 (__v8si) __W,
8824 (__mmask8) __U);
8827 extern __inline __m256i
8828 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8829 _mm256_maskz_sll_epi32 (__mmask8 __U, __m256i __A, __m128i __B)
8831 return (__m256i) __builtin_ia32_pslld256_mask ((__v8si) __A,
8832 (__v4si) __B,
8833 (__v8si)
8834 _mm256_setzero_si256 (),
8835 (__mmask8) __U);
8838 extern __inline __m256i
8839 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8840 _mm256_mask_sll_epi64 (__m256i __W, __mmask8 __U, __m256i __A,
8841 __m128i __B)
8843 return (__m256i) __builtin_ia32_psllq256_mask ((__v4di) __A,
8844 (__v2di) __B,
8845 (__v4di) __W,
8846 (__mmask8) __U);
8849 extern __inline __m256i
8850 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8851 _mm256_maskz_sll_epi64 (__mmask8 __U, __m256i __A, __m128i __B)
8853 return (__m256i) __builtin_ia32_psllq256_mask ((__v4di) __A,
8854 (__v2di) __B,
8855 (__v4di)
8856 _mm256_setzero_si256 (),
8857 (__mmask8) __U);
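/* Editorial usage sketch (hypothetical helper, -mavx512vl assumed): vpsraq
   provides a 64-bit arithmetic (sign-propagating) right shift; SSE2 and AVX2
   only offer the logical form for 64-bit lanes.  */
static __inline __m128i
__avx512vl_demo_sra_epi64 (__m128i __x, __m128i __count)
{
  /* Shift both 64-bit lanes right by the count in the low quadword of
     __count, replicating the sign bit.  */
  return _mm_sra_epi64 (__x, __count);
}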
8860 extern __inline __m256
8861 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8862 _mm256_mask_permutexvar_ps (__m256 __W, __mmask8 __U, __m256i __X,
8863 __m256 __Y)
8865 return (__m256) __builtin_ia32_permvarsf256_mask ((__v8sf) __Y,
8866 (__v8si) __X,
8867 (__v8sf) __W,
8868 (__mmask8) __U);
8871 extern __inline __m256
8872 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8873 _mm256_maskz_permutexvar_ps (__mmask8 __U, __m256i __X, __m256 __Y)
8875 return (__m256) __builtin_ia32_permvarsf256_mask ((__v8sf) __Y,
8876 (__v8si) __X,
8877 (__v8sf)
8878 _mm256_setzero_ps (),
8879 (__mmask8) __U);
8882 extern __inline __m256d
8883 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8884 _mm256_permutexvar_pd (__m256i __X, __m256d __Y)
8886 return (__m256d) __builtin_ia32_permvardf256_mask ((__v4df) __Y,
8887 (__v4di) __X,
8888 (__v4df)
8889 _mm256_setzero_pd (),
8890 (__mmask8) -1);
8893 extern __inline __m256d
8894 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8895 _mm256_mask_permutexvar_pd (__m256d __W, __mmask8 __U, __m256i __X,
8896 __m256d __Y)
8898 return (__m256d) __builtin_ia32_permvardf256_mask ((__v4df) __Y,
8899 (__v4di) __X,
8900 (__v4df) __W,
8901 (__mmask8) __U);
8904 extern __inline __m256d
8905 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8906 _mm256_maskz_permutexvar_pd (__mmask8 __U, __m256i __X, __m256d __Y)
8908 return (__m256d) __builtin_ia32_permvardf256_mask ((__v4df) __Y,
8909 (__v4di) __X,
8910 (__v4df)
8911 _mm256_setzero_pd (),
8912 (__mmask8) __U);
8915 extern __inline __m256d
8916 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8917 _mm256_mask_permutevar_pd (__m256d __W, __mmask8 __U, __m256d __A,
8918 __m256i __C)
8920 return (__m256d) __builtin_ia32_vpermilvarpd256_mask ((__v4df) __A,
8921 (__v4di) __C,
8922 (__v4df) __W,
8923 (__mmask8)
8924 __U);
8927 extern __inline __m256d
8928 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8929 _mm256_maskz_permutevar_pd (__mmask8 __U, __m256d __A, __m256i __C)
8931 return (__m256d) __builtin_ia32_vpermilvarpd256_mask ((__v4df) __A,
8932 (__v4di) __C,
8933 (__v4df)
8934 _mm256_setzero_pd (),
8935 (__mmask8)
8936 __U);
8939 extern __inline __m256
8940 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8941 _mm256_mask_permutevar_ps (__m256 __W, __mmask8 __U, __m256 __A,
8942 __m256i __C)
8944 return (__m256) __builtin_ia32_vpermilvarps256_mask ((__v8sf) __A,
8945 (__v8si) __C,
8946 (__v8sf) __W,
8947 (__mmask8) __U);
8950 extern __inline __m256
8951 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8952 _mm256_maskz_permutevar_ps (__mmask8 __U, __m256 __A, __m256i __C)
8954 return (__m256) __builtin_ia32_vpermilvarps256_mask ((__v8sf) __A,
8955 (__v8si) __C,
8956 (__v8sf)
8957 _mm256_setzero_ps (),
8958 (__mmask8) __U);
8961 extern __inline __m128d
8962 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8963 _mm_mask_permutevar_pd (__m128d __W, __mmask8 __U, __m128d __A,
8964 __m128i __C)
8966 return (__m128d) __builtin_ia32_vpermilvarpd_mask ((__v2df) __A,
8967 (__v2di) __C,
8968 (__v2df) __W,
8969 (__mmask8) __U);
8972 extern __inline __m128d
8973 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8974 _mm_maskz_permutevar_pd (__mmask8 __U, __m128d __A, __m128i __C)
8976 return (__m128d) __builtin_ia32_vpermilvarpd_mask ((__v2df) __A,
8977 (__v2di) __C,
8978 (__v2df)
8979 _mm_setzero_pd (),
8980 (__mmask8) __U);
8983 extern __inline __m128
8984 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8985 _mm_mask_permutevar_ps (__m128 __W, __mmask8 __U, __m128 __A,
8986 __m128i __C)
8988 return (__m128) __builtin_ia32_vpermilvarps_mask ((__v4sf) __A,
8989 (__v4si) __C,
8990 (__v4sf) __W,
8991 (__mmask8) __U);
8994 extern __inline __m128
8995 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
8996 _mm_maskz_permutevar_ps (__mmask8 __U, __m128 __A, __m128i __C)
8998 return (__m128) __builtin_ia32_vpermilvarps_mask ((__v4sf) __A,
8999 (__v4si) __C,
9000 (__v4sf)
9001 _mm_setzero_ps (),
9002 (__mmask8) __U);
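/* Editorial usage sketch (hypothetical helper, -mavx512vl assumed): for the
   permutexvar family the index vector is the first intrinsic argument and
   the data vector the second, even though the underlying builtin takes them
   the other way around.  */
static __inline __m256d
__avx512vl_demo_reverse_pd (__m256d __x)
{
  /* _mm256_set_epi64x lists elements from lane 3 down to lane 0, so the
     index vector below is {3, 2, 1, 0} and the doubles come back reversed.  */
  return _mm256_permutexvar_pd (_mm256_set_epi64x (0, 1, 2, 3), __x);
}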
9005 extern __inline __m256i
9006 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9007 _mm256_maskz_mullo_epi32 (__mmask8 __M, __m256i __A, __m256i __B)
9009 return (__m256i) __builtin_ia32_pmulld256_mask ((__v8si) __A,
9010 (__v8si) __B,
9011 (__v8si)
9012 _mm256_setzero_si256 (),
9013 __M);
9016 extern __inline __m256i
9017 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9018 _mm256_maskz_permutexvar_epi64 (__mmask8 __M, __m256i __X, __m256i __Y)
9020 return (__m256i) __builtin_ia32_permvardi256_mask ((__v4di) __Y,
9021 (__v4di) __X,
9022 (__v4di)
9023 _mm256_setzero_si256 (),
9024 __M);
9027 extern __inline __m256i
9028 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9029 _mm256_mask_mullo_epi32 (__m256i __W, __mmask8 __M, __m256i __A,
9030 __m256i __B)
9032 return (__m256i) __builtin_ia32_pmulld256_mask ((__v8si) __A,
9033 (__v8si) __B,
9034 (__v8si) __W, __M);
9037 extern __inline __m128i
9038 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9039 _mm_maskz_mullo_epi32 (__mmask8 __M, __m128i __A, __m128i __B)
9041 return (__m128i) __builtin_ia32_pmulld128_mask ((__v4si) __A,
9042 (__v4si) __B,
9043 (__v4si)
9044 _mm_setzero_si128 (),
9045 __M);
9048 extern __inline __m128i
9049 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9050 _mm_mask_mullo_epi32 (__m128i __W, __mmask8 __M, __m128i __A,
9051 __m128i __B)
9053 return (__m128i) __builtin_ia32_pmulld128_mask ((__v4si) __A,
9054 (__v4si) __B,
9055 (__v4si) __W, __M);
9058 extern __inline __m256i
9059 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9060 _mm256_mask_mul_epi32 (__m256i __W, __mmask8 __M, __m256i __X,
9061 __m256i __Y)
9063 return (__m256i) __builtin_ia32_pmuldq256_mask ((__v8si) __X,
9064 (__v8si) __Y,
9065 (__v4di) __W, __M);
9068 extern __inline __m256i
9069 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9070 _mm256_maskz_mul_epi32 (__mmask8 __M, __m256i __X, __m256i __Y)
9072 return (__m256i) __builtin_ia32_pmuldq256_mask ((__v8si) __X,
9073 (__v8si) __Y,
9074 (__v4di)
9075 _mm256_setzero_si256 (),
9076 __M);
9079 extern __inline __m128i
9080 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9081 _mm_mask_mul_epi32 (__m128i __W, __mmask8 __M, __m128i __X,
9082 __m128i __Y)
9084 return (__m128i) __builtin_ia32_pmuldq128_mask ((__v4si) __X,
9085 (__v4si) __Y,
9086 (__v2di) __W, __M);
9089 extern __inline __m128i
9090 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9091 _mm_maskz_mul_epi32 (__mmask8 __M, __m128i __X, __m128i __Y)
9093 return (__m128i) __builtin_ia32_pmuldq128_mask ((__v4si) __X,
9094 (__v4si) __Y,
9095 (__v2di)
9096 _mm_setzero_si128 (),
9097 __M);
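/* Editorial usage sketch (hypothetical helper, -mavx512vl assumed): vpmuldq
   and vpmuludq multiply the even-indexed 32-bit elements of each source into
   full 64-bit products, so the mask here selects per 64-bit result lane.  */
static __inline __m128i
__avx512vl_demo_widening_mul (__mmask8 __m, __m128i __x, __m128i __y)
{
  /* Signed 32x32->64 products of elements 0 and 2; deselected products are
     zeroed.  */
  return _mm_maskz_mul_epi32 (__m, __x, __y);
}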
9100 extern __inline __m256i
9101 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9102 _mm256_permutexvar_epi64 (__m256i __X, __m256i __Y)
9104 return (__m256i) __builtin_ia32_permvardi256_mask ((__v4di) __Y,
9105 (__v4di) __X,
9106 (__v4di)
9107 _mm256_setzero_si256 (),
9108 (__mmask8) -1);
9111 extern __inline __m256i
9112 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9113 _mm256_mask_permutexvar_epi64 (__m256i __W, __mmask8 __M, __m256i __X,
9114 __m256i __Y)
9116 return (__m256i) __builtin_ia32_permvardi256_mask ((__v4di) __Y,
9117 (__v4di) __X,
9118 (__v4di) __W,
9119 __M);
9122 extern __inline __m256i
9123 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9124 _mm256_mask_mul_epu32 (__m256i __W, __mmask8 __M, __m256i __X,
9125 __m256i __Y)
9127 return (__m256i) __builtin_ia32_pmuludq256_mask ((__v8si) __X,
9128 (__v8si) __Y,
9129 (__v4di) __W, __M);
9132 extern __inline __m256i
9133 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9134 _mm256_maskz_permutexvar_epi32 (__mmask8 __M, __m256i __X, __m256i __Y)
9136 return (__m256i) __builtin_ia32_permvarsi256_mask ((__v8si) __Y,
9137 (__v8si) __X,
9138 (__v8si)
9139 _mm256_setzero_si256 (),
9140 __M);
9143 extern __inline __m256i
9144 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9145 _mm256_maskz_mul_epu32 (__mmask8 __M, __m256i __X, __m256i __Y)
9147 return (__m256i) __builtin_ia32_pmuludq256_mask ((__v8si) __X,
9148 (__v8si) __Y,
9149 (__v4di)
9150 _mm256_setzero_si256 (),
9151 __M);
9154 extern __inline __m128i
9155 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9156 _mm_mask_mul_epu32 (__m128i __W, __mmask8 __M, __m128i __X,
9157 __m128i __Y)
9159 return (__m128i) __builtin_ia32_pmuludq128_mask ((__v4si) __X,
9160 (__v4si) __Y,
9161 (__v2di) __W, __M);
9164 extern __inline __m128i
9165 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9166 _mm_maskz_mul_epu32 (__mmask8 __M, __m128i __X, __m128i __Y)
9168 return (__m128i) __builtin_ia32_pmuludq128_mask ((__v4si) __X,
9169 (__v4si) __Y,
9170 (__v2di)
9171 _mm_setzero_si128 (),
9172 __M);
9175 extern __inline __m256i
9176 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9177 _mm256_permutexvar_epi32 (__m256i __X, __m256i __Y)
9179 return (__m256i) __builtin_ia32_permvarsi256_mask ((__v8si) __Y,
9180 (__v8si) __X,
9181 (__v8si)
9182 _mm256_setzero_si256 (),
9183 (__mmask8) -1);
9186 extern __inline __m256i
9187 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9188 _mm256_mask_permutexvar_epi32 (__m256i __W, __mmask8 __M, __m256i __X,
9189 __m256i __Y)
9191 return (__m256i) __builtin_ia32_permvarsi256_mask ((__v8si) __Y,
9192 (__v8si) __X,
9193 (__v8si) __W,
9194 __M);
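/* Editorial usage sketch (hypothetical helper, -mavx512vl assumed): the
   integer permutexvar forms follow the same index-first argument order as
   the floating-point ones above.  */
static __inline __m256i
__avx512vl_demo_permute_epi32 (__mmask8 __m, __m256i __idx, __m256i __data)
{
  /* Cross-lane gather of 32-bit elements by index; lanes with a clear mask
     bit are zeroed.  */
  return _mm256_maskz_permutexvar_epi32 (__m, __idx, __data);
}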
9197 extern __inline __mmask8
9198 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9199 _mm256_mask_cmpneq_epu32_mask (__mmask8 __M, __m256i __X, __m256i __Y)
9201 return (__mmask8) __builtin_ia32_ucmpd256_mask ((__v8si) __X,
9202 (__v8si) __Y, 4,
9203 (__mmask8) __M);
9206 extern __inline __mmask8
9207 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9208 _mm256_cmpneq_epu32_mask (__m256i __X, __m256i __Y)
9210 return (__mmask8) __builtin_ia32_ucmpd256_mask ((__v8si) __X,
9211 (__v8si) __Y, 4,
9212 (__mmask8) -1);
9215 extern __inline __mmask8
9216 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9217 _mm256_mask_cmplt_epu32_mask (__mmask8 __M, __m256i __X, __m256i __Y)
9219 return (__mmask8) __builtin_ia32_ucmpd256_mask ((__v8si) __X,
9220 (__v8si) __Y, 1,
9221 (__mmask8) __M);
9224 extern __inline __mmask8
9225 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9226 _mm256_cmplt_epu32_mask (__m256i __X, __m256i __Y)
9228 return (__mmask8) __builtin_ia32_ucmpd256_mask ((__v8si) __X,
9229 (__v8si) __Y, 1,
9230 (__mmask8) -1);
9233 extern __inline __mmask8
9234 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9235 _mm256_mask_cmpge_epu32_mask (__mmask8 __M, __m256i __X, __m256i __Y)
9237 return (__mmask8) __builtin_ia32_ucmpd256_mask ((__v8si) __X,
9238 (__v8si) __Y, 5,
9239 (__mmask8) __M);
9242 extern __inline __mmask8
9243 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9244 _mm256_cmpge_epu32_mask (__m256i __X, __m256i __Y)
9246 return (__mmask8) __builtin_ia32_ucmpd256_mask ((__v8si) __X,
9247 (__v8si) __Y, 5,
9248 (__mmask8) -1);
9251 extern __inline __mmask8
9252 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9253 _mm256_mask_cmple_epu32_mask (__mmask8 __M, __m256i __X, __m256i __Y)
9255 return (__mmask8) __builtin_ia32_ucmpd256_mask ((__v8si) __X,
9256 (__v8si) __Y, 2,
9257 (__mmask8) __M);
9260 extern __inline __mmask8
9261 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9262 _mm256_cmple_epu32_mask (__m256i __X, __m256i __Y)
9264 return (__mmask8) __builtin_ia32_ucmpd256_mask ((__v8si) __X,
9265 (__v8si) __Y, 2,
9266 (__mmask8) -1);
9269 extern __inline __mmask8
9270 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9271 _mm256_mask_cmpneq_epu64_mask (__mmask8 __M, __m256i __X, __m256i __Y)
9273 return (__mmask8) __builtin_ia32_ucmpq256_mask ((__v4di) __X,
9274 (__v4di) __Y, 4,
9275 (__mmask8) __M);
9278 extern __inline __mmask8
9279 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9280 _mm256_cmpneq_epu64_mask (__m256i __X, __m256i __Y)
9282 return (__mmask8) __builtin_ia32_ucmpq256_mask ((__v4di) __X,
9283 (__v4di) __Y, 4,
9284 (__mmask8) -1);
9287 extern __inline __mmask8
9288 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9289 _mm256_mask_cmplt_epu64_mask (__mmask8 __M, __m256i __X, __m256i __Y)
9291 return (__mmask8) __builtin_ia32_ucmpq256_mask ((__v4di) __X,
9292 (__v4di) __Y, 1,
9293 (__mmask8) __M);
9296 extern __inline __mmask8
9297 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9298 _mm256_cmplt_epu64_mask (__m256i __X, __m256i __Y)
9300 return (__mmask8) __builtin_ia32_ucmpq256_mask ((__v4di) __X,
9301 (__v4di) __Y, 1,
9302 (__mmask8) -1);
9305 extern __inline __mmask8
9306 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9307 _mm256_mask_cmpge_epu64_mask (__mmask8 __M, __m256i __X, __m256i __Y)
9309 return (__mmask8) __builtin_ia32_ucmpq256_mask ((__v4di) __X,
9310 (__v4di) __Y, 5,
9311 (__mmask8) __M);
9314 extern __inline __mmask8
9315 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9316 _mm256_cmpge_epu64_mask (__m256i __X, __m256i __Y)
9318 return (__mmask8) __builtin_ia32_ucmpq256_mask ((__v4di) __X,
9319 (__v4di) __Y, 5,
9320 (__mmask8) -1);
9323 extern __inline __mmask8
9324 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9325 _mm256_mask_cmple_epu64_mask (__mmask8 __M, __m256i __X, __m256i __Y)
9327 return (__mmask8) __builtin_ia32_ucmpq256_mask ((__v4di) __X,
9328 (__v4di) __Y, 2,
9329 (__mmask8) __M);
9332 extern __inline __mmask8
9333 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9334 _mm256_cmple_epu64_mask (__m256i __X, __m256i __Y)
9336 return (__mmask8) __builtin_ia32_ucmpq256_mask ((__v4di) __X,
9337 (__v4di) __Y, 2,
9338 (__mmask8) -1);
9341 extern __inline __mmask8
9342 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9343 _mm256_mask_cmpneq_epi32_mask (__mmask8 __M, __m256i __X, __m256i __Y)
9345 return (__mmask8) __builtin_ia32_cmpd256_mask ((__v8si) __X,
9346 (__v8si) __Y, 4,
9347 (__mmask8) __M);
9350 extern __inline __mmask8
9351 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9352 _mm256_cmpneq_epi32_mask (__m256i __X, __m256i __Y)
9354 return (__mmask8) __builtin_ia32_cmpd256_mask ((__v8si) __X,
9355 (__v8si) __Y, 4,
9356 (__mmask8) -1);
9359 extern __inline __mmask8
9360 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9361 _mm256_mask_cmplt_epi32_mask (__mmask8 __M, __m256i __X, __m256i __Y)
9363 return (__mmask8) __builtin_ia32_cmpd256_mask ((__v8si) __X,
9364 (__v8si) __Y, 1,
9365 (__mmask8) __M);
9368 extern __inline __mmask8
9369 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9370 _mm256_cmplt_epi32_mask (__m256i __X, __m256i __Y)
9372 return (__mmask8) __builtin_ia32_cmpd256_mask ((__v8si) __X,
9373 (__v8si) __Y, 1,
9374 (__mmask8) -1);
9377 extern __inline __mmask8
9378 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9379 _mm256_mask_cmpge_epi32_mask (__mmask8 __M, __m256i __X, __m256i __Y)
9381 return (__mmask8) __builtin_ia32_cmpd256_mask ((__v8si) __X,
9382 (__v8si) __Y, 5,
9383 (__mmask8) __M);
9386 extern __inline __mmask8
9387 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9388 _mm256_cmpge_epi32_mask (__m256i __X, __m256i __Y)
9390 return (__mmask8) __builtin_ia32_cmpd256_mask ((__v8si) __X,
9391 (__v8si) __Y, 5,
9392 (__mmask8) -1);
9395 extern __inline __mmask8
9396 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9397 _mm256_mask_cmple_epi32_mask (__mmask8 __M, __m256i __X, __m256i __Y)
9399 return (__mmask8) __builtin_ia32_cmpd256_mask ((__v8si) __X,
9400 (__v8si) __Y, 2,
9401 (__mmask8) __M);
9404 extern __inline __mmask8
9405 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9406 _mm256_cmple_epi32_mask (__m256i __X, __m256i __Y)
9408 return (__mmask8) __builtin_ia32_cmpd256_mask ((__v8si) __X,
9409 (__v8si) __Y, 2,
9410 (__mmask8) -1);
9413 extern __inline __mmask8
9414 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9415 _mm256_mask_cmpneq_epi64_mask (__mmask8 __M, __m256i __X, __m256i __Y)
9417 return (__mmask8) __builtin_ia32_cmpq256_mask ((__v4di) __X,
9418 (__v4di) __Y, 4,
9419 (__mmask8) __M);
9422 extern __inline __mmask8
9423 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9424 _mm256_cmpneq_epi64_mask (__m256i __X, __m256i __Y)
9426 return (__mmask8) __builtin_ia32_cmpq256_mask ((__v4di) __X,
9427 (__v4di) __Y, 4,
9428 (__mmask8) -1);
9431 extern __inline __mmask8
9432 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9433 _mm256_mask_cmplt_epi64_mask (__mmask8 __M, __m256i __X, __m256i __Y)
9435 return (__mmask8) __builtin_ia32_cmpq256_mask ((__v4di) __X,
9436 (__v4di) __Y, 1,
9437 (__mmask8) __M);
9440 extern __inline __mmask8
9441 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9442 _mm256_cmplt_epi64_mask (__m256i __X, __m256i __Y)
9444 return (__mmask8) __builtin_ia32_cmpq256_mask ((__v4di) __X,
9445 (__v4di) __Y, 1,
9446 (__mmask8) -1);
9449 extern __inline __mmask8
9450 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9451 _mm256_mask_cmpge_epi64_mask (__mmask8 __M, __m256i __X, __m256i __Y)
9453 return (__mmask8) __builtin_ia32_cmpq256_mask ((__v4di) __X,
9454 (__v4di) __Y, 5,
9455 (__mmask8) __M);
9458 extern __inline __mmask8
9459 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9460 _mm256_cmpge_epi64_mask (__m256i __X, __m256i __Y)
9462 return (__mmask8) __builtin_ia32_cmpq256_mask ((__v4di) __X,
9463 (__v4di) __Y, 5,
9464 (__mmask8) -1);
9467 extern __inline __mmask8
9468 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9469 _mm256_mask_cmple_epi64_mask (__mmask8 __M, __m256i __X, __m256i __Y)
9471 return (__mmask8) __builtin_ia32_cmpq256_mask ((__v4di) __X,
9472 (__v4di) __Y, 2,
9473 (__mmask8) __M);
9476 extern __inline __mmask8
9477 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9478 _mm256_cmple_epi64_mask (__m256i __X, __m256i __Y)
9480 return (__mmask8) __builtin_ia32_cmpq256_mask ((__v4di) __X,
9481 (__v4di) __Y, 2,
9482 (__mmask8) -1);
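/* Editorial note with a usage sketch (hypothetical helper, -mavx512vl
   assumed): the literal predicates passed to the vpcmp/vpcmpu builtins above
   are the _MM_CMPINT_* encodings, 1 for LT, 2 for LE, 4 for NE and 5 for NLT
   (i.e. GE).  Because every compare returns an ordinary __mmask8, range
   checks compose by feeding one result into the mask operand of the next.  */
static __inline __mmask8
__avx512vl_demo_in_range_epu32 (__m256i __x, __m256i __lo, __m256i __hi)
{
  /* Bit i is set when __lo[i] <= __x[i] <= __hi[i]; the second compare only
     runs on lanes that passed the first.  */
  return _mm256_mask_cmple_epu32_mask (_mm256_cmpge_epu32_mask (__x, __lo),
				       __x, __hi);
}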
9485 extern __inline __mmask8
9486 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9487 _mm_mask_cmpneq_epu32_mask (__mmask8 __M, __m128i __X, __m128i __Y)
9489 return (__mmask8) __builtin_ia32_ucmpd128_mask ((__v4si) __X,
9490 (__v4si) __Y, 4,
9491 (__mmask8) __M);
9494 extern __inline __mmask8
9495 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9496 _mm_cmpneq_epu32_mask (__m128i __X, __m128i __Y)
9498 return (__mmask8) __builtin_ia32_ucmpd128_mask ((__v4si) __X,
9499 (__v4si) __Y, 4,
9500 (__mmask8) -1);
9503 extern __inline __mmask8
9504 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9505 _mm_mask_cmplt_epu32_mask (__mmask8 __M, __m128i __X, __m128i __Y)
9507 return (__mmask8) __builtin_ia32_ucmpd128_mask ((__v4si) __X,
9508 (__v4si) __Y, 1,
9509 (__mmask8) __M);
9512 extern __inline __mmask8
9513 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9514 _mm_cmplt_epu32_mask (__m128i __X, __m128i __Y)
9516 return (__mmask8) __builtin_ia32_ucmpd128_mask ((__v4si) __X,
9517 (__v4si) __Y, 1,
9518 (__mmask8) -1);
9521 extern __inline __mmask8
9522 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9523 _mm_mask_cmpge_epu32_mask (__mmask8 __M, __m128i __X, __m128i __Y)
9525 return (__mmask8) __builtin_ia32_ucmpd128_mask ((__v4si) __X,
9526 (__v4si) __Y, 5,
9527 (__mmask8) __M);
9530 extern __inline __mmask8
9531 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9532 _mm_cmpge_epu32_mask (__m128i __X, __m128i __Y)
9534 return (__mmask8) __builtin_ia32_ucmpd128_mask ((__v4si) __X,
9535 (__v4si) __Y, 5,
9536 (__mmask8) -1);
9539 extern __inline __mmask8
9540 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9541 _mm_mask_cmple_epu32_mask (__mmask8 __M, __m128i __X, __m128i __Y)
9543 return (__mmask8) __builtin_ia32_ucmpd128_mask ((__v4si) __X,
9544 (__v4si) __Y, 2,
9545 (__mmask8) __M);
9548 extern __inline __mmask8
9549 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9550 _mm_cmple_epu32_mask (__m128i __X, __m128i __Y)
9552 return (__mmask8) __builtin_ia32_ucmpd128_mask ((__v4si) __X,
9553 (__v4si) __Y, 2,
9554 (__mmask8) -1);
9557 extern __inline __mmask8
9558 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9559 _mm_mask_cmpneq_epu64_mask (__mmask8 __M, __m128i __X, __m128i __Y)
9561 return (__mmask8) __builtin_ia32_ucmpq128_mask ((__v2di) __X,
9562 (__v2di) __Y, 4,
9563 (__mmask8) __M);
9566 extern __inline __mmask8
9567 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9568 _mm_cmpneq_epu64_mask (__m128i __X, __m128i __Y)
9570 return (__mmask8) __builtin_ia32_ucmpq128_mask ((__v2di) __X,
9571 (__v2di) __Y, 4,
9572 (__mmask8) -1);
9575 extern __inline __mmask8
9576 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9577 _mm_mask_cmplt_epu64_mask (__mmask8 __M, __m128i __X, __m128i __Y)
9579 return (__mmask8) __builtin_ia32_ucmpq128_mask ((__v2di) __X,
9580 (__v2di) __Y, 1,
9581 (__mmask8) __M);
9584 extern __inline __mmask8
9585 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9586 _mm_cmplt_epu64_mask (__m128i __X, __m128i __Y)
9588 return (__mmask8) __builtin_ia32_ucmpq128_mask ((__v2di) __X,
9589 (__v2di) __Y, 1,
9590 (__mmask8) -1);
9593 extern __inline __mmask8
9594 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9595 _mm_mask_cmpge_epu64_mask (__mmask8 __M, __m128i __X, __m128i __Y)
9597 return (__mmask8) __builtin_ia32_ucmpq128_mask ((__v2di) __X,
9598 (__v2di) __Y, 5,
9599 (__mmask8) __M);
9602 extern __inline __mmask8
9603 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9604 _mm_cmpge_epu64_mask (__m128i __X, __m128i __Y)
9606 return (__mmask8) __builtin_ia32_ucmpq128_mask ((__v2di) __X,
9607 (__v2di) __Y, 5,
9608 (__mmask8) -1);
9611 extern __inline __mmask8
9612 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9613 _mm_mask_cmple_epu64_mask (__mmask8 __M, __m128i __X, __m128i __Y)
9615 return (__mmask8) __builtin_ia32_ucmpq128_mask ((__v2di) __X,
9616 (__v2di) __Y, 2,
9617 (__mmask8) __M);
9620 extern __inline __mmask8
9621 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9622 _mm_cmple_epu64_mask (__m128i __X, __m128i __Y)
9624 return (__mmask8) __builtin_ia32_ucmpq128_mask ((__v2di) __X,
9625 (__v2di) __Y, 2,
9626 (__mmask8) -1);
9629 extern __inline __mmask8
9630 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9631 _mm_mask_cmpneq_epi32_mask (__mmask8 __M, __m128i __X, __m128i __Y)
9633 return (__mmask8) __builtin_ia32_cmpd128_mask ((__v4si) __X,
9634 (__v4si) __Y, 4,
9635 (__mmask8) __M);
9638 extern __inline __mmask8
9639 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9640 _mm_cmpneq_epi32_mask (__m128i __X, __m128i __Y)
9642 return (__mmask8) __builtin_ia32_cmpd128_mask ((__v4si) __X,
9643 (__v4si) __Y, 4,
9644 (__mmask8) -1);
9647 extern __inline __mmask8
9648 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9649 _mm_mask_cmplt_epi32_mask (__mmask8 __M, __m128i __X, __m128i __Y)
9651 return (__mmask8) __builtin_ia32_cmpd128_mask ((__v4si) __X,
9652 (__v4si) __Y, 1,
9653 (__mmask8) __M);
9656 extern __inline __mmask8
9657 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9658 _mm_cmplt_epi32_mask (__m128i __X, __m128i __Y)
9660 return (__mmask8) __builtin_ia32_cmpd128_mask ((__v4si) __X,
9661 (__v4si) __Y, 1,
9662 (__mmask8) -1);
9665 extern __inline __mmask8
9666 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9667 _mm_mask_cmpge_epi32_mask (__mmask8 __M, __m128i __X, __m128i __Y)
9669 return (__mmask8) __builtin_ia32_cmpd128_mask ((__v4si) __X,
9670 (__v4si) __Y, 5,
9671 (__mmask8) __M);
9674 extern __inline __mmask8
9675 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9676 _mm_cmpge_epi32_mask (__m128i __X, __m128i __Y)
9678 return (__mmask8) __builtin_ia32_cmpd128_mask ((__v4si) __X,
9679 (__v4si) __Y, 5,
9680 (__mmask8) -1);
9683 extern __inline __mmask8
9684 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9685 _mm_mask_cmple_epi32_mask (__mmask8 __M, __m128i __X, __m128i __Y)
9687 return (__mmask8) __builtin_ia32_cmpd128_mask ((__v4si) __X,
9688 (__v4si) __Y, 2,
9689 (__mmask8) __M);
9692 extern __inline __mmask8
9693 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9694 _mm_cmple_epi32_mask (__m128i __X, __m128i __Y)
9696 return (__mmask8) __builtin_ia32_cmpd128_mask ((__v4si) __X,
9697 (__v4si) __Y, 2,
9698 (__mmask8) -1);
9701 extern __inline __mmask8
9702 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9703 _mm_mask_cmpneq_epi64_mask (__mmask8 __M, __m128i __X, __m128i __Y)
9705 return (__mmask8) __builtin_ia32_cmpq128_mask ((__v2di) __X,
9706 (__v2di) __Y, 4,
9707 (__mmask8) __M);
9710 extern __inline __mmask8
9711 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9712 _mm_cmpneq_epi64_mask (__m128i __X, __m128i __Y)
9714 return (__mmask8) __builtin_ia32_cmpq128_mask ((__v2di) __X,
9715 (__v2di) __Y, 4,
9716 (__mmask8) -1);
9719 extern __inline __mmask8
9720 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9721 _mm_mask_cmplt_epi64_mask (__mmask8 __M, __m128i __X, __m128i __Y)
9723 return (__mmask8) __builtin_ia32_cmpq128_mask ((__v2di) __X,
9724 (__v2di) __Y, 1,
9725 (__mmask8) __M);
9728 extern __inline __mmask8
9729 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9730 _mm_cmplt_epi64_mask (__m128i __X, __m128i __Y)
9732 return (__mmask8) __builtin_ia32_cmpq128_mask ((__v2di) __X,
9733 (__v2di) __Y, 1,
9734 (__mmask8) -1);
9737 extern __inline __mmask8
9738 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9739 _mm_mask_cmpge_epi64_mask (__mmask8 __M, __m128i __X, __m128i __Y)
9741 return (__mmask8) __builtin_ia32_cmpq128_mask ((__v2di) __X,
9742 (__v2di) __Y, 5,
9743 (__mmask8) __M);
9746 extern __inline __mmask8
9747 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9748 _mm_cmpge_epi64_mask (__m128i __X, __m128i __Y)
9750 return (__mmask8) __builtin_ia32_cmpq128_mask ((__v2di) __X,
9751 (__v2di) __Y, 5,
9752 (__mmask8) -1);
9755 extern __inline __mmask8
9756 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9757 _mm_mask_cmple_epi64_mask (__mmask8 __M, __m128i __X, __m128i __Y)
9759 return (__mmask8) __builtin_ia32_cmpq128_mask ((__v2di) __X,
9760 (__v2di) __Y, 2,
9761 (__mmask8) __M);
9764 extern __inline __mmask8
9765 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9766 _mm_cmple_epi64_mask (__m128i __X, __m128i __Y)
9768 return (__mmask8) __builtin_ia32_cmpq128_mask ((__v2di) __X,
9769 (__v2di) __Y, 2,
9770 (__mmask8) -1);
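/* The comparison wrappers above all funnel into the
   __builtin_ia32_[u]cmp{d,q}{128,256}_mask builtins, whose third
   argument is the comparison predicate: as the calls show, 4 = not-equal,
   1 = less-than, 5 = not-less-than (>=) and 2 = less-or-equal.  The
   "epu" forms use the unsigned builtins, the "epi" forms the signed
   ones, and the _mm_mask_* variants additionally AND the result with
   the incoming mask.  A minimal usage sketch (variable names are
   illustrative only):

     __m128i a = _mm_set_epi32 (1, 2, 3, 4);
     __m128i b = _mm_set_epi32 (4, 3, 2, 1);
     __mmask8 k = _mm_cmplt_epi32_mask (a, b);

   leaves bit i of k set exactly where the i-th element of a compares
   less than the i-th element of b.  */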
9773 #ifdef __OPTIMIZE__
9774 extern __inline __m256i
9775 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9776 _mm256_permutex_epi64 (__m256i __X, const int __I)
9778 return (__m256i) __builtin_ia32_permdi256_mask ((__v4di) __X,
9779 __I,
9780 (__v4di)
9781 					 _mm256_setzero_si256 (),
9782 (__mmask8) -1);
9785 extern __inline __m256i
9786 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9787 _mm256_mask_permutex_epi64 (__m256i __W, __mmask8 __M,
9788 __m256i __X, const int __I)
9790 return (__m256i) __builtin_ia32_permdi256_mask ((__v4di) __X,
9791 __I,
9792 (__v4di) __W,
9793 (__mmask8) __M);
9796 extern __inline __m256i
9797 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9798 _mm256_maskz_permutex_epi64 (__mmask8 __M, __m256i __X, const int __I)
9800 return (__m256i) __builtin_ia32_permdi256_mask ((__v4di) __X,
9801 __I,
9802 (__v4di)
9803 _mm256_setzero_si256 (),
9804 (__mmask8) __M);
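/* _mm256_permutex_epi64 selects each destination quadword from any of
   the four source quadwords, two immediate bits per element, using the
   same encoding as _MM_SHUFFLE.  An illustrative sketch (names are
   examples only):

     __m256i rev = _mm256_permutex_epi64 (x, _MM_SHUFFLE (0, 1, 2, 3));

   reverses the four 64-bit elements of x; the _mask/_maskz forms then
   merge the result with __W or with zero under __M.  */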
9807 extern __inline __m256d
9808 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9809 _mm256_mask_shuffle_pd (__m256d __W, __mmask8 __U, __m256d __A,
9810 __m256d __B, const int __imm)
9812 return (__m256d) __builtin_ia32_shufpd256_mask ((__v4df) __A,
9813 (__v4df) __B, __imm,
9814 (__v4df) __W,
9815 (__mmask8) __U);
9818 extern __inline __m256d
9819 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9820 _mm256_maskz_shuffle_pd (__mmask8 __U, __m256d __A, __m256d __B,
9821 const int __imm)
9823 return (__m256d) __builtin_ia32_shufpd256_mask ((__v4df) __A,
9824 (__v4df) __B, __imm,
9825 (__v4df)
9826 _mm256_setzero_pd (),
9827 (__mmask8) __U);
9830 extern __inline __m128d
9831 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9832 _mm_mask_shuffle_pd (__m128d __W, __mmask8 __U, __m128d __A,
9833 __m128d __B, const int __imm)
9835 return (__m128d) __builtin_ia32_shufpd128_mask ((__v2df) __A,
9836 (__v2df) __B, __imm,
9837 (__v2df) __W,
9838 (__mmask8) __U);
9841 extern __inline __m128d
9842 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9843 _mm_maskz_shuffle_pd (__mmask8 __U, __m128d __A, __m128d __B,
9844 const int __imm)
9846 return (__m128d) __builtin_ia32_shufpd128_mask ((__v2df) __A,
9847 (__v2df) __B, __imm,
9848 (__v2df)
9849 _mm_setzero_pd (),
9850 (__mmask8) __U);
9853 extern __inline __m256
9854 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9855 _mm256_mask_shuffle_ps (__m256 __W, __mmask8 __U, __m256 __A,
9856 __m256 __B, const int __imm)
9858 return (__m256) __builtin_ia32_shufps256_mask ((__v8sf) __A,
9859 (__v8sf) __B, __imm,
9860 (__v8sf) __W,
9861 (__mmask8) __U);
9864 extern __inline __m256
9865 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9866 _mm256_maskz_shuffle_ps (__mmask8 __U, __m256 __A, __m256 __B,
9867 const int __imm)
9869 return (__m256) __builtin_ia32_shufps256_mask ((__v8sf) __A,
9870 (__v8sf) __B, __imm,
9871 (__v8sf)
9872 _mm256_setzero_ps (),
9873 (__mmask8) __U);
9876 extern __inline __m128
9877 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9878 _mm_mask_shuffle_ps (__m128 __W, __mmask8 __U, __m128 __A, __m128 __B,
9879 const int __imm)
9881 return (__m128) __builtin_ia32_shufps128_mask ((__v4sf) __A,
9882 (__v4sf) __B, __imm,
9883 (__v4sf) __W,
9884 (__mmask8) __U);
9887 extern __inline __m128
9888 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9889 _mm_maskz_shuffle_ps (__mmask8 __U, __m128 __A, __m128 __B,
9890 const int __imm)
9892 return (__m128) __builtin_ia32_shufps128_mask ((__v4sf) __A,
9893 (__v4sf) __B, __imm,
9894 (__v4sf)
9895 _mm_setzero_ps (),
9896 (__mmask8) __U);
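/* The masked shuffle_pd/shuffle_ps wrappers above take the same
   per-128-bit-lane immediate encoding as the plain AVX shuffles and then
   merge the shuffled value with __W (or zero) under __U.  For example
   (illustrative names, assuming that encoding):

     __m256d r = _mm256_mask_shuffle_pd (w, 0x5, a, b, 0x0);

   writes the shuffled a/b values into elements 0 and 2 and keeps
   elements 1 and 3 of w.  */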
9899 extern __inline __m256i
9900 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9901 _mm256_inserti32x4 (__m256i __A, __m128i __B, const int __imm)
9903 return (__m256i) __builtin_ia32_inserti32x4_256_mask ((__v8si) __A,
9904 (__v4si) __B,
9905 __imm,
9906 (__v8si)
9907 _mm256_setzero_si256 (),
9908 (__mmask8) -1);
9911 extern __inline __m256i
9912 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9913 _mm256_mask_inserti32x4 (__m256i __W, __mmask8 __U, __m256i __A,
9914 __m128i __B, const int __imm)
9916 return (__m256i) __builtin_ia32_inserti32x4_256_mask ((__v8si) __A,
9917 (__v4si) __B,
9918 __imm,
9919 (__v8si) __W,
9920 (__mmask8)
9921 __U);
9924 extern __inline __m256i
9925 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9926 _mm256_maskz_inserti32x4 (__mmask8 __U, __m256i __A, __m128i __B,
9927 const int __imm)
9929 return (__m256i) __builtin_ia32_inserti32x4_256_mask ((__v8si) __A,
9930 (__v4si) __B,
9931 __imm,
9932 (__v8si)
9933 _mm256_setzero_si256 (),
9934 (__mmask8)
9935 __U);
9938 extern __inline __m256
9939 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9940 _mm256_insertf32x4 (__m256 __A, __m128 __B, const int __imm)
9942 return (__m256) __builtin_ia32_insertf32x4_256_mask ((__v8sf) __A,
9943 (__v4sf) __B,
9944 __imm,
9945 (__v8sf)
9946 _mm256_setzero_ps (),
9947 (__mmask8) -1);
9950 extern __inline __m256
9951 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9952 _mm256_mask_insertf32x4 (__m256 __W, __mmask8 __U, __m256 __A,
9953 __m128 __B, const int __imm)
9955 return (__m256) __builtin_ia32_insertf32x4_256_mask ((__v8sf) __A,
9956 (__v4sf) __B,
9957 __imm,
9958 (__v8sf) __W,
9959 (__mmask8) __U);
9962 extern __inline __m256
9963 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9964 _mm256_maskz_insertf32x4 (__mmask8 __U, __m256 __A, __m128 __B,
9965 const int __imm)
9967 return (__m256) __builtin_ia32_insertf32x4_256_mask ((__v8sf) __A,
9968 (__v4sf) __B,
9969 __imm,
9970 (__v8sf)
9971 _mm256_setzero_ps (),
9972 (__mmask8) __U);
9975 extern __inline __m128i
9976 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9977 _mm256_extracti32x4_epi32 (__m256i __A, const int __imm)
9979 return (__m128i) __builtin_ia32_extracti32x4_256_mask ((__v8si) __A,
9980 __imm,
9981 (__v4si)
9982 _mm_setzero_si128 (),
9983 (__mmask8) -1);
9986 extern __inline __m128i
9987 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
9988 _mm256_mask_extracti32x4_epi32 (__m128i __W, __mmask8 __U, __m256i __A,
9989 const int __imm)
9991 return (__m128i) __builtin_ia32_extracti32x4_256_mask ((__v8si) __A,
9992 __imm,
9993 (__v4si) __W,
9994 (__mmask8)
9995 __U);
9998 extern __inline __m128i
9999 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10000 _mm256_maskz_extracti32x4_epi32 (__mmask8 __U, __m256i __A,
10001 const int __imm)
10003 return (__m128i) __builtin_ia32_extracti32x4_256_mask ((__v8si) __A,
10004 __imm,
10005 (__v4si)
10006 _mm_setzero_si128 (),
10007 (__mmask8)
10008 __U);
10011 extern __inline __m128
10012 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10013 _mm256_extractf32x4_ps (__m256 __A, const int __imm)
10015 return (__m128) __builtin_ia32_extractf32x4_256_mask ((__v8sf) __A,
10016 __imm,
10017 (__v4sf)
10018 _mm_setzero_ps (),
10019 (__mmask8) -1);
10022 extern __inline __m128
10023 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10024 _mm256_mask_extractf32x4_ps (__m128 __W, __mmask8 __U, __m256 __A,
10025 const int __imm)
10027 return (__m128) __builtin_ia32_extractf32x4_256_mask ((__v8sf) __A,
10028 __imm,
10029 (__v4sf) __W,
10030 (__mmask8)
10031 __U);
10034 extern __inline __m128
10035 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10036 _mm256_maskz_extractf32x4_ps (__mmask8 __U, __m256 __A,
10037 const int __imm)
10039 return (__m128) __builtin_ia32_extractf32x4_256_mask ((__v8sf) __A,
10040 __imm,
10041 (__v4sf)
10042 _mm_setzero_ps (),
10043 (__mmask8)
10044 __U);
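/* The inserti32x4/insertf32x4 intrinsics replace one 128-bit half of a
   256-bit vector with __B, and extracti32x4/extractf32x4 return one
   128-bit half; for these 256-bit forms only bit 0 of the immediate is
   meaningful.  Illustrative sketch (names are examples only):

     __m128 hi = _mm256_extractf32x4_ps (v, 1);
     __m256 r  = _mm256_insertf32x4 (v, hi, 0);

   copies the upper half of v over its lower half.  */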
10047 extern __inline __m256i
10048 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10049 _mm256_shuffle_i64x2 (__m256i __A, __m256i __B, const int __imm)
10051 return (__m256i) __builtin_ia32_shuf_i64x2_256_mask ((__v4di) __A,
10052 (__v4di) __B,
10053 __imm,
10054 (__v4di)
10055 _mm256_setzero_si256 (),
10056 (__mmask8) -1);
10059 extern __inline __m256i
10060 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10061 _mm256_mask_shuffle_i64x2 (__m256i __W, __mmask8 __U, __m256i __A,
10062 __m256i __B, const int __imm)
10064 return (__m256i) __builtin_ia32_shuf_i64x2_256_mask ((__v4di) __A,
10065 (__v4di) __B,
10066 __imm,
10067 (__v4di) __W,
10068 (__mmask8) __U);
10071 extern __inline __m256i
10072 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10073 _mm256_maskz_shuffle_i64x2 (__mmask8 __U, __m256i __A, __m256i __B,
10074 const int __imm)
10076 return (__m256i) __builtin_ia32_shuf_i64x2_256_mask ((__v4di) __A,
10077 (__v4di) __B,
10078 __imm,
10079 (__v4di)
10080 _mm256_setzero_si256 (),
10081 (__mmask8) __U);
10084 extern __inline __m256i
10085 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10086 _mm256_shuffle_i32x4 (__m256i __A, __m256i __B, const int __imm)
10088 return (__m256i) __builtin_ia32_shuf_i32x4_256_mask ((__v8si) __A,
10089 (__v8si) __B,
10090 __imm,
10091 (__v8si)
10092 _mm256_setzero_si256 (),
10093 (__mmask8) -1);
10096 extern __inline __m256i
10097 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10098 _mm256_mask_shuffle_i32x4 (__m256i __W, __mmask8 __U, __m256i __A,
10099 __m256i __B, const int __imm)
10101 return (__m256i) __builtin_ia32_shuf_i32x4_256_mask ((__v8si) __A,
10102 (__v8si) __B,
10103 __imm,
10104 (__v8si) __W,
10105 (__mmask8) __U);
10108 extern __inline __m256i
10109 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10110 _mm256_maskz_shuffle_i32x4 (__mmask8 __U, __m256i __A, __m256i __B,
10111 const int __imm)
10113 return (__m256i) __builtin_ia32_shuf_i32x4_256_mask ((__v8si) __A,
10114 (__v8si) __B,
10115 __imm,
10116 (__v8si)
10117 _mm256_setzero_si256 (),
10118 (__mmask8) __U);
10121 extern __inline __m256d
10122 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10123 _mm256_shuffle_f64x2 (__m256d __A, __m256d __B, const int __imm)
10125 return (__m256d) __builtin_ia32_shuf_f64x2_256_mask ((__v4df) __A,
10126 (__v4df) __B,
10127 __imm,
10128 (__v4df)
10129 _mm256_setzero_pd (),
10130 (__mmask8) -1);
10133 extern __inline __m256d
10134 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10135 _mm256_mask_shuffle_f64x2 (__m256d __W, __mmask8 __U, __m256d __A,
10136 __m256d __B, const int __imm)
10138 return (__m256d) __builtin_ia32_shuf_f64x2_256_mask ((__v4df) __A,
10139 (__v4df) __B,
10140 __imm,
10141 (__v4df) __W,
10142 (__mmask8) __U);
10145 extern __inline __m256d
10146 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10147 _mm256_maskz_shuffle_f64x2 (__mmask8 __U, __m256d __A, __m256d __B,
10148 const int __imm)
10150 return (__m256d) __builtin_ia32_shuf_f64x2_256_mask ((__v4df) __A,
10151 (__v4df) __B,
10152 __imm,
10153 (__v4df)
10154 _mm256_setzero_pd (),
10155 (__mmask8) __U);
10158 extern __inline __m256
10159 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10160 _mm256_shuffle_f32x4 (__m256 __A, __m256 __B, const int __imm)
10162 return (__m256) __builtin_ia32_shuf_f32x4_256_mask ((__v8sf) __A,
10163 (__v8sf) __B,
10164 __imm,
10165 (__v8sf)
10166 _mm256_setzero_ps (),
10167 (__mmask8) -1);
10170 extern __inline __m256
10171 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10172 _mm256_mask_shuffle_f32x4 (__m256 __W, __mmask8 __U, __m256 __A,
10173 __m256 __B, const int __imm)
10175 return (__m256) __builtin_ia32_shuf_f32x4_256_mask ((__v8sf) __A,
10176 (__v8sf) __B,
10177 __imm,
10178 (__v8sf) __W,
10179 (__mmask8) __U);
10182 extern __inline __m256
10183 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10184 _mm256_maskz_shuffle_f32x4 (__mmask8 __U, __m256 __A, __m256 __B,
10185 const int __imm)
10187 return (__m256) __builtin_ia32_shuf_f32x4_256_mask ((__v8sf) __A,
10188 (__v8sf) __B,
10189 __imm,
10190 (__v8sf)
10191 _mm256_setzero_ps (),
10192 (__mmask8) __U);
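/* The shuffle_{i32x4,i64x2,f32x4,f64x2} intrinsics shuffle whole 128-bit
   lanes: for these 256-bit forms, immediate bit 0 selects which lane of
   __A becomes the low half of the result and bit 1 selects which lane of
   __B becomes the high half.  For example (illustrative),
   _mm256_shuffle_f64x2 (a, b, 0x3) places the high lane of a in the low
   half of the result and the high lane of b in the high half.  */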
10195 extern __inline __m256d
10196 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10197 _mm256_fixupimm_pd (__m256d __A, __m256d __B, __m256i __C,
10198 const int __imm)
10200 return (__m256d) __builtin_ia32_fixupimmpd256_mask ((__v4df) __A,
10201 (__v4df) __B,
10202 (__v4di) __C,
10203 __imm,
10204 (__mmask8) -1);
10207 extern __inline __m256d
10208 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10209 _mm256_mask_fixupimm_pd (__m256d __A, __mmask8 __U, __m256d __B,
10210 __m256i __C, const int __imm)
10212 return (__m256d) __builtin_ia32_fixupimmpd256_mask ((__v4df) __A,
10213 (__v4df) __B,
10214 (__v4di) __C,
10215 __imm,
10216 (__mmask8) __U);
10219 extern __inline __m256d
10220 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10221 _mm256_maskz_fixupimm_pd (__mmask8 __U, __m256d __A, __m256d __B,
10222 __m256i __C, const int __imm)
10224 return (__m256d) __builtin_ia32_fixupimmpd256_maskz ((__v4df) __A,
10225 (__v4df) __B,
10226 (__v4di) __C,
10227 __imm,
10228 (__mmask8) __U);
10231 extern __inline __m256
10232 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10233 _mm256_fixupimm_ps (__m256 __A, __m256 __B, __m256i __C,
10234 const int __imm)
10236 return (__m256) __builtin_ia32_fixupimmps256_mask ((__v8sf) __A,
10237 (__v8sf) __B,
10238 (__v8si) __C,
10239 __imm,
10240 (__mmask8) -1);
10243 extern __inline __m256
10244 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10245 _mm256_mask_fixupimm_ps (__m256 __A, __mmask8 __U, __m256 __B,
10246 __m256i __C, const int __imm)
10248 return (__m256) __builtin_ia32_fixupimmps256_mask ((__v8sf) __A,
10249 (__v8sf) __B,
10250 (__v8si) __C,
10251 __imm,
10252 (__mmask8) __U);
10255 extern __inline __m256
10256 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10257 _mm256_maskz_fixupimm_ps (__mmask8 __U, __m256 __A, __m256 __B,
10258 __m256i __C, const int __imm)
10260 return (__m256) __builtin_ia32_fixupimmps256_maskz ((__v8sf) __A,
10261 (__v8sf) __B,
10262 (__v8si) __C,
10263 __imm,
10264 (__mmask8) __U);
10267 extern __inline __m128d
10268 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10269 _mm_fixupimm_pd (__m128d __A, __m128d __B, __m128i __C,
10270 const int __imm)
10272 return (__m128d) __builtin_ia32_fixupimmpd128_mask ((__v2df) __A,
10273 (__v2df) __B,
10274 (__v2di) __C,
10275 __imm,
10276 (__mmask8) -1);
10279 extern __inline __m128d
10280 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10281 _mm_mask_fixupimm_pd (__m128d __A, __mmask8 __U, __m128d __B,
10282 __m128i __C, const int __imm)
10284 return (__m128d) __builtin_ia32_fixupimmpd128_mask ((__v2df) __A,
10285 (__v2df) __B,
10286 (__v2di) __C,
10287 __imm,
10288 (__mmask8) __U);
10291 extern __inline __m128d
10292 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10293 _mm_maskz_fixupimm_pd (__mmask8 __U, __m128d __A, __m128d __B,
10294 __m128i __C, const int __imm)
10296 return (__m128d) __builtin_ia32_fixupimmpd128_maskz ((__v2df) __A,
10297 (__v2df) __B,
10298 (__v2di) __C,
10299 __imm,
10300 (__mmask8) __U);
10303 extern __inline __m128
10304 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10305 _mm_fixupimm_ps (__m128 __A, __m128 __B, __m128i __C, const int __imm)
10307 return (__m128) __builtin_ia32_fixupimmps128_mask ((__v4sf) __A,
10308 (__v4sf) __B,
10309 (__v4si) __C,
10310 __imm,
10311 (__mmask8) -1);
10314 extern __inline __m128
10315 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10316 _mm_mask_fixupimm_ps (__m128 __A, __mmask8 __U, __m128 __B,
10317 __m128i __C, const int __imm)
10319 return (__m128) __builtin_ia32_fixupimmps128_mask ((__v4sf) __A,
10320 (__v4sf) __B,
10321 (__v4si) __C,
10322 __imm,
10323 (__mmask8) __U);
10326 extern __inline __m128
10327 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10328 _mm_maskz_fixupimm_ps (__mmask8 __U, __m128 __A, __m128 __B,
10329 __m128i __C, const int __imm)
10331 return (__m128) __builtin_ia32_fixupimmps128_maskz ((__v4sf) __A,
10332 (__v4sf) __B,
10333 (__v4si) __C,
10334 __imm,
10335 (__mmask8) __U);
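/* The fixupimm intrinsics post-process each element according to its
   floating-point class (zero, NaN, infinity, negative, ...), using a
   per-element table of 4-bit tokens supplied in __C; the immediate
   primarily controls which exceptions may be signalled.  They are
   typically used to patch up special-case inputs after a reciprocal or
   square-root refinement step (a general description of the operation,
   not something specific to this header).  */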
10338 extern __inline __m256i
10339 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10340 _mm256_mask_srli_epi32 (__m256i __W, __mmask8 __U, __m256i __A,
10341 const int __imm)
10343 return (__m256i) __builtin_ia32_psrldi256_mask ((__v8si) __A, __imm,
10344 (__v8si) __W,
10345 (__mmask8) __U);
10348 extern __inline __m256i
10349 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10350 _mm256_maskz_srli_epi32 (__mmask8 __U, __m256i __A, const int __imm)
10352 return (__m256i) __builtin_ia32_psrldi256_mask ((__v8si) __A, __imm,
10353 (__v8si)
10354 _mm256_setzero_si256 (),
10355 (__mmask8) __U);
10358 extern __inline __m128i
10359 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10360 _mm_mask_srli_epi32 (__m128i __W, __mmask8 __U, __m128i __A,
10361 const int __imm)
10363 return (__m128i) __builtin_ia32_psrldi128_mask ((__v4si) __A, __imm,
10364 (__v4si) __W,
10365 (__mmask8) __U);
10368 extern __inline __m128i
10369 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10370 _mm_maskz_srli_epi32 (__mmask8 __U, __m128i __A, const int __imm)
10372 return (__m128i) __builtin_ia32_psrldi128_mask ((__v4si) __A, __imm,
10373 (__v4si)
10374 _mm_setzero_si128 (),
10375 (__mmask8) __U);
10378 extern __inline __m256i
10379 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10380 _mm256_mask_srli_epi64 (__m256i __W, __mmask8 __U, __m256i __A,
10381 const int __imm)
10383 return (__m256i) __builtin_ia32_psrlqi256_mask ((__v4di) __A, __imm,
10384 (__v4di) __W,
10385 (__mmask8) __U);
10388 extern __inline __m256i
10389 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10390 _mm256_maskz_srli_epi64 (__mmask8 __U, __m256i __A, const int __imm)
10392 return (__m256i) __builtin_ia32_psrlqi256_mask ((__v4di) __A, __imm,
10393 (__v4di)
10394 _mm256_setzero_si256 (),
10395 (__mmask8) __U);
10398 extern __inline __m128i
10399 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10400 _mm_mask_srli_epi64 (__m128i __W, __mmask8 __U, __m128i __A,
10401 const int __imm)
10403 return (__m128i) __builtin_ia32_psrlqi128_mask ((__v2di) __A, __imm,
10404 (__v2di) __W,
10405 (__mmask8) __U);
10408 extern __inline __m128i
10409 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10410 _mm_maskz_srli_epi64 (__mmask8 __U, __m128i __A, const int __imm)
10412 return (__m128i) __builtin_ia32_psrlqi128_mask ((__v2di) __A, __imm,
10413 (__v2di)
10414 _mm_setzero_si128 (),
10415 (__mmask8) __U);
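/* The srli wrappers above are the masked forms of the immediate logical
   right shifts: every element of __A is shifted right by __imm bits
   (counts of the element width or more produce zero), and the result is
   merged with __W or with zero under __U.  Sketch with illustrative
   names:

     __m256i r = _mm256_maskz_srli_epi32 (0x0F, v, 3);

   shifts the low four doublewords of v right by three bits and zeroes
   the upper four.  */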
10418 extern __inline __m256i
10419 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10420 _mm256_ternarylogic_epi64 (__m256i __A, __m256i __B, __m256i __C,
10421 const int __imm)
10423 return (__m256i) __builtin_ia32_pternlogq256_mask ((__v4di) __A,
10424 (__v4di) __B,
10425 (__v4di) __C, __imm,
10426 (__mmask8) -1);
10429 extern __inline __m256i
10430 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10431 _mm256_mask_ternarylogic_epi64 (__m256i __A, __mmask8 __U,
10432 __m256i __B, __m256i __C,
10433 const int __imm)
10435 return (__m256i) __builtin_ia32_pternlogq256_mask ((__v4di) __A,
10436 (__v4di) __B,
10437 (__v4di) __C, __imm,
10438 (__mmask8) __U);
10441 extern __inline __m256i
10442 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10443 _mm256_maskz_ternarylogic_epi64 (__mmask8 __U, __m256i __A,
10444 __m256i __B, __m256i __C,
10445 const int __imm)
10447 return (__m256i) __builtin_ia32_pternlogq256_maskz ((__v4di) __A,
10448 (__v4di) __B,
10449 (__v4di) __C,
10450 __imm,
10451 (__mmask8) __U);
10454 extern __inline __m256i
10455 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10456 _mm256_ternarylogic_epi32 (__m256i __A, __m256i __B, __m256i __C,
10457 const int __imm)
10459 return (__m256i) __builtin_ia32_pternlogd256_mask ((__v8si) __A,
10460 (__v8si) __B,
10461 (__v8si) __C, __imm,
10462 (__mmask8) -1);
10465 extern __inline __m256i
10466 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10467 _mm256_mask_ternarylogic_epi32 (__m256i __A, __mmask8 __U,
10468 __m256i __B, __m256i __C,
10469 const int __imm)
10471 return (__m256i) __builtin_ia32_pternlogd256_mask ((__v8si) __A,
10472 (__v8si) __B,
10473 (__v8si) __C, __imm,
10474 (__mmask8) __U);
10477 extern __inline __m256i
10478 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10479 _mm256_maskz_ternarylogic_epi32 (__mmask8 __U, __m256i __A,
10480 __m256i __B, __m256i __C,
10481 const int __imm)
10483 return (__m256i) __builtin_ia32_pternlogd256_maskz ((__v8si) __A,
10484 (__v8si) __B,
10485 (__v8si) __C,
10486 __imm,
10487 (__mmask8) __U);
10490 extern __inline __m128i
10491 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10492 _mm_ternarylogic_epi64 (__m128i __A, __m128i __B, __m128i __C,
10493 const int __imm)
10495 return (__m128i) __builtin_ia32_pternlogq128_mask ((__v2di) __A,
10496 (__v2di) __B,
10497 (__v2di) __C, __imm,
10498 (__mmask8) -1);
10501 extern __inline __m128i
10502 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10503 _mm_mask_ternarylogic_epi64 (__m128i __A, __mmask8 __U,
10504 __m128i __B, __m128i __C, const int __imm)
10506 return (__m128i) __builtin_ia32_pternlogq128_mask ((__v2di) __A,
10507 (__v2di) __B,
10508 (__v2di) __C, __imm,
10509 (__mmask8) __U);
10512 extern __inline __m128i
10513 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10514 _mm_maskz_ternarylogic_epi64 (__mmask8 __U, __m128i __A,
10515 __m128i __B, __m128i __C, const int __imm)
10517 return (__m128i) __builtin_ia32_pternlogq128_maskz ((__v2di) __A,
10518 (__v2di) __B,
10519 (__v2di) __C,
10520 __imm,
10521 (__mmask8) __U);
10524 extern __inline __m128i
10525 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10526 _mm_ternarylogic_epi32 (__m128i __A, __m128i __B, __m128i __C,
10527 const int __imm)
10529 return (__m128i) __builtin_ia32_pternlogd128_mask ((__v4si) __A,
10530 (__v4si) __B,
10531 (__v4si) __C, __imm,
10532 (__mmask8) -1);
10535 extern __inline __m128i
10536 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10537 _mm_mask_ternarylogic_epi32 (__m128i __A, __mmask8 __U,
10538 __m128i __B, __m128i __C, const int __imm)
10540 return (__m128i) __builtin_ia32_pternlogd128_mask ((__v4si) __A,
10541 (__v4si) __B,
10542 (__v4si) __C, __imm,
10543 (__mmask8) __U);
10546 extern __inline __m128i
10547 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10548 _mm_maskz_ternarylogic_epi32 (__mmask8 __U, __m128i __A,
10549 __m128i __B, __m128i __C, const int __imm)
10551 return (__m128i) __builtin_ia32_pternlogd128_maskz ((__v4si) __A,
10552 (__v4si) __B,
10553 (__v4si) __C,
10554 __imm,
10555 (__mmask8) __U);
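/* The ternarylogic intrinsics evaluate an arbitrary three-input boolean
   function bit by bit: at each bit position the bits taken from __A, __B
   and __C form a 3-bit index into the 8-bit immediate, which acts as a
   truth table.  A well-known example (illustrative names):

     __m256i sel = _mm256_ternarylogic_epi32 (a, b, c, 0xCA);

   computes (a & b) | (~a & c), i.e. a bitwise select of b/c under a.  */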
10558 extern __inline __m256
10559 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10560 _mm256_roundscale_ps (__m256 __A, const int __imm)
10562 return (__m256) __builtin_ia32_rndscaleps_256_mask ((__v8sf) __A,
10563 __imm,
10564 (__v8sf)
10565 _mm256_setzero_ps (),
10566 (__mmask8) -1);
10569 extern __inline __m256
10570 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10571 _mm256_mask_roundscale_ps (__m256 __W, __mmask8 __U, __m256 __A,
10572 const int __imm)
10574 return (__m256) __builtin_ia32_rndscaleps_256_mask ((__v8sf) __A,
10575 __imm,
10576 (__v8sf) __W,
10577 (__mmask8) __U);
10580 extern __inline __m256
10581 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10582 _mm256_maskz_roundscale_ps (__mmask8 __U, __m256 __A, const int __imm)
10584 return (__m256) __builtin_ia32_rndscaleps_256_mask ((__v8sf) __A,
10585 __imm,
10586 (__v8sf)
10587 _mm256_setzero_ps (),
10588 (__mmask8) __U);
10591 extern __inline __m256d
10592 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10593 _mm256_roundscale_pd (__m256d __A, const int __imm)
10595 return (__m256d) __builtin_ia32_rndscalepd_256_mask ((__v4df) __A,
10596 __imm,
10597 (__v4df)
10598 _mm256_setzero_pd (),
10599 (__mmask8) -1);
10602 extern __inline __m256d
10603 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10604 _mm256_mask_roundscale_pd (__m256d __W, __mmask8 __U, __m256d __A,
10605 const int __imm)
10607 return (__m256d) __builtin_ia32_rndscalepd_256_mask ((__v4df) __A,
10608 __imm,
10609 (__v4df) __W,
10610 (__mmask8) __U);
10613 extern __inline __m256d
10614 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10615 _mm256_maskz_roundscale_pd (__mmask8 __U, __m256d __A, const int __imm)
10617 return (__m256d) __builtin_ia32_rndscalepd_256_mask ((__v4df) __A,
10618 __imm,
10619 (__v4df)
10620 _mm256_setzero_pd (),
10621 (__mmask8) __U);
10624 extern __inline __m128
10625 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10626 _mm_roundscale_ps (__m128 __A, const int __imm)
10628 return (__m128) __builtin_ia32_rndscaleps_128_mask ((__v4sf) __A,
10629 __imm,
10630 (__v4sf)
10631 _mm_setzero_ps (),
10632 (__mmask8) -1);
10635 extern __inline __m128
10636 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10637 _mm_mask_roundscale_ps (__m128 __W, __mmask8 __U, __m128 __A,
10638 const int __imm)
10640 return (__m128) __builtin_ia32_rndscaleps_128_mask ((__v4sf) __A,
10641 __imm,
10642 (__v4sf) __W,
10643 (__mmask8) __U);
10646 extern __inline __m128
10647 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10648 _mm_maskz_roundscale_ps (__mmask8 __U, __m128 __A, const int __imm)
10650 return (__m128) __builtin_ia32_rndscaleps_128_mask ((__v4sf) __A,
10651 __imm,
10652 (__v4sf)
10653 _mm_setzero_ps (),
10654 (__mmask8) __U);
10657 extern __inline __m128d
10658 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10659 _mm_roundscale_pd (__m128d __A, const int __imm)
10661 return (__m128d) __builtin_ia32_rndscalepd_128_mask ((__v2df) __A,
10662 __imm,
10663 (__v2df)
10664 _mm_setzero_pd (),
10665 (__mmask8) -1);
10668 extern __inline __m128d
10669 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10670 _mm_mask_roundscale_pd (__m128d __W, __mmask8 __U, __m128d __A,
10671 const int __imm)
10673 return (__m128d) __builtin_ia32_rndscalepd_128_mask ((__v2df) __A,
10674 __imm,
10675 (__v2df) __W,
10676 (__mmask8) __U);
10679 extern __inline __m128d
10680 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10681 _mm_maskz_roundscale_pd (__mmask8 __U, __m128d __A, const int __imm)
10683 return (__m128d) __builtin_ia32_rndscalepd_128_mask ((__v2df) __A,
10684 __imm,
10685 (__v2df)
10686 _mm_setzero_pd (),
10687 (__mmask8) __U);
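/* The roundscale intrinsics round each element to a multiple of 2^-M,
   where M is the upper four bits of the immediate; the low bits select
   the rounding behaviour (nearest, down, up, truncate, or the current
   MXCSR mode).  For instance, an immediate of 0x01 rounds every element
   down to an integral value (a floor), following the usual AVX-512
   encoding.  */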
10690 extern __inline __m256
10691 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10692 _mm256_getmant_ps (__m256 __A, _MM_MANTISSA_NORM_ENUM __B,
10693 _MM_MANTISSA_SIGN_ENUM __C)
10695 return (__m256) __builtin_ia32_getmantps256_mask ((__v8sf) __A,
10696 (__C << 2) | __B,
10697 (__v8sf)
10698 _mm256_setzero_ps (),
10699 (__mmask8) -1);
10702 extern __inline __m256
10703 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10704 _mm256_mask_getmant_ps (__m256 __W, __mmask8 __U, __m256 __A,
10705 _MM_MANTISSA_NORM_ENUM __B,
10706 _MM_MANTISSA_SIGN_ENUM __C)
10708 return (__m256) __builtin_ia32_getmantps256_mask ((__v8sf) __A,
10709 (__C << 2) | __B,
10710 (__v8sf) __W,
10711 (__mmask8) __U);
10714 extern __inline __m256
10715 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10716 _mm256_maskz_getmant_ps (__mmask8 __U, __m256 __A,
10717 _MM_MANTISSA_NORM_ENUM __B,
10718 _MM_MANTISSA_SIGN_ENUM __C)
10720 return (__m256) __builtin_ia32_getmantps256_mask ((__v8sf) __A,
10721 (__C << 2) | __B,
10722 (__v8sf)
10723 _mm256_setzero_ps (),
10724 (__mmask8) __U);
10727 extern __inline __m128
10728 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10729 _mm_getmant_ps (__m128 __A, _MM_MANTISSA_NORM_ENUM __B,
10730 _MM_MANTISSA_SIGN_ENUM __C)
10732 return (__m128) __builtin_ia32_getmantps128_mask ((__v4sf) __A,
10733 (__C << 2) | __B,
10734 (__v4sf)
10735 _mm_setzero_ps (),
10736 (__mmask8) -1);
10739 extern __inline __m128
10740 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10741 _mm_mask_getmant_ps (__m128 __W, __mmask8 __U, __m128 __A,
10742 _MM_MANTISSA_NORM_ENUM __B,
10743 _MM_MANTISSA_SIGN_ENUM __C)
10745 return (__m128) __builtin_ia32_getmantps128_mask ((__v4sf) __A,
10746 (__C << 2) | __B,
10747 (__v4sf) __W,
10748 (__mmask8) __U);
10751 extern __inline __m128
10752 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10753 _mm_maskz_getmant_ps (__mmask8 __U, __m128 __A,
10754 _MM_MANTISSA_NORM_ENUM __B,
10755 _MM_MANTISSA_SIGN_ENUM __C)
10757 return (__m128) __builtin_ia32_getmantps128_mask ((__v4sf) __A,
10758 (__C << 2) | __B,
10759 (__v4sf)
10760 _mm_setzero_ps (),
10761 (__mmask8) __U);
10764 extern __inline __m256d
10765 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10766 _mm256_getmant_pd (__m256d __A, _MM_MANTISSA_NORM_ENUM __B,
10767 _MM_MANTISSA_SIGN_ENUM __C)
10769 return (__m256d) __builtin_ia32_getmantpd256_mask ((__v4df) __A,
10770 (__C << 2) | __B,
10771 (__v4df)
10772 _mm256_setzero_pd (),
10773 (__mmask8) -1);
10776 extern __inline __m256d
10777 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10778 _mm256_mask_getmant_pd (__m256d __W, __mmask8 __U, __m256d __A,
10779 _MM_MANTISSA_NORM_ENUM __B,
10780 _MM_MANTISSA_SIGN_ENUM __C)
10782 return (__m256d) __builtin_ia32_getmantpd256_mask ((__v4df) __A,
10783 (__C << 2) | __B,
10784 (__v4df) __W,
10785 (__mmask8) __U);
10788 extern __inline __m256d
10789 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10790 _mm256_maskz_getmant_pd (__mmask8 __U, __m256d __A,
10791 _MM_MANTISSA_NORM_ENUM __B,
10792 _MM_MANTISSA_SIGN_ENUM __C)
10794 return (__m256d) __builtin_ia32_getmantpd256_mask ((__v4df) __A,
10795 (__C << 2) | __B,
10796 (__v4df)
10797 _mm256_setzero_pd (),
10798 (__mmask8) __U);
10801 extern __inline __m128d
10802 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10803 _mm_getmant_pd (__m128d __A, _MM_MANTISSA_NORM_ENUM __B,
10804 _MM_MANTISSA_SIGN_ENUM __C)
10806 return (__m128d) __builtin_ia32_getmantpd128_mask ((__v2df) __A,
10807 (__C << 2) | __B,
10808 (__v2df)
10809 _mm_setzero_pd (),
10810 (__mmask8) -1);
10813 extern __inline __m128d
10814 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10815 _mm_mask_getmant_pd (__m128d __W, __mmask8 __U, __m128d __A,
10816 _MM_MANTISSA_NORM_ENUM __B,
10817 _MM_MANTISSA_SIGN_ENUM __C)
10819 return (__m128d) __builtin_ia32_getmantpd128_mask ((__v2df) __A,
10820 (__C << 2) | __B,
10821 (__v2df) __W,
10822 (__mmask8) __U);
10825 extern __inline __m128d
10826 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10827 _mm_maskz_getmant_pd (__mmask8 __U, __m128d __A,
10828 _MM_MANTISSA_NORM_ENUM __B,
10829 _MM_MANTISSA_SIGN_ENUM __C)
10831 return (__m128d) __builtin_ia32_getmantpd128_mask ((__v2df) __A,
10832 (__C << 2) | __B,
10833 (__v2df)
10834 _mm_setzero_pd (),
10835 (__mmask8) __U);
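/* The getmant intrinsics extract the mantissa of each element,
   normalised into the interval selected by __B and with the sign
   handling selected by __C; as the calls above show, the two enums are
   simply packed into a single immediate as (__C << 2) | __B.  */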
10838 extern __inline __m256
10839 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10840 _mm256_mmask_i32gather_ps (__m256 __v1_old, __mmask8 __mask,
10841 __m256i __index, void const *__addr,
10842 int __scale)
10844 return (__m256) __builtin_ia32_gather3siv8sf ((__v8sf) __v1_old,
10845 __addr,
10846 (__v8si) __index,
10847 __mask, __scale);
10850 extern __inline __m128
10851 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10852 _mm_mmask_i32gather_ps (__m128 __v1_old, __mmask8 __mask,
10853 __m128i __index, void const *__addr,
10854 int __scale)
10856 return (__m128) __builtin_ia32_gather3siv4sf ((__v4sf) __v1_old,
10857 __addr,
10858 (__v4si) __index,
10859 __mask, __scale);
10862 extern __inline __m256d
10863 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10864 _mm256_mmask_i32gather_pd (__m256d __v1_old, __mmask8 __mask,
10865 __m128i __index, void const *__addr,
10866 int __scale)
10868 return (__m256d) __builtin_ia32_gather3siv4df ((__v4df) __v1_old,
10869 __addr,
10870 (__v4si) __index,
10871 __mask, __scale);
10874 extern __inline __m128d
10875 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10876 _mm_mmask_i32gather_pd (__m128d __v1_old, __mmask8 __mask,
10877 __m128i __index, void const *__addr,
10878 int __scale)
10880 return (__m128d) __builtin_ia32_gather3siv2df ((__v2df) __v1_old,
10881 __addr,
10882 (__v4si) __index,
10883 __mask, __scale);
10886 extern __inline __m128
10887 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10888 _mm256_mmask_i64gather_ps (__m128 __v1_old, __mmask8 __mask,
10889 __m256i __index, void const *__addr,
10890 int __scale)
10892 return (__m128) __builtin_ia32_gather3div8sf ((__v4sf) __v1_old,
10893 __addr,
10894 (__v4di) __index,
10895 __mask, __scale);
10898 extern __inline __m128
10899 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10900 _mm_mmask_i64gather_ps (__m128 __v1_old, __mmask8 __mask,
10901 __m128i __index, void const *__addr,
10902 int __scale)
10904 return (__m128) __builtin_ia32_gather3div4sf ((__v4sf) __v1_old,
10905 __addr,
10906 (__v2di) __index,
10907 __mask, __scale);
10910 extern __inline __m256d
10911 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10912 _mm256_mmask_i64gather_pd (__m256d __v1_old, __mmask8 __mask,
10913 __m256i __index, void const *__addr,
10914 int __scale)
10916 return (__m256d) __builtin_ia32_gather3div4df ((__v4df) __v1_old,
10917 __addr,
10918 (__v4di) __index,
10919 __mask, __scale);
10922 extern __inline __m128d
10923 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10924 _mm_mmask_i64gather_pd (__m128d __v1_old, __mmask8 __mask,
10925 __m128i __index, void const *__addr,
10926 int __scale)
10928 return (__m128d) __builtin_ia32_gather3div2df ((__v2df) __v1_old,
10929 __addr,
10930 (__v2di) __index,
10931 __mask, __scale);
10934 extern __inline __m256i
10935 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10936 _mm256_mmask_i32gather_epi32 (__m256i __v1_old, __mmask8 __mask,
10937 __m256i __index, void const *__addr,
10938 int __scale)
10940 return (__m256i) __builtin_ia32_gather3siv8si ((__v8si) __v1_old,
10941 __addr,
10942 (__v8si) __index,
10943 __mask, __scale);
10946 extern __inline __m128i
10947 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10948 _mm_mmask_i32gather_epi32 (__m128i __v1_old, __mmask8 __mask,
10949 __m128i __index, void const *__addr,
10950 int __scale)
10952 return (__m128i) __builtin_ia32_gather3siv4si ((__v4si) __v1_old,
10953 __addr,
10954 (__v4si) __index,
10955 __mask, __scale);
10958 extern __inline __m256i
10959 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10960 _mm256_mmask_i32gather_epi64 (__m256i __v1_old, __mmask8 __mask,
10961 __m128i __index, void const *__addr,
10962 int __scale)
10964 return (__m256i) __builtin_ia32_gather3siv4di ((__v4di) __v1_old,
10965 __addr,
10966 (__v4si) __index,
10967 __mask, __scale);
10970 extern __inline __m128i
10971 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10972 _mm_mmask_i32gather_epi64 (__m128i __v1_old, __mmask8 __mask,
10973 __m128i __index, void const *__addr,
10974 int __scale)
10976 return (__m128i) __builtin_ia32_gather3siv2di ((__v2di) __v1_old,
10977 __addr,
10978 (__v4si) __index,
10979 __mask, __scale);
10982 extern __inline __m128i
10983 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10984 _mm256_mmask_i64gather_epi32 (__m128i __v1_old, __mmask8 __mask,
10985 __m256i __index, void const *__addr,
10986 int __scale)
10988 return (__m128i) __builtin_ia32_gather3div8si ((__v4si) __v1_old,
10989 __addr,
10990 (__v4di) __index,
10991 __mask, __scale);
10994 extern __inline __m128i
10995 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
10996 _mm_mmask_i64gather_epi32 (__m128i __v1_old, __mmask8 __mask,
10997 __m128i __index, void const *__addr,
10998 int __scale)
11000 return (__m128i) __builtin_ia32_gather3div4si ((__v4si) __v1_old,
11001 __addr,
11002 (__v2di) __index,
11003 __mask, __scale);
11006 extern __inline __m256i
11007 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11008 _mm256_mmask_i64gather_epi64 (__m256i __v1_old, __mmask8 __mask,
11009 __m256i __index, void const *__addr,
11010 int __scale)
11012 return (__m256i) __builtin_ia32_gather3div4di ((__v4di) __v1_old,
11013 __addr,
11014 (__v4di) __index,
11015 __mask, __scale);
11018 extern __inline __m128i
11019 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11020 _mm_mmask_i64gather_epi64 (__m128i __v1_old, __mmask8 __mask,
11021 __m128i __index, void const *__addr,
11022 int __scale)
11024 return (__m128i) __builtin_ia32_gather3div2di ((__v2di) __v1_old,
11025 __addr,
11026 (__v2di) __index,
11027 __mask, __scale);
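/* The masked gather intrinsics load, for every mask bit that is set, one
   element from __addr + __index[i] * __scale, and keep the corresponding
   element of __v1_old where the bit is clear; __scale must be 1, 2, 4 or
   8.  A minimal sketch (names are illustrative only; array is assumed to
   be a float pointer with at least 15 valid elements):

     __m256i idx = _mm256_setr_epi32 (0, 2, 4, 6, 8, 10, 12, 14);
     __m256  v   = _mm256_mmask_i32gather_ps (_mm256_setzero_ps (),
					      0xFF, idx, array, 4);

   gathers every other float starting at array.  */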
11030 extern __inline void
11031 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11032 _mm256_i32scatter_ps (void *__addr, __m256i __index,
11033 __m256 __v1, const int __scale)
11035 __builtin_ia32_scattersiv8sf (__addr, (__mmask8) 0xFF,
11036 (__v8si) __index, (__v8sf) __v1,
11037 __scale);
11040 extern __inline void
11041 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11042 _mm256_mask_i32scatter_ps (void *__addr, __mmask8 __mask,
11043 __m256i __index, __m256 __v1,
11044 const int __scale)
11046 __builtin_ia32_scattersiv8sf (__addr, __mask, (__v8si) __index,
11047 (__v8sf) __v1, __scale);
11050 extern __inline void
11051 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11052 _mm_i32scatter_ps (void *__addr, __m128i __index, __m128 __v1,
11053 const int __scale)
11055 __builtin_ia32_scattersiv4sf (__addr, (__mmask8) 0xFF,
11056 (__v4si) __index, (__v4sf) __v1,
11057 __scale);
11060 extern __inline void
11061 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11062 _mm_mask_i32scatter_ps (void *__addr, __mmask8 __mask,
11063 __m128i __index, __m128 __v1,
11064 const int __scale)
11066 __builtin_ia32_scattersiv4sf (__addr, __mask, (__v4si) __index,
11067 (__v4sf) __v1, __scale);
11070 extern __inline void
11071 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11072 _mm256_i32scatter_pd (void *__addr, __m128i __index,
11073 __m256d __v1, const int __scale)
11075 __builtin_ia32_scattersiv4df (__addr, (__mmask8) 0xFF,
11076 (__v4si) __index, (__v4df) __v1,
11077 __scale);
11080 extern __inline void
11081 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11082 _mm256_mask_i32scatter_pd (void *__addr, __mmask8 __mask,
11083 __m128i __index, __m256d __v1,
11084 const int __scale)
11086 __builtin_ia32_scattersiv4df (__addr, __mask, (__v4si) __index,
11087 (__v4df) __v1, __scale);
11090 extern __inline void
11091 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11092 _mm_i32scatter_pd (void *__addr, __m128i __index,
11093 __m128d __v1, const int __scale)
11095 __builtin_ia32_scattersiv2df (__addr, (__mmask8) 0xFF,
11096 (__v4si) __index, (__v2df) __v1,
11097 __scale);
11100 extern __inline void
11101 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11102 _mm_mask_i32scatter_pd (void *__addr, __mmask8 __mask,
11103 __m128i __index, __m128d __v1,
11104 const int __scale)
11106 __builtin_ia32_scattersiv2df (__addr, __mask, (__v4si) __index,
11107 (__v2df) __v1, __scale);
11110 extern __inline void
11111 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11112 _mm256_i64scatter_ps (void *__addr, __m256i __index,
11113 __m128 __v1, const int __scale)
11115 __builtin_ia32_scatterdiv8sf (__addr, (__mmask8) 0xFF,
11116 (__v4di) __index, (__v4sf) __v1,
11117 __scale);
11120 extern __inline void
11121 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11122 _mm256_mask_i64scatter_ps (void *__addr, __mmask8 __mask,
11123 __m256i __index, __m128 __v1,
11124 const int __scale)
11126 __builtin_ia32_scatterdiv8sf (__addr, __mask, (__v4di) __index,
11127 (__v4sf) __v1, __scale);
11130 extern __inline void
11131 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11132 _mm_i64scatter_ps (void *__addr, __m128i __index, __m128 __v1,
11133 const int __scale)
11135 __builtin_ia32_scatterdiv4sf (__addr, (__mmask8) 0xFF,
11136 (__v2di) __index, (__v4sf) __v1,
11137 __scale);
11140 extern __inline void
11141 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11142 _mm_mask_i64scatter_ps (void *__addr, __mmask8 __mask,
11143 __m128i __index, __m128 __v1,
11144 const int __scale)
11146 __builtin_ia32_scatterdiv4sf (__addr, __mask, (__v2di) __index,
11147 (__v4sf) __v1, __scale);
11150 extern __inline void
11151 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11152 _mm256_i64scatter_pd (void *__addr, __m256i __index,
11153 __m256d __v1, const int __scale)
11155 __builtin_ia32_scatterdiv4df (__addr, (__mmask8) 0xFF,
11156 (__v4di) __index, (__v4df) __v1,
11157 __scale);
11160 extern __inline void
11161 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11162 _mm256_mask_i64scatter_pd (void *__addr, __mmask8 __mask,
11163 __m256i __index, __m256d __v1,
11164 const int __scale)
11166 __builtin_ia32_scatterdiv4df (__addr, __mask, (__v4di) __index,
11167 (__v4df) __v1, __scale);
11170 extern __inline void
11171 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11172 _mm_i64scatter_pd (void *__addr, __m128i __index,
11173 __m128d __v1, const int __scale)
11175 __builtin_ia32_scatterdiv2df (__addr, (__mmask8) 0xFF,
11176 (__v2di) __index, (__v2df) __v1,
11177 __scale);
11180 extern __inline void
11181 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11182 _mm_mask_i64scatter_pd (void *__addr, __mmask8 __mask,
11183 __m128i __index, __m128d __v1,
11184 const int __scale)
11186 __builtin_ia32_scatterdiv2df (__addr, __mask, (__v2di) __index,
11187 (__v2df) __v1, __scale);
11190 extern __inline void
11191 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11192 _mm256_i32scatter_epi32 (void *__addr, __m256i __index,
11193 __m256i __v1, const int __scale)
11195 __builtin_ia32_scattersiv8si (__addr, (__mmask8) 0xFF,
11196 (__v8si) __index, (__v8si) __v1,
11197 __scale);
11200 extern __inline void
11201 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11202 _mm256_mask_i32scatter_epi32 (void *__addr, __mmask8 __mask,
11203 __m256i __index, __m256i __v1,
11204 const int __scale)
11206 __builtin_ia32_scattersiv8si (__addr, __mask, (__v8si) __index,
11207 (__v8si) __v1, __scale);
11210 extern __inline void
11211 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11212 _mm_i32scatter_epi32 (void *__addr, __m128i __index,
11213 __m128i __v1, const int __scale)
11215 __builtin_ia32_scattersiv4si (__addr, (__mmask8) 0xFF,
11216 (__v4si) __index, (__v4si) __v1,
11217 __scale);
11220 extern __inline void
11221 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11222 _mm_mask_i32scatter_epi32 (void *__addr, __mmask8 __mask,
11223 __m128i __index, __m128i __v1,
11224 const int __scale)
11226 __builtin_ia32_scattersiv4si (__addr, __mask, (__v4si) __index,
11227 (__v4si) __v1, __scale);
11230 extern __inline void
11231 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11232 _mm256_i32scatter_epi64 (void *__addr, __m128i __index,
11233 __m256i __v1, const int __scale)
11235 __builtin_ia32_scattersiv4di (__addr, (__mmask8) 0xFF,
11236 (__v4si) __index, (__v4di) __v1,
11237 __scale);
11240 extern __inline void
11241 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11242 _mm256_mask_i32scatter_epi64 (void *__addr, __mmask8 __mask,
11243 __m128i __index, __m256i __v1,
11244 const int __scale)
11246 __builtin_ia32_scattersiv4di (__addr, __mask, (__v4si) __index,
11247 (__v4di) __v1, __scale);
11250 extern __inline void
11251 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11252 _mm_i32scatter_epi64 (void *__addr, __m128i __index,
11253 __m128i __v1, const int __scale)
11255 __builtin_ia32_scattersiv2di (__addr, (__mmask8) 0xFF,
11256 (__v4si) __index, (__v2di) __v1,
11257 __scale);
11260 extern __inline void
11261 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11262 _mm_mask_i32scatter_epi64 (void *__addr, __mmask8 __mask,
11263 __m128i __index, __m128i __v1,
11264 const int __scale)
11266 __builtin_ia32_scattersiv2di (__addr, __mask, (__v4si) __index,
11267 (__v2di) __v1, __scale);
11270 extern __inline void
11271 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11272 _mm256_i64scatter_epi32 (void *__addr, __m256i __index,
11273 __m128i __v1, const int __scale)
11275 __builtin_ia32_scatterdiv8si (__addr, (__mmask8) 0xFF,
11276 (__v4di) __index, (__v4si) __v1,
11277 __scale);
11280 extern __inline void
11281 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11282 _mm256_mask_i64scatter_epi32 (void *__addr, __mmask8 __mask,
11283 __m256i __index, __m128i __v1,
11284 const int __scale)
11286 __builtin_ia32_scatterdiv8si (__addr, __mask, (__v4di) __index,
11287 (__v4si) __v1, __scale);
11290 extern __inline void
11291 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11292 _mm_i64scatter_epi32 (void *__addr, __m128i __index,
11293 __m128i __v1, const int __scale)
11295 __builtin_ia32_scatterdiv4si (__addr, (__mmask8) 0xFF,
11296 (__v2di) __index, (__v4si) __v1,
11297 __scale);
11300 extern __inline void
11301 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11302 _mm_mask_i64scatter_epi32 (void *__addr, __mmask8 __mask,
11303 __m128i __index, __m128i __v1,
11304 const int __scale)
11306 __builtin_ia32_scatterdiv4si (__addr, __mask, (__v2di) __index,
11307 (__v4si) __v1, __scale);
11310 extern __inline void
11311 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11312 _mm256_i64scatter_epi64 (void *__addr, __m256i __index,
11313 __m256i __v1, const int __scale)
11315 __builtin_ia32_scatterdiv4di (__addr, (__mmask8) 0xFF,
11316 (__v4di) __index, (__v4di) __v1,
11317 __scale);
11320 extern __inline void
11321 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11322 _mm256_mask_i64scatter_epi64 (void *__addr, __mmask8 __mask,
11323 __m256i __index, __m256i __v1,
11324 const int __scale)
11326 __builtin_ia32_scatterdiv4di (__addr, __mask, (__v4di) __index,
11327 (__v4di) __v1, __scale);
11330 extern __inline void
11331 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11332 _mm_i64scatter_epi64 (void *__addr, __m128i __index,
11333 __m128i __v1, const int __scale)
11335 __builtin_ia32_scatterdiv2di (__addr, (__mmask8) 0xFF,
11336 (__v2di) __index, (__v2di) __v1,
11337 __scale);
11340 extern __inline void
11341 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11342 _mm_mask_i64scatter_epi64 (void *__addr, __mmask8 __mask,
11343 __m128i __index, __m128i __v1,
11344 const int __scale)
11346 __builtin_ia32_scatterdiv2di (__addr, __mask, (__v2di) __index,
11347 (__v2di) __v1, __scale);
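/* The scatter intrinsics mirror the gathers: for every mask bit that is
   set, one element of __v1 is stored to __addr + __index[i] * __scale
   (again with __scale of 1, 2, 4 or 8), while elements whose mask bit is
   clear leave memory untouched.  The unmasked wrappers simply pass an
   all-ones mask, as the (__mmask8) 0xFF argument above shows.  */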
11350 extern __inline __m256i
11351 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11352 _mm256_mask_shuffle_epi32 (__m256i __W, __mmask8 __U, __m256i __A,
11353 _MM_PERM_ENUM __mask)
11355 return (__m256i) __builtin_ia32_pshufd256_mask ((__v8si) __A, __mask,
11356 (__v8si) __W,
11357 (__mmask8) __U);
11360 extern __inline __m256i
11361 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11362 _mm256_maskz_shuffle_epi32 (__mmask8 __U, __m256i __A,
11363 _MM_PERM_ENUM __mask)
11365 return (__m256i) __builtin_ia32_pshufd256_mask ((__v8si) __A, __mask,
11366 (__v8si)
11367 _mm256_setzero_si256 (),
11368 (__mmask8) __U);
11371 extern __inline __m128i
11372 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11373 _mm_mask_shuffle_epi32 (__m128i __W, __mmask8 __U, __m128i __A,
11374 _MM_PERM_ENUM __mask)
11376 return (__m128i) __builtin_ia32_pshufd128_mask ((__v4si) __A, __mask,
11377 (__v4si) __W,
11378 (__mmask8) __U);
11381 extern __inline __m128i
11382 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11383 _mm_maskz_shuffle_epi32 (__mmask8 __U, __m128i __A,
11384 _MM_PERM_ENUM __mask)
11386 return (__m128i) __builtin_ia32_pshufd128_mask ((__v4si) __A, __mask,
11387 (__v4si)
11388 _mm_setzero_si128 (),
11389 (__mmask8) __U);
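/* The masked shuffle_epi32 forms take their selector as an
   _MM_PERM_ENUM value, which uses the same two-bits-per-element encoding
   as _MM_SHUFFLE; e.g. _MM_PERM_DCBA (0xE4) is the identity permutation
   (a general note on the encoding, not specific to this header).  */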
11392 extern __inline __m256i
11393 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11394 _mm256_rol_epi32 (__m256i __A, const int __B)
11396 return (__m256i) __builtin_ia32_prold256_mask ((__v8si) __A, __B,
11397 (__v8si)
11398 _mm256_setzero_si256 (),
11399 (__mmask8) -1);
11402 extern __inline __m256i
11403 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11404 _mm256_mask_rol_epi32 (__m256i __W, __mmask8 __U, __m256i __A,
11405 const int __B)
11407 return (__m256i) __builtin_ia32_prold256_mask ((__v8si) __A, __B,
11408 (__v8si) __W,
11409 (__mmask8) __U);
11412 extern __inline __m256i
11413 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11414 _mm256_maskz_rol_epi32 (__mmask8 __U, __m256i __A, const int __B)
11416 return (__m256i) __builtin_ia32_prold256_mask ((__v8si) __A, __B,
11417 (__v8si)
11418 _mm256_setzero_si256 (),
11419 (__mmask8) __U);
11422 extern __inline __m128i
11423 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11424 _mm_rol_epi32 (__m128i __A, const int __B)
11426 return (__m128i) __builtin_ia32_prold128_mask ((__v4si) __A, __B,
11427 (__v4si)
11428 _mm_setzero_si128 (),
11429 (__mmask8) -1);
11432 extern __inline __m128i
11433 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11434 _mm_mask_rol_epi32 (__m128i __W, __mmask8 __U, __m128i __A,
11435 const int __B)
11437 return (__m128i) __builtin_ia32_prold128_mask ((__v4si) __A, __B,
11438 (__v4si) __W,
11439 (__mmask8) __U);
11442 extern __inline __m128i
11443 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11444 _mm_maskz_rol_epi32 (__mmask8 __U, __m128i __A, const int __B)
11446 return (__m128i) __builtin_ia32_prold128_mask ((__v4si) __A, __B,
11447 (__v4si)
11448 _mm_setzero_si128 (),
11449 (__mmask8) __U);
11452 extern __inline __m256i
11453 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11454 _mm256_ror_epi32 (__m256i __A, const int __B)
11456 return (__m256i) __builtin_ia32_prord256_mask ((__v8si) __A, __B,
11457 (__v8si)
11458 _mm256_setzero_si256 (),
11459 (__mmask8) -1);
11462 extern __inline __m256i
11463 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11464 _mm256_mask_ror_epi32 (__m256i __W, __mmask8 __U, __m256i __A,
11465 const int __B)
11467 return (__m256i) __builtin_ia32_prord256_mask ((__v8si) __A, __B,
11468 (__v8si) __W,
11469 (__mmask8) __U);
11472 extern __inline __m256i
11473 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11474 _mm256_maskz_ror_epi32 (__mmask8 __U, __m256i __A, const int __B)
11476 return (__m256i) __builtin_ia32_prord256_mask ((__v8si) __A, __B,
11477 (__v8si)
11478 _mm256_setzero_si256 (),
11479 (__mmask8) __U);
11482 extern __inline __m128i
11483 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11484 _mm_ror_epi32 (__m128i __A, const int __B)
11486 return (__m128i) __builtin_ia32_prord128_mask ((__v4si) __A, __B,
11487 (__v4si)
11488 _mm_setzero_si128 (),
11489 (__mmask8) -1);
11492 extern __inline __m128i
11493 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11494 _mm_mask_ror_epi32 (__m128i __W, __mmask8 __U, __m128i __A,
11495 const int __B)
11497 return (__m128i) __builtin_ia32_prord128_mask ((__v4si) __A, __B,
11498 (__v4si) __W,
11499 (__mmask8) __U);
11502 extern __inline __m128i
11503 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11504 _mm_maskz_ror_epi32 (__mmask8 __U, __m128i __A, const int __B)
11506 return (__m128i) __builtin_ia32_prord128_mask ((__v4si) __A, __B,
11507 (__v4si)
11508 _mm_setzero_si128 (),
11509 (__mmask8) __U);
11512 extern __inline __m256i
11513 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11514 _mm256_rol_epi64 (__m256i __A, const int __B)
11516 return (__m256i) __builtin_ia32_prolq256_mask ((__v4di) __A, __B,
11517 (__v4di)
11518 _mm256_setzero_si256 (),
11519 (__mmask8) -1);
11522 extern __inline __m256i
11523 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11524 _mm256_mask_rol_epi64 (__m256i __W, __mmask8 __U, __m256i __A,
11525 const int __B)
11527 return (__m256i) __builtin_ia32_prolq256_mask ((__v4di) __A, __B,
11528 (__v4di) __W,
11529 (__mmask8) __U);
11532 extern __inline __m256i
11533 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11534 _mm256_maskz_rol_epi64 (__mmask8 __U, __m256i __A, const int __B)
11536 return (__m256i) __builtin_ia32_prolq256_mask ((__v4di) __A, __B,
11537 (__v4di)
11538 _mm256_setzero_si256 (),
11539 (__mmask8) __U);
11542 extern __inline __m128i
11543 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11544 _mm_rol_epi64 (__m128i __A, const int __B)
11546 return (__m128i) __builtin_ia32_prolq128_mask ((__v2di) __A, __B,
11547 (__v2di)
11548 _mm_setzero_si128 (),
11549 (__mmask8) -1);
11552 extern __inline __m128i
11553 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11554 _mm_mask_rol_epi64 (__m128i __W, __mmask8 __U, __m128i __A,
11555 const int __B)
11557 return (__m128i) __builtin_ia32_prolq128_mask ((__v2di) __A, __B,
11558 (__v2di) __W,
11559 (__mmask8) __U);
11562 extern __inline __m128i
11563 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11564 _mm_maskz_rol_epi64 (__mmask8 __U, __m128i __A, const int __B)
11566 return (__m128i) __builtin_ia32_prolq128_mask ((__v2di) __A, __B,
11567 (__v2di)
11568 _mm_setzero_si128 (),
11569 (__mmask8) __U);
11572 extern __inline __m256i
11573 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11574 _mm256_ror_epi64 (__m256i __A, const int __B)
11576 return (__m256i) __builtin_ia32_prorq256_mask ((__v4di) __A, __B,
11577 (__v4di)
11578 _mm256_setzero_si256 (),
11579 (__mmask8) -1);
11582 extern __inline __m256i
11583 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11584 _mm256_mask_ror_epi64 (__m256i __W, __mmask8 __U, __m256i __A,
11585 const int __B)
11587 return (__m256i) __builtin_ia32_prorq256_mask ((__v4di) __A, __B,
11588 (__v4di) __W,
11589 (__mmask8) __U);
11592 extern __inline __m256i
11593 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11594 _mm256_maskz_ror_epi64 (__mmask8 __U, __m256i __A, const int __B)
11596 return (__m256i) __builtin_ia32_prorq256_mask ((__v4di) __A, __B,
11597 (__v4di)
11598 _mm256_setzero_si256 (),
11599 (__mmask8) __U);
11602 extern __inline __m128i
11603 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11604 _mm_ror_epi64 (__m128i __A, const int __B)
11606 return (__m128i) __builtin_ia32_prorq128_mask ((__v2di) __A, __B,
11607 (__v2di)
11608 _mm_setzero_si128 (),
11609 (__mmask8) -1);
11612 extern __inline __m128i
11613 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11614 _mm_mask_ror_epi64 (__m128i __W, __mmask8 __U, __m128i __A,
11615 const int __B)
11617 return (__m128i) __builtin_ia32_prorq128_mask ((__v2di) __A, __B,
11618 (__v2di) __W,
11619 (__mmask8) __U);
11622 extern __inline __m128i
11623 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11624 _mm_maskz_ror_epi64 (__mmask8 __U, __m128i __A, const int __B)
11626 return (__m128i) __builtin_ia32_prorq128_mask ((__v2di) __A, __B,
11627 (__v2di)
11628 _mm_setzero_si128 (),
11629 (__mmask8) __U);
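/* The VPROL(D|Q)/VPROR(D|Q) intrinsics above rotate each 32-bit or 64-bit
   element left or right by the immediate count; the _mask_ forms take the
   inactive elements from __W and the _maskz_ forms zero them.  Illustrative
   sketch with arbitrary values:

     __m128i v = _mm_set1_epi32 (0x80000001);
     __m128i r = _mm_maskz_rol_epi32 (0x5, v, 1);

   leaves lanes 0 and 2 of r equal to 3 and zeroes lanes 1 and 3.  */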
11632 extern __inline __m128i
11633 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11634 _mm_alignr_epi32 (__m128i __A, __m128i __B, const int __imm)
11636 return (__m128i) __builtin_ia32_alignd128_mask ((__v4si) __A,
11637 (__v4si) __B, __imm,
11638 (__v4si)
11639 _mm_setzero_si128 (),
11640 (__mmask8) -1);
11643 extern __inline __m128i
11644 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11645 _mm_mask_alignr_epi32 (__m128i __W, __mmask8 __U, __m128i __A,
11646 __m128i __B, const int __imm)
11648 return (__m128i) __builtin_ia32_alignd128_mask ((__v4si) __A,
11649 (__v4si) __B, __imm,
11650 (__v4si) __W,
11651 (__mmask8) __U);
11654 extern __inline __m128i
11655 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11656 _mm_maskz_alignr_epi32 (__mmask8 __U, __m128i __A, __m128i __B,
11657 const int __imm)
11659 return (__m128i) __builtin_ia32_alignd128_mask ((__v4si) __A,
11660 (__v4si) __B, __imm,
11661 (__v4si)
11662 _mm_setzero_si128 (),
11663 (__mmask8) __U);
11666 extern __inline __m128i
11667 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11668 _mm_alignr_epi64 (__m128i __A, __m128i __B, const int __imm)
11670 return (__m128i) __builtin_ia32_alignq128_mask ((__v2di) __A,
11671 (__v2di) __B, __imm,
11672 (__v2di)
11673 _mm_setzero_si128 (),
11674 (__mmask8) -1);
11677 extern __inline __m128i
11678 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11679 _mm_mask_alignr_epi64 (__m128i __W, __mmask8 __U, __m128i __A,
11680 __m128i __B, const int __imm)
11682 return (__m128i) __builtin_ia32_alignq128_mask ((__v2di) __A,
11683 (__v2di) __B, __imm,
11684 (__v2di) __W,
11685 (__mmask8) __U);
11688 extern __inline __m128i
11689 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11690 _mm_maskz_alignr_epi64 (__mmask8 __U, __m128i __A, __m128i __B,
11691 const int __imm)
11693 return (__m128i) __builtin_ia32_alignq128_mask ((__v2di) __A,
11694 (__v2di) __B, __imm,
11695 (__v2di)
11696 _mm_setzero_si128 (),
11697 (__mmask8) __U);
11700 extern __inline __m256i
11701 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11702 _mm256_alignr_epi32 (__m256i __A, __m256i __B, const int __imm)
11704 return (__m256i) __builtin_ia32_alignd256_mask ((__v8si) __A,
11705 (__v8si) __B, __imm,
11706 (__v8si)
11707 _mm256_setzero_si256 (),
11708 (__mmask8) -1);
11711 extern __inline __m256i
11712 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11713 _mm256_mask_alignr_epi32 (__m256i __W, __mmask8 __U, __m256i __A,
11714 __m256i __B, const int __imm)
11716 return (__m256i) __builtin_ia32_alignd256_mask ((__v8si) __A,
11717 (__v8si) __B, __imm,
11718 (__v8si) __W,
11719 (__mmask8) __U);
11722 extern __inline __m256i
11723 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11724 _mm256_maskz_alignr_epi32 (__mmask8 __U, __m256i __A, __m256i __B,
11725 const int __imm)
11727 return (__m256i) __builtin_ia32_alignd256_mask ((__v8si) __A,
11728 (__v8si) __B, __imm,
11729 (__v8si)
11730 _mm256_setzero_si256 (),
11731 (__mmask8) __U);
11734 extern __inline __m256i
11735 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11736 _mm256_alignr_epi64 (__m256i __A, __m256i __B, const int __imm)
11738 return (__m256i) __builtin_ia32_alignq256_mask ((__v4di) __A,
11739 (__v4di) __B, __imm,
11740 (__v4di)
11741 _mm256_setzero_si256 (),
11742 (__mmask8) -1);
11745 extern __inline __m256i
11746 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11747 _mm256_mask_alignr_epi64 (__m256i __W, __mmask8 __U, __m256i __A,
11748 __m256i __B, const int __imm)
11750 return (__m256i) __builtin_ia32_alignq256_mask ((__v4di) __A,
11751 (__v4di) __B, __imm,
11752 (__v4di) __W,
11753 (__mmask8) __U);
11756 extern __inline __m256i
11757 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11758 _mm256_maskz_alignr_epi64 (__mmask8 __U, __m256i __A, __m256i __B,
11759 const int __imm)
11761 return (__m256i) __builtin_ia32_alignq256_mask ((__v4di) __A,
11762 (__v4di) __B, __imm,
11763 (__v4di)
11764 _mm256_setzero_si256 (),
11765 (__mmask8) __U);
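/* The VALIGND/VALIGNQ intrinsics above concatenate __A (upper half) with
   __B (lower half), shift the pair right by __imm whole elements and return
   the low half, with optional merge or zero masking.  Illustrative sketch
   with arbitrary values, elements listed from the lowest:

     __m128i a = _mm_set_epi32 (7, 6, 5, 4);
     __m128i b = _mm_set_epi32 (3, 2, 1, 0);
     __m128i r = _mm_alignr_epi32 (a, b, 1);    yields { 1, 2, 3, 4 }  */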
11768 extern __inline __m128i
11769 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11770 _mm_mask_cvtps_ph (__m128i __W, __mmask8 __U, __m128 __A,
11771 const int __I)
11773 return (__m128i) __builtin_ia32_vcvtps2ph_mask ((__v4sf) __A, __I,
11774 (__v8hi) __W,
11775 (__mmask8) __U);
11778 extern __inline __m128i
11779 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11780 _mm_maskz_cvtps_ph (__mmask8 __U, __m128 __A, const int __I)
11782 return (__m128i) __builtin_ia32_vcvtps2ph_mask ((__v4sf) __A, __I,
11783 (__v8hi)
11784 _mm_setzero_si128 (),
11785 (__mmask8) __U);
11788 extern __inline __m128i
11789 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11790 _mm256_mask_cvtps_ph (__m128i __W, __mmask8 __U, __m256 __A,
11791 const int __I)
11793 return (__m128i) __builtin_ia32_vcvtps2ph256_mask ((__v8sf) __A, __I,
11794 (__v8hi) __W,
11795 (__mmask8) __U);
11798 extern __inline __m128i
11799 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11800 _mm256_maskz_cvtps_ph (__mmask8 __U, __m256 __A, const int __I)
11802 return (__m128i) __builtin_ia32_vcvtps2ph256_mask ((__v8sf) __A, __I,
11803 (__v8hi)
11804 _mm_setzero_si128 (),
11805 (__mmask8) __U);
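/* The masked VCVTPS2PH intrinsics above convert packed single-precision
   values to half-precision floats in the low part of the __m128i result;
   the immediate selects the rounding behaviour, e.g.
   _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC.  Illustrative sketch for
   an arbitrary __m256 value x:

     __m128i h = _mm256_maskz_cvtps_ph (0xff, x, _MM_FROUND_TO_NEAREST_INT);  */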
11808 extern __inline __m256i
11809 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11810 _mm256_mask_srai_epi32 (__m256i __W, __mmask8 __U, __m256i __A,
11811 const int __imm)
11813 return (__m256i) __builtin_ia32_psradi256_mask ((__v8si) __A, __imm,
11814 (__v8si) __W,
11815 (__mmask8) __U);
11818 extern __inline __m256i
11819 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11820 _mm256_maskz_srai_epi32 (__mmask8 __U, __m256i __A, const int __imm)
11822 return (__m256i) __builtin_ia32_psradi256_mask ((__v8si) __A, __imm,
11823 (__v8si)
11824 _mm256_setzero_si256 (),
11825 (__mmask8) __U);
11828 extern __inline __m128i
11829 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11830 _mm_mask_srai_epi32 (__m128i __W, __mmask8 __U, __m128i __A,
11831 const int __imm)
11833 return (__m128i) __builtin_ia32_psradi128_mask ((__v4si) __A, __imm,
11834 (__v4si) __W,
11835 (__mmask8) __U);
11838 extern __inline __m128i
11839 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11840 _mm_maskz_srai_epi32 (__mmask8 __U, __m128i __A, const int __imm)
11842 return (__m128i) __builtin_ia32_psradi128_mask ((__v4si) __A, __imm,
11843 (__v4si)
11844 _mm_setzero_si128 (),
11845 (__mmask8) __U);
11848 extern __inline __m256i
11849 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11850 _mm256_srai_epi64 (__m256i __A, const int __imm)
11852 return (__m256i) __builtin_ia32_psraqi256_mask ((__v4di) __A, __imm,
11853 (__v4di)
11854 _mm256_setzero_si256 (),
11855 (__mmask8) -1);
11858 extern __inline __m256i
11859 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11860 _mm256_mask_srai_epi64 (__m256i __W, __mmask8 __U, __m256i __A,
11861 const int __imm)
11863 return (__m256i) __builtin_ia32_psraqi256_mask ((__v4di) __A, __imm,
11864 (__v4di) __W,
11865 (__mmask8) __U);
11868 extern __inline __m256i
11869 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11870 _mm256_maskz_srai_epi64 (__mmask8 __U, __m256i __A, const int __imm)
11872 return (__m256i) __builtin_ia32_psraqi256_mask ((__v4di) __A, __imm,
11873 (__v4di)
11874 _mm256_setzero_si256 (),
11875 (__mmask8) __U);
11878 extern __inline __m128i
11879 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11880 _mm_srai_epi64 (__m128i __A, const int __imm)
11882 return (__m128i) __builtin_ia32_psraqi128_mask ((__v2di) __A, __imm,
11883 (__v2di)
11884 _mm_setzero_si128 (),
11885 (__mmask8) -1);
11888 extern __inline __m128i
11889 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11890 _mm_mask_srai_epi64 (__m128i __W, __mmask8 __U, __m128i __A,
11891 const int __imm)
11893 return (__m128i) __builtin_ia32_psraqi128_mask ((__v2di) __A, __imm,
11894 (__v2di) __W,
11895 (__mmask8) __U);
11898 extern __inline __m128i
11899 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11900 _mm_maskz_srai_epi64 (__mmask8 __U, __m128i __A, const int __imm)
11902 return (__m128i) __builtin_ia32_psraqi128_mask ((__v2di) __A, __imm,
11903 (__v2di)
11904 _mm_setzero_si128 (),
11905 (__mmask8) __U);
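/* The VPSRAD/VPSRAQ intrinsics above shift each element right
   arithmetically by the immediate count, filling with copies of the sign
   bit.  The 64-bit element forms (_mm*_srai_epi64) are new here: AVX2 only
   offered logical 64-bit shifts.  Illustrative sketch:

     __m128i r = _mm_srai_epi64 (_mm_set1_epi64x (-16), 2);

   sets both lanes of r to -4.  */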
11908 extern __inline __m128i
11909 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11910 _mm_mask_slli_epi32 (__m128i __W, __mmask8 __U, __m128i __A, int __B)
11912 return (__m128i) __builtin_ia32_pslldi128_mask ((__v4si) __A, __B,
11913 (__v4si) __W,
11914 (__mmask8) __U);
11917 extern __inline __m128i
11918 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11919 _mm_maskz_slli_epi32 (__mmask8 __U, __m128i __A, int __B)
11921 return (__m128i) __builtin_ia32_pslldi128_mask ((__v4si) __A, __B,
11922 (__v4si)
11923 _mm_setzero_si128 (),
11924 (__mmask8) __U);
11927 extern __inline __m128i
11928 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11929 _mm_mask_slli_epi64 (__m128i __W, __mmask8 __U, __m128i __A, int __B)
11931 return (__m128i) __builtin_ia32_psllqi128_mask ((__v2di) __A, __B,
11932 (__v2di) __W,
11933 (__mmask8) __U);
11936 extern __inline __m128i
11937 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11938 _mm_maskz_slli_epi64 (__mmask8 __U, __m128i __A, int __B)
11940 return (__m128i) __builtin_ia32_psllqi128_mask ((__v2di) __A, __B,
11941 (__v2di)
11942 _mm_setzero_si128 (),
11943 (__mmask8) __U);
11946 extern __inline __m256i
11947 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11948 _mm256_mask_slli_epi32 (__m256i __W, __mmask8 __U, __m256i __A,
11949 int __B)
11951 return (__m256i) __builtin_ia32_pslldi256_mask ((__v8si) __A, __B,
11952 (__v8si) __W,
11953 (__mmask8) __U);
11956 extern __inline __m256i
11957 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11958 _mm256_maskz_slli_epi32 (__mmask8 __U, __m256i __A, int __B)
11960 return (__m256i) __builtin_ia32_pslldi256_mask ((__v8si) __A, __B,
11961 (__v8si)
11962 _mm256_setzero_si256 (),
11963 (__mmask8) __U);
11966 extern __inline __m256i
11967 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11968 _mm256_mask_slli_epi64 (__m256i __W, __mmask8 __U, __m256i __A,
11969 int __B)
11971 return (__m256i) __builtin_ia32_psllqi256_mask ((__v4di) __A, __B,
11972 (__v4di) __W,
11973 (__mmask8) __U);
11976 extern __inline __m256i
11977 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11978 _mm256_maskz_slli_epi64 (__mmask8 __U, __m256i __A, int __B)
11980 return (__m256i) __builtin_ia32_psllqi256_mask ((__v4di) __A, __B,
11981 (__v4di)
11982 _mm256_setzero_si256 (),
11983 (__mmask8) __U);
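/* The masked VPSLLD/VPSLLQ intrinsics above shift each element left by the
   immediate count (counts of element width or more produce zero), merging
   inactive elements from __W or zeroing them under __U.  For an arbitrary
   __m128i value v:

     __m128i r = _mm_maskz_slli_epi32 (0x3, v, 4);

   shifts the two low lanes of v left by 4 and zeroes the two high lanes.  */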
11986 extern __inline __m256d
11987 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11988 _mm256_mask_permutex_pd (__m256d __W, __mmask8 __U, __m256d __X,
11989 const int __imm)
11991 return (__m256d) __builtin_ia32_permdf256_mask ((__v4df) __X, __imm,
11992 (__v4df) __W,
11993 (__mmask8) __U);
11996 extern __inline __m256d
11997 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
11998 _mm256_maskz_permutex_pd (__mmask8 __U, __m256d __X, const int __imm)
12000 return (__m256d) __builtin_ia32_permdf256_mask ((__v4df) __X, __imm,
12001 (__v4df)
12002 _mm256_setzero_pd (),
12003 (__mmask8) __U);
12006 extern __inline __m256d
12007 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12008 _mm256_mask_permute_pd (__m256d __W, __mmask8 __U, __m256d __X,
12009 const int __C)
12011 return (__m256d) __builtin_ia32_vpermilpd256_mask ((__v4df) __X, __C,
12012 (__v4df) __W,
12013 (__mmask8) __U);
12016 extern __inline __m256d
12017 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12018 _mm256_maskz_permute_pd (__mmask8 __U, __m256d __X, const int __C)
12020 return (__m256d) __builtin_ia32_vpermilpd256_mask ((__v4df) __X, __C,
12021 (__v4df)
12022 _mm256_setzero_pd (),
12023 (__mmask8) __U);
12026 extern __inline __m128d
12027 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12028 _mm_mask_permute_pd (__m128d __W, __mmask8 __U, __m128d __X,
12029 const int __C)
12031 return (__m128d) __builtin_ia32_vpermilpd_mask ((__v2df) __X, __C,
12032 (__v2df) __W,
12033 (__mmask8) __U);
12036 extern __inline __m128d
12037 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12038 _mm_maskz_permute_pd (__mmask8 __U, __m128d __X, const int __C)
12040 return (__m128d) __builtin_ia32_vpermilpd_mask ((__v2df) __X, __C,
12041 (__v2df)
12042 _mm_setzero_pd (),
12043 (__mmask8) __U);
12046 extern __inline __m256
12047 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12048 _mm256_mask_permute_ps (__m256 __W, __mmask8 __U, __m256 __X,
12049 const int __C)
12051 return (__m256) __builtin_ia32_vpermilps256_mask ((__v8sf) __X, __C,
12052 (__v8sf) __W,
12053 (__mmask8) __U);
12056 extern __inline __m256
12057 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12058 _mm256_maskz_permute_ps (__mmask8 __U, __m256 __X, const int __C)
12060 return (__m256) __builtin_ia32_vpermilps256_mask ((__v8sf) __X, __C,
12061 (__v8sf)
12062 _mm256_setzero_ps (),
12063 (__mmask8) __U);
12066 extern __inline __m128
12067 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12068 _mm_mask_permute_ps (__m128 __W, __mmask8 __U, __m128 __X,
12069 const int __C)
12071 return (__m128) __builtin_ia32_vpermilps_mask ((__v4sf) __X, __C,
12072 (__v4sf) __W,
12073 (__mmask8) __U);
12076 extern __inline __m128
12077 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12078 _mm_maskz_permute_ps (__mmask8 __U, __m128 __X, const int __C)
12080 return (__m128) __builtin_ia32_vpermilps_mask ((__v4sf) __X, __C,
12081 (__v4sf)
12082 _mm_setzero_ps (),
12083 (__mmask8) __U);
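/* The masked permute intrinsics above cover VPERMPD (cross-lane selection of
   the four doubles by 2-bit fields of the immediate) and VPERMILPD/VPERMILPS
   (selection within each 128-bit lane).  Illustrative sketch for an
   arbitrary __m256d value x:

     __m256d r = _mm256_maskz_permutex_pd (0xf, x, 0x1b);

   reverses the four doubles of x (0x1b selects elements 3, 2, 1, 0).  */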
12086 extern __inline __m256d
12087 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12088 _mm256_mask_blend_pd (__mmask8 __U, __m256d __A, __m256d __W)
12090 return (__m256d) __builtin_ia32_blendmpd_256_mask ((__v4df) __A,
12091 (__v4df) __W,
12092 (__mmask8) __U);
12095 extern __inline __m256
12096 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12097 _mm256_mask_blend_ps (__mmask8 __U, __m256 __A, __m256 __W)
12099 return (__m256) __builtin_ia32_blendmps_256_mask ((__v8sf) __A,
12100 (__v8sf) __W,
12101 (__mmask8) __U);
12104 extern __inline __m256i
12105 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12106 _mm256_mask_blend_epi64 (__mmask8 __U, __m256i __A, __m256i __W)
12108 return (__m256i) __builtin_ia32_blendmq_256_mask ((__v4di) __A,
12109 (__v4di) __W,
12110 (__mmask8) __U);
12113 extern __inline __m256i
12114 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12115 _mm256_mask_blend_epi32 (__mmask8 __U, __m256i __A, __m256i __W)
12117 return (__m256i) __builtin_ia32_blendmd_256_mask ((__v8si) __A,
12118 (__v8si) __W,
12119 (__mmask8) __U);
12122 extern __inline __m128d
12123 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12124 _mm_mask_blend_pd (__mmask8 __U, __m128d __A, __m128d __W)
12126 return (__m128d) __builtin_ia32_blendmpd_128_mask ((__v2df) __A,
12127 (__v2df) __W,
12128 (__mmask8) __U);
12131 extern __inline __m128
12132 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12133 _mm_mask_blend_ps (__mmask8 __U, __m128 __A, __m128 __W)
12135 return (__m128) __builtin_ia32_blendmps_128_mask ((__v4sf) __A,
12136 (__v4sf) __W,
12137 (__mmask8) __U);
12140 extern __inline __m128i
12141 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12142 _mm_mask_blend_epi64 (__mmask8 __U, __m128i __A, __m128i __W)
12144 return (__m128i) __builtin_ia32_blendmq_128_mask ((__v2di) __A,
12145 (__v2di) __W,
12146 (__mmask8) __U);
12149 extern __inline __m128i
12150 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12151 _mm_mask_blend_epi32 (__mmask8 __U, __m128i __A, __m128i __W)
12153 return (__m128i) __builtin_ia32_blendmd_128_mask ((__v4si) __A,
12154 (__v4si) __W,
12155 (__mmask8) __U);
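/* The mask blend intrinsics above map to VBLENDMPD/VBLENDMPS and
   VPBLENDMD/VPBLENDMQ: element i of the result is taken from __W when bit i
   of __U is set and from __A otherwise.  For arbitrary a and b:

     __m256d r = _mm256_mask_blend_pd (0x5, a, b);

   gives r = { b[0], a[1], b[2], a[3] }.  */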
12158 extern __inline __mmask8
12159 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12160 _mm256_cmp_epi64_mask (__m256i __X, __m256i __Y, const int __P)
12162 return (__mmask8) __builtin_ia32_cmpq256_mask ((__v4di) __X,
12163 (__v4di) __Y, __P,
12164 (__mmask8) -1);
12167 extern __inline __mmask8
12168 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12169 _mm256_cmp_epi32_mask (__m256i __X, __m256i __Y, const int __P)
12171 return (__mmask8) __builtin_ia32_cmpd256_mask ((__v8si) __X,
12172 (__v8si) __Y, __P,
12173 (__mmask8) -1);
12176 extern __inline __mmask8
12177 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12178 _mm256_cmp_epu64_mask (__m256i __X, __m256i __Y, const int __P)
12180 return (__mmask8) __builtin_ia32_ucmpq256_mask ((__v4di) __X,
12181 (__v4di) __Y, __P,
12182 (__mmask8) -1);
12185 extern __inline __mmask8
12186 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12187 _mm256_cmp_epu32_mask (__m256i __X, __m256i __Y, const int __P)
12189 return (__mmask8) __builtin_ia32_ucmpd256_mask ((__v8si) __X,
12190 (__v8si) __Y, __P,
12191 (__mmask8) -1);
12194 extern __inline __mmask8
12195 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12196 _mm256_cmp_pd_mask (__m256d __X, __m256d __Y, const int __P)
12198 return (__mmask8) __builtin_ia32_cmppd256_mask ((__v4df) __X,
12199 (__v4df) __Y, __P,
12200 (__mmask8) -1);
12203 extern __inline __mmask8
12204 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12205 _mm256_cmp_ps_mask (__m256 __X, __m256 __Y, const int __P)
12207 return (__mmask8) __builtin_ia32_cmpps256_mask ((__v8sf) __X,
12208 (__v8sf) __Y, __P,
12209 (__mmask8) -1);
12212 extern __inline __mmask8
12213 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12214 _mm256_mask_cmp_epi64_mask (__mmask8 __U, __m256i __X, __m256i __Y,
12215 const int __P)
12217 return (__mmask8) __builtin_ia32_cmpq256_mask ((__v4di) __X,
12218 (__v4di) __Y, __P,
12219 (__mmask8) __U);
12222 extern __inline __mmask8
12223 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12224 _mm256_mask_cmp_epi32_mask (__mmask8 __U, __m256i __X, __m256i __Y,
12225 const int __P)
12227 return (__mmask8) __builtin_ia32_cmpd256_mask ((__v8si) __X,
12228 (__v8si) __Y, __P,
12229 (__mmask8) __U);
12232 extern __inline __mmask8
12233 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12234 _mm256_mask_cmp_epu64_mask (__mmask8 __U, __m256i __X, __m256i __Y,
12235 const int __P)
12237 return (__mmask8) __builtin_ia32_ucmpq256_mask ((__v4di) __X,
12238 (__v4di) __Y, __P,
12239 (__mmask8) __U);
12242 extern __inline __mmask8
12243 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12244 _mm256_mask_cmp_epu32_mask (__mmask8 __U, __m256i __X, __m256i __Y,
12245 const int __P)
12247 return (__mmask8) __builtin_ia32_ucmpd256_mask ((__v8si) __X,
12248 (__v8si) __Y, __P,
12249 (__mmask8) __U);
12252 extern __inline __mmask8
12253 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12254 _mm256_mask_cmp_pd_mask (__mmask8 __U, __m256d __X, __m256d __Y,
12255 const int __P)
12257 return (__mmask8) __builtin_ia32_cmppd256_mask ((__v4df) __X,
12258 (__v4df) __Y, __P,
12259 (__mmask8) __U);
12262 extern __inline __mmask8
12263 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12264 _mm256_mask_cmp_ps_mask (__mmask8 __U, __m256 __X, __m256 __Y,
12265 const int __P)
12267 return (__mmask8) __builtin_ia32_cmpps256_mask ((__v8sf) __X,
12268 (__v8sf) __Y, __P,
12269 (__mmask8) __U);
12272 extern __inline __mmask8
12273 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12274 _mm_cmp_epi64_mask (__m128i __X, __m128i __Y, const int __P)
12276 return (__mmask8) __builtin_ia32_cmpq128_mask ((__v2di) __X,
12277 (__v2di) __Y, __P,
12278 (__mmask8) -1);
12281 extern __inline __mmask8
12282 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12283 _mm_cmp_epi32_mask (__m128i __X, __m128i __Y, const int __P)
12285 return (__mmask8) __builtin_ia32_cmpd128_mask ((__v4si) __X,
12286 (__v4si) __Y, __P,
12287 (__mmask8) -1);
12290 extern __inline __mmask8
12291 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12292 _mm_cmp_epu64_mask (__m128i __X, __m128i __Y, const int __P)
12294 return (__mmask8) __builtin_ia32_ucmpq128_mask ((__v2di) __X,
12295 (__v2di) __Y, __P,
12296 (__mmask8) -1);
12299 extern __inline __mmask8
12300 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12301 _mm_cmp_epu32_mask (__m128i __X, __m128i __Y, const int __P)
12303 return (__mmask8) __builtin_ia32_ucmpd128_mask ((__v4si) __X,
12304 (__v4si) __Y, __P,
12305 (__mmask8) -1);
12308 extern __inline __mmask8
12309 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12310 _mm_cmp_pd_mask (__m128d __X, __m128d __Y, const int __P)
12312 return (__mmask8) __builtin_ia32_cmppd128_mask ((__v2df) __X,
12313 (__v2df) __Y, __P,
12314 (__mmask8) -1);
12317 extern __inline __mmask8
12318 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12319 _mm_cmp_ps_mask (__m128 __X, __m128 __Y, const int __P)
12321 return (__mmask8) __builtin_ia32_cmpps128_mask ((__v4sf) __X,
12322 (__v4sf) __Y, __P,
12323 (__mmask8) -1);
12326 extern __inline __mmask8
12327 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12328 _mm_mask_cmp_epi64_mask (__mmask8 __U, __m128i __X, __m128i __Y,
12329 const int __P)
12331 return (__mmask8) __builtin_ia32_cmpq128_mask ((__v2di) __X,
12332 (__v2di) __Y, __P,
12333 (__mmask8) __U);
12336 extern __inline __mmask8
12337 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12338 _mm_mask_cmp_epi32_mask (__mmask8 __U, __m128i __X, __m128i __Y,
12339 const int __P)
12341 return (__mmask8) __builtin_ia32_cmpd128_mask ((__v4si) __X,
12342 (__v4si) __Y, __P,
12343 (__mmask8) __U);
12346 extern __inline __mmask8
12347 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12348 _mm_mask_cmp_epu64_mask (__mmask8 __U, __m128i __X, __m128i __Y,
12349 const int __P)
12351 return (__mmask8) __builtin_ia32_ucmpq128_mask ((__v2di) __X,
12352 (__v2di) __Y, __P,
12353 (__mmask8) __U);
12356 extern __inline __mmask8
12357 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12358 _mm_mask_cmp_epu32_mask (__mmask8 __U, __m128i __X, __m128i __Y,
12359 const int __P)
12361 return (__mmask8) __builtin_ia32_ucmpd128_mask ((__v4si) __X,
12362 (__v4si) __Y, __P,
12363 (__mmask8) __U);
12366 extern __inline __mmask8
12367 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12368 _mm_mask_cmp_pd_mask (__mmask8 __U, __m128d __X, __m128d __Y,
12369 const int __P)
12371 return (__mmask8) __builtin_ia32_cmppd128_mask ((__v2df) __X,
12372 (__v2df) __Y, __P,
12373 (__mmask8) __U);
12376 extern __inline __mmask8
12377 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12378 _mm_mask_cmp_ps_mask (__mmask8 __U, __m128 __X, __m128 __Y,
12379 const int __P)
12381 return (__mmask8) __builtin_ia32_cmpps128_mask ((__v4sf) __X,
12382 (__v4sf) __Y, __P,
12383 (__mmask8) __U);
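/* The comparison intrinsics above return one result bit per element in an
   __mmask8.  The integer forms take a _MM_CMPINT_* predicate, the
   float/double forms a _CMP_* predicate, and the _mask_cmp_* variants only
   compare elements whose bit is set in the incoming mask __U (all other
   bits of the result are 0).  Illustrative sketch:

     __mmask8 m = _mm_cmp_epi32_mask (x, y, _MM_CMPINT_LT);

   sets bit i of m where x[i] < y[i] as signed 32-bit integers.  */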
12386 extern __inline __m256d
12387 __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
12388 _mm256_permutex_pd (__m256d __X, const int __M)
12390 return (__m256d) __builtin_ia32_permdf256_mask ((__v4df) __X, __M,
12391 (__v4df)
12392 _mm256_undefined_pd (),
12393 (__mmask8) -1);
12396 #else
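/* Without __OPTIMIZE__ the inline functions above need not be folded, so
   the intrinsics whose builtins require a compile-time immediate are
   provided as macros instead; the immediate argument then reaches the
   builtin directly as a constant expression.  */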
12397 #define _mm256_permutex_pd(X, M) \
12398 ((__m256d) __builtin_ia32_permdf256_mask ((__v4df)(__m256d)(X), (int)(M), \
12399 (__v4df)(__m256d) \
12400 _mm256_undefined_pd (), \
12401 (__mmask8)-1))
12403 #define _mm256_permutex_epi64(X, I) \
12404 ((__m256i) __builtin_ia32_permdi256_mask ((__v4di)(__m256i)(X), \
12405 (int)(I), \
12406 (__v4di)(__m256i) \
12407 (_mm256_setzero_si256 ()),\
12408 (__mmask8) -1))
12410 #define _mm256_maskz_permutex_epi64(M, X, I) \
12411 ((__m256i) __builtin_ia32_permdi256_mask ((__v4di)(__m256i)(X), \
12412 (int)(I), \
12413 (__v4di)(__m256i) \
12414 (_mm256_setzero_si256 ()),\
12415 (__mmask8)(M)))
12417 #define _mm256_mask_permutex_epi64(W, M, X, I) \
12418 ((__m256i) __builtin_ia32_permdi256_mask ((__v4di)(__m256i)(X), \
12419 (int)(I), \
12420 (__v4di)(__m256i)(W), \
12421 (__mmask8)(M)))
12423 #define _mm256_insertf32x4(X, Y, C) \
12424 ((__m256) __builtin_ia32_insertf32x4_256_mask ((__v8sf)(__m256) (X), \
12425 (__v4sf)(__m128) (Y), (int) (C), \
12426 (__v8sf)(__m256)_mm256_setzero_ps (), \
12427 (__mmask8)-1))
12429 #define _mm256_mask_insertf32x4(W, U, X, Y, C) \
12430 ((__m256) __builtin_ia32_insertf32x4_256_mask ((__v8sf)(__m256) (X), \
12431 (__v4sf)(__m128) (Y), (int) (C), \
12432 (__v8sf)(__m256)(W), \
12433 (__mmask8)(U)))
12435 #define _mm256_maskz_insertf32x4(U, X, Y, C) \
12436 ((__m256) __builtin_ia32_insertf32x4_256_mask ((__v8sf)(__m256) (X), \
12437 (__v4sf)(__m128) (Y), (int) (C), \
12438 (__v8sf)(__m256)_mm256_setzero_ps (), \
12439 (__mmask8)(U)))
12441 #define _mm256_inserti32x4(X, Y, C) \
12442 ((__m256i) __builtin_ia32_inserti32x4_256_mask ((__v8si)(__m256i) (X),\
12443 (__v4si)(__m128i) (Y), (int) (C), \
12444 (__v8si)(__m256i)_mm256_setzero_si256 (), \
12445 (__mmask8)-1))
12447 #define _mm256_mask_inserti32x4(W, U, X, Y, C) \
12448 ((__m256i) __builtin_ia32_inserti32x4_256_mask ((__v8si)(__m256i) (X),\
12449 (__v4si)(__m128i) (Y), (int) (C), \
12450 (__v8si)(__m256i)(W), \
12451 (__mmask8)(U)))
12453 #define _mm256_maskz_inserti32x4(U, X, Y, C) \
12454 ((__m256i) __builtin_ia32_inserti32x4_256_mask ((__v8si)(__m256i) (X),\
12455 (__v4si)(__m128i) (Y), (int) (C), \
12456 (__v8si)(__m256i)_mm256_setzero_si256 (), \
12457 (__mmask8)(U)))
12459 #define _mm256_extractf32x4_ps(X, C) \
12460 ((__m128) __builtin_ia32_extractf32x4_256_mask ((__v8sf)(__m256) (X), \
12461 (int) (C), \
12462 (__v4sf)(__m128)_mm_setzero_ps (), \
12463 (__mmask8)-1))
12465 #define _mm256_mask_extractf32x4_ps(W, U, X, C) \
12466 ((__m128) __builtin_ia32_extractf32x4_256_mask ((__v8sf)(__m256) (X), \
12467 (int) (C), \
12468 (__v4sf)(__m128)(W), \
12469 (__mmask8)(U)))
12471 #define _mm256_maskz_extractf32x4_ps(U, X, C) \
12472 ((__m128) __builtin_ia32_extractf32x4_256_mask ((__v8sf)(__m256) (X), \
12473 (int) (C), \
12474 (__v4sf)(__m128)_mm_setzero_ps (), \
12475 (__mmask8)(U)))
12477 #define _mm256_extracti32x4_epi32(X, C) \
12478 ((__m128i) __builtin_ia32_extracti32x4_256_mask ((__v8si)(__m256i) (X),\
12479 (int) (C), (__v4si)(__m128i)_mm_setzero_si128 (), (__mmask8)-1))
12481 #define _mm256_mask_extracti32x4_epi32(W, U, X, C) \
12482 ((__m128i) __builtin_ia32_extracti32x4_256_mask ((__v8si)(__m256i) (X),\
12483 (int) (C), (__v4si)(__m128i)(W), (__mmask8)(U)))
12485 #define _mm256_maskz_extracti32x4_epi32(U, X, C) \
12486 ((__m128i) __builtin_ia32_extracti32x4_256_mask ((__v8si)(__m256i) (X),\
12487 (int) (C), (__v4si)(__m128i)_mm_setzero_si128 (), (__mmask8)(U)))
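/* _mm256_insertf32x4/_mm256_inserti32x4 replace the 128-bit lane of the
   256-bit source selected by the low bit of the immediate with the given
   128-bit value, while _mm256_extractf32x4_ps/_mm256_extracti32x4_epi32
   return the selected lane; the mask variants then merge or zero the result
   per 32-bit element.  Illustrative sketch for an arbitrary __m256 value v:

     __m128 hi = _mm256_extractf32x4_ps (v, 1);    upper 128 bits of v  */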
12489 #define _mm256_shuffle_i64x2(X, Y, C) \
12490 ((__m256i) __builtin_ia32_shuf_i64x2_256_mask ((__v4di)(__m256i)(X), \
12491 (__v4di)(__m256i)(Y), (int)(C), \
12492 (__v4di)(__m256i)_mm256_setzero_si256 (), \
12493 (__mmask8)-1))
12495 #define _mm256_mask_shuffle_i64x2(W, U, X, Y, C) \
12496 ((__m256i) __builtin_ia32_shuf_i64x2_256_mask ((__v4di)(__m256i)(X), \
12497 (__v4di)(__m256i)(Y), (int)(C), \
12498 (__v4di)(__m256i)(W),\
12499 (__mmask8)(U)))
12501 #define _mm256_maskz_shuffle_i64x2(U, X, Y, C) \
12502 ((__m256i) __builtin_ia32_shuf_i64x2_256_mask ((__v4di)(__m256i)(X), \
12503 (__v4di)(__m256i)(Y), (int)(C), \
12504 (__v4di)(__m256i)_mm256_setzero_si256 (), \
12505 (__mmask8)(U)))
12507 #define _mm256_shuffle_i32x4(X, Y, C) \
12508 ((__m256i) __builtin_ia32_shuf_i32x4_256_mask ((__v8si)(__m256i)(X), \
12509 (__v8si)(__m256i)(Y), (int)(C), \
12510 (__v8si)(__m256i) \
12511 _mm256_setzero_si256 (), \
12512 (__mmask8)-1))
12514 #define _mm256_mask_shuffle_i32x4(W, U, X, Y, C) \
12515 ((__m256i) __builtin_ia32_shuf_i32x4_256_mask ((__v8si)(__m256i)(X), \
12516 (__v8si)(__m256i)(Y), (int)(C), \
12517 (__v8si)(__m256i)(W), \
12518 (__mmask8)(U)))
12520 #define _mm256_maskz_shuffle_i32x4(U, X, Y, C) \
12521 ((__m256i) __builtin_ia32_shuf_i32x4_256_mask ((__v8si)(__m256i)(X), \
12522 (__v8si)(__m256i)(Y), (int)(C), \
12523 (__v8si)(__m256i) \
12524 _mm256_setzero_si256 (), \
12525 (__mmask8)(U)))
12527 #define _mm256_shuffle_f64x2(X, Y, C) \
12528 ((__m256d) __builtin_ia32_shuf_f64x2_256_mask ((__v4df)(__m256d)(X), \
12529 (__v4df)(__m256d)(Y), (int)(C), \
12530 (__v4df)(__m256d)_mm256_setzero_pd (),\
12531 (__mmask8)-1))
12533 #define _mm256_mask_shuffle_f64x2(W, U, X, Y, C) \
12534 ((__m256d) __builtin_ia32_shuf_f64x2_256_mask ((__v4df)(__m256d)(X), \
12535 (__v4df)(__m256d)(Y), (int)(C), \
12536 (__v4df)(__m256d)(W), \
12537 (__mmask8)(U)))
12539 #define _mm256_maskz_shuffle_f64x2(U, X, Y, C) \
12540 ((__m256d) __builtin_ia32_shuf_f64x2_256_mask ((__v4df)(__m256d)(X), \
12541 (__v4df)(__m256d)(Y), (int)(C), \
12542 						   (__v4df)(__m256d)_mm256_setzero_pd (),\
12543 (__mmask8)(U)))
12545 #define _mm256_shuffle_f32x4(X, Y, C) \
12546 ((__m256) __builtin_ia32_shuf_f32x4_256_mask ((__v8sf)(__m256)(X), \
12547 (__v8sf)(__m256)(Y), (int)(C), \
12548 (__v8sf)(__m256)_mm256_setzero_ps (), \
12549 (__mmask8)-1))
12551 #define _mm256_mask_shuffle_f32x4(W, U, X, Y, C) \
12552 ((__m256) __builtin_ia32_shuf_f32x4_256_mask ((__v8sf)(__m256)(X), \
12553 (__v8sf)(__m256)(Y), (int)(C), \
12554 (__v8sf)(__m256)(W), \
12555 (__mmask8)(U)))
12557 #define _mm256_maskz_shuffle_f32x4(U, X, Y, C) \
12558 ((__m256) __builtin_ia32_shuf_f32x4_256_mask ((__v8sf)(__m256)(X), \
12559 (__v8sf)(__m256)(Y), (int)(C), \
12560 (__v8sf)(__m256)_mm256_setzero_ps (), \
12561 (__mmask8)(U)))
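/* The _mm256_shuffle_{f,i}{64x2,32x4} macros build a 256-bit result from
   whole 128-bit lanes: bit 0 of the immediate picks the lane of X that
   becomes the low half and bit 1 picks the lane of Y that becomes the high
   half.  For arbitrary a and b:

     __m256i r = _mm256_shuffle_i64x2 (a, b, 0x3);

   puts the upper lane of a in the low half of r and the upper lane of b in
   the high half.  */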
12563 #define _mm256_mask_shuffle_pd(W, U, A, B, C) \
12564 ((__m256d)__builtin_ia32_shufpd256_mask ((__v4df)(__m256d)(A), \
12565 (__v4df)(__m256d)(B), (int)(C), \
12566 (__v4df)(__m256d)(W), \
12567 (__mmask8)(U)))
12569 #define _mm256_maskz_shuffle_pd(U, A, B, C) \
12570 ((__m256d)__builtin_ia32_shufpd256_mask ((__v4df)(__m256d)(A), \
12571 (__v4df)(__m256d)(B), (int)(C), \
12572 (__v4df)(__m256d) \
12573 _mm256_setzero_pd (), \
12574 (__mmask8)(U)))
12576 #define _mm_mask_shuffle_pd(W, U, A, B, C) \
12577 ((__m128d)__builtin_ia32_shufpd128_mask ((__v2df)(__m128d)(A), \
12578 (__v2df)(__m128d)(B), (int)(C), \
12579 (__v2df)(__m128d)(W), \
12580 (__mmask8)(U)))
12582 #define _mm_maskz_shuffle_pd(U, A, B, C) \
12583 ((__m128d)__builtin_ia32_shufpd128_mask ((__v2df)(__m128d)(A), \
12584 (__v2df)(__m128d)(B), (int)(C), \
12585 (__v2df)(__m128d)_mm_setzero_pd (), \
12586 (__mmask8)(U)))
12588 #define _mm256_mask_shuffle_ps(W, U, A, B, C) \
12589 ((__m256) __builtin_ia32_shufps256_mask ((__v8sf)(__m256)(A), \
12590 (__v8sf)(__m256)(B), (int)(C), \
12591 (__v8sf)(__m256)(W), \
12592 (__mmask8)(U)))
12594 #define _mm256_maskz_shuffle_ps(U, A, B, C) \
12595 ((__m256) __builtin_ia32_shufps256_mask ((__v8sf)(__m256)(A), \
12596 (__v8sf)(__m256)(B), (int)(C), \
12597 (__v8sf)(__m256)_mm256_setzero_ps (),\
12598 (__mmask8)(U)))
12600 #define _mm_mask_shuffle_ps(W, U, A, B, C) \
12601 ((__m128) __builtin_ia32_shufps128_mask ((__v4sf)(__m128)(A), \
12602 (__v4sf)(__m128)(B), (int)(C), \
12603 (__v4sf)(__m128)(W), \
12604 (__mmask8)(U)))
12606 #define _mm_maskz_shuffle_ps(U, A, B, C) \
12607 ((__m128) __builtin_ia32_shufps128_mask ((__v4sf)(__m128)(A), \
12608 (__v4sf)(__m128)(B), (int)(C), \
12609 (__v4sf)(__m128)_mm_setzero_ps (), \
12610 (__mmask8)(U)))
12612 #define _mm256_fixupimm_pd(X, Y, Z, C) \
12613 ((__m256d)__builtin_ia32_fixupimmpd256_mask ((__v4df)(__m256d)(X), \
12614 (__v4df)(__m256d)(Y), \
12615 (__v4di)(__m256i)(Z), (int)(C), \
12616 (__mmask8)(-1)))
12618 #define _mm256_mask_fixupimm_pd(X, U, Y, Z, C) \
12619 ((__m256d)__builtin_ia32_fixupimmpd256_mask ((__v4df)(__m256d)(X), \
12620 (__v4df)(__m256d)(Y), \
12621 (__v4di)(__m256i)(Z), (int)(C), \
12622 (__mmask8)(U)))
12624 #define _mm256_maskz_fixupimm_pd(U, X, Y, Z, C) \
12625 ((__m256d)__builtin_ia32_fixupimmpd256_maskz ((__v4df)(__m256d)(X), \
12626 (__v4df)(__m256d)(Y), \
12627 (__v4di)(__m256i)(Z), (int)(C),\
12628 (__mmask8)(U)))
12630 #define _mm256_fixupimm_ps(X, Y, Z, C) \
12631 ((__m256)__builtin_ia32_fixupimmps256_mask ((__v8sf)(__m256)(X), \
12632 (__v8sf)(__m256)(Y), \
12633 (__v8si)(__m256i)(Z), (int)(C), \
12634 (__mmask8)(-1)))
12637 #define _mm256_mask_fixupimm_ps(X, U, Y, Z, C) \
12638 ((__m256)__builtin_ia32_fixupimmps256_mask ((__v8sf)(__m256)(X), \
12639 (__v8sf)(__m256)(Y), \
12640 (__v8si)(__m256i)(Z), (int)(C), \
12641 (__mmask8)(U)))
12643 #define _mm256_maskz_fixupimm_ps(U, X, Y, Z, C) \
12644 ((__m256)__builtin_ia32_fixupimmps256_maskz ((__v8sf)(__m256)(X), \
12645 (__v8sf)(__m256)(Y), \
12646 (__v8si)(__m256i)(Z), (int)(C),\
12647 (__mmask8)(U)))
12649 #define _mm_fixupimm_pd(X, Y, Z, C) \
12650 ((__m128d)__builtin_ia32_fixupimmpd128_mask ((__v2df)(__m128d)(X), \
12651 (__v2df)(__m128d)(Y), \
12652 (__v2di)(__m128i)(Z), (int)(C), \
12653 (__mmask8)(-1)))
12656 #define _mm_mask_fixupimm_pd(X, U, Y, Z, C) \
12657 ((__m128d)__builtin_ia32_fixupimmpd128_mask ((__v2df)(__m128d)(X), \
12658 (__v2df)(__m128d)(Y), \
12659 (__v2di)(__m128i)(Z), (int)(C), \
12660 (__mmask8)(U)))
12662 #define _mm_maskz_fixupimm_pd(U, X, Y, Z, C) \
12663 ((__m128d)__builtin_ia32_fixupimmpd128_maskz ((__v2df)(__m128d)(X), \
12664 (__v2df)(__m128d)(Y), \
12665 (__v2di)(__m128i)(Z), (int)(C),\
12666 (__mmask8)(U)))
12668 #define _mm_fixupimm_ps(X, Y, Z, C) \
12669 ((__m128)__builtin_ia32_fixupimmps128_mask ((__v4sf)(__m128)(X), \
12670 (__v4sf)(__m128)(Y), \
12671 (__v4si)(__m128i)(Z), (int)(C), \
12672 (__mmask8)(-1)))
12674 #define _mm_mask_fixupimm_ps(X, U, Y, Z, C) \
12675 ((__m128)__builtin_ia32_fixupimmps128_mask ((__v4sf)(__m128)(X), \
12676 (__v4sf)(__m128)(Y), \
12677 (__v4si)(__m128i)(Z), (int)(C),\
12678 (__mmask8)(U)))
12680 #define _mm_maskz_fixupimm_ps(U, X, Y, Z, C) \
12681 ((__m128)__builtin_ia32_fixupimmps128_maskz ((__v4sf)(__m128)(X), \
12682 (__v4sf)(__m128)(Y), \
12683 (__v4si)(__m128i)(Z), (int)(C),\
12684 (__mmask8)(U)))
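/* The VFIXUPIMM macros above patch special floating-point values: broadly,
   each element of Y is classified (QNaN, +/-0, +/-Inf, negative, ...), the
   matching 4-bit field of the integer table Z selects the replacement value
   to store, and the immediate controls which exceptions may be signalled.
   The _maskz forms zero inactive elements; the _mask forms keep them
   from X.  */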
12686 #define _mm256_mask_srli_epi32(W, U, A, B) \
12687 ((__m256i) __builtin_ia32_psrldi256_mask ((__v8si)(__m256i)(A), \
12688 (int)(B), (__v8si)(__m256i)(W), (__mmask8)(U)))
12690 #define _mm256_maskz_srli_epi32(U, A, B) \
12691 ((__m256i) __builtin_ia32_psrldi256_mask ((__v8si)(__m256i)(A), \
12692 (int)(B), (__v8si)_mm256_setzero_si256 (), (__mmask8)(U)))
12694 #define _mm_mask_srli_epi32(W, U, A, B) \
12695 ((__m128i) __builtin_ia32_psrldi128_mask ((__v4si)(__m128i)(A), \
12696 (int)(B), (__v4si)(__m128i)(W), (__mmask8)(U)))
12698 #define _mm_maskz_srli_epi32(U, A, B) \
12699 ((__m128i) __builtin_ia32_psrldi128_mask ((__v4si)(__m128i)(A), \
12700 (int)(B), (__v4si)_mm_setzero_si128 (), (__mmask8)(U)))
12702 #define _mm256_mask_srli_epi64(W, U, A, B) \
12703 ((__m256i) __builtin_ia32_psrlqi256_mask ((__v4di)(__m256i)(A), \
12704 (int)(B), (__v4di)(__m256i)(W), (__mmask8)(U)))
12706 #define _mm256_maskz_srli_epi64(U, A, B) \
12707 ((__m256i) __builtin_ia32_psrlqi256_mask ((__v4di)(__m256i)(A), \
12708 (int)(B), (__v4di)_mm256_setzero_si256 (), (__mmask8)(U)))
12710 #define _mm_mask_srli_epi64(W, U, A, B) \
12711 ((__m128i) __builtin_ia32_psrlqi128_mask ((__v2di)(__m128i)(A), \
12712 (int)(B), (__v2di)(__m128i)(W), (__mmask8)(U)))
12714 #define _mm_maskz_srli_epi64(U, A, B) \
12715 ((__m128i) __builtin_ia32_psrlqi128_mask ((__v2di)(__m128i)(A), \
12716 (int)(B), (__v2di)_mm_setzero_si128 (), (__mmask8)(U)))
12718 #define _mm256_mask_slli_epi32(W, U, X, C) \
12719 ((__m256i)__builtin_ia32_pslldi256_mask ((__v8si)(__m256i)(X), (int)(C),\
12720 (__v8si)(__m256i)(W), \
12721 (__mmask8)(U)))
12723 #define _mm256_maskz_slli_epi32(U, X, C) \
12724 ((__m256i)__builtin_ia32_pslldi256_mask ((__v8si)(__m256i)(X), (int)(C),\
12725 (__v8si)(__m256i)_mm256_setzero_si256 (), \
12726 (__mmask8)(U)))
12728 #define _mm256_mask_slli_epi64(W, U, X, C) \
12729 ((__m256i)__builtin_ia32_psllqi256_mask ((__v4di)(__m256i)(X), (int)(C),\
12730 (__v4di)(__m256i)(W), \
12731 (__mmask8)(U)))
12733 #define _mm256_maskz_slli_epi64(U, X, C) \
12734 ((__m256i)__builtin_ia32_psllqi256_mask ((__v4di)(__m256i)(X), (int)(C),\
12735 (__v4di)(__m256i)_mm256_setzero_si256 (), \
12736 (__mmask8)(U)))
12738 #define _mm_mask_slli_epi32(W, U, X, C) \
12739 ((__m128i)__builtin_ia32_pslldi128_mask ((__v4si)(__m128i)(X), (int)(C),\
12740 (__v4si)(__m128i)(W),\
12741 (__mmask8)(U)))
12743 #define _mm_maskz_slli_epi32(U, X, C) \
12744 ((__m128i)__builtin_ia32_pslldi128_mask ((__v4si)(__m128i)(X), (int)(C),\
12745 (__v4si)(__m128i)_mm_setzero_si128 (),\
12746 (__mmask8)(U)))
12748 #define _mm_mask_slli_epi64(W, U, X, C) \
12749 ((__m128i)__builtin_ia32_psllqi128_mask ((__v2di)(__m128i)(X), (int)(C),\
12750 (__v2di)(__m128i)(W),\
12751 (__mmask8)(U)))
12753 #define _mm_maskz_slli_epi64(U, X, C) \
12754 ((__m128i)__builtin_ia32_psllqi128_mask ((__v2di)(__m128i)(X), (int)(C),\
12755 (__v2di)(__m128i)_mm_setzero_si128 (),\
12756 (__mmask8)(U)))
12758 #define _mm256_ternarylogic_epi64(A, B, C, I) \
12759 ((__m256i) __builtin_ia32_pternlogq256_mask ((__v4di)(__m256i)(A), \
12760 (__v4di)(__m256i)(B), (__v4di)(__m256i)(C), (int)(I), (__mmask8)-1))
12762 #define _mm256_mask_ternarylogic_epi64(A, U, B, C, I) \
12763 ((__m256i) __builtin_ia32_pternlogq256_mask ((__v4di)(__m256i)(A), \
12764 (__v4di)(__m256i)(B), (__v4di)(__m256i)(C), (int)(I), (__mmask8)(U)))
12766 #define _mm256_maskz_ternarylogic_epi64(U, A, B, C, I) \
12767 ((__m256i) __builtin_ia32_pternlogq256_maskz ((__v4di)(__m256i)(A), \
12768 (__v4di)(__m256i)(B), (__v4di)(__m256i)(C), (int)(I), (__mmask8)(U)))
12770 #define _mm256_ternarylogic_epi32(A, B, C, I) \
12771 ((__m256i) __builtin_ia32_pternlogd256_mask ((__v8si)(__m256i)(A), \
12772 (__v8si)(__m256i)(B), (__v8si)(__m256i)(C), (int)(I), (__mmask8)-1))
12774 #define _mm256_mask_ternarylogic_epi32(A, U, B, C, I) \
12775 ((__m256i) __builtin_ia32_pternlogd256_mask ((__v8si)(__m256i)(A), \
12776 (__v8si)(__m256i)(B), (__v8si)(__m256i)(C), (int)(I), (__mmask8)(U)))
12778 #define _mm256_maskz_ternarylogic_epi32(U, A, B, C, I) \
12779 ((__m256i) __builtin_ia32_pternlogd256_maskz ((__v8si)(__m256i)(A), \
12780 (__v8si)(__m256i)(B), (__v8si)(__m256i)(C), (int)(I), (__mmask8)(U)))
12782 #define _mm_ternarylogic_epi64(A, B, C, I) \
12783 ((__m128i) __builtin_ia32_pternlogq128_mask ((__v2di)(__m128i)(A), \
12784 (__v2di)(__m128i)(B), (__v2di)(__m128i)(C), (int)(I), (__mmask8)-1))
12786 #define _mm_mask_ternarylogic_epi64(A, U, B, C, I) \
12787 ((__m128i) __builtin_ia32_pternlogq128_mask ((__v2di)(__m128i)(A), \
12788 (__v2di)(__m128i)(B), (__v2di)(__m128i)(C), (int)(I), (__mmask8)(U)))
12790 #define _mm_maskz_ternarylogic_epi64(U, A, B, C, I) \
12791 ((__m128i) __builtin_ia32_pternlogq128_maskz ((__v2di)(__m128i)(A), \
12792 (__v2di)(__m128i)(B), (__v2di)(__m128i)(C), (int)(I), (__mmask8)(U)))
12794 #define _mm_ternarylogic_epi32(A, B, C, I) \
12795 ((__m128i) __builtin_ia32_pternlogd128_mask ((__v4si)(__m128i)(A), \
12796 (__v4si)(__m128i)(B), (__v4si)(__m128i)(C), (int)(I), (__mmask8)-1))
12798 #define _mm_mask_ternarylogic_epi32(A, U, B, C, I) \
12799 ((__m128i) __builtin_ia32_pternlogd128_mask ((__v4si)(__m128i)(A), \
12800 (__v4si)(__m128i)(B), (__v4si)(__m128i)(C), (int)(I), (__mmask8)(U)))
12802 #define _mm_maskz_ternarylogic_epi32(U, A, B, C, I) \
12803 ((__m128i) __builtin_ia32_pternlogd128_maskz ((__v4si)(__m128i)(A), \
12804 (__v4si)(__m128i)(B), (__v4si)(__m128i)(C), (int)(I), (__mmask8)(U)))
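/* VPTERNLOG evaluates an arbitrary three-input boolean function bit by bit;
   the immediate is the eight-entry truth table of that function.  For
   example 0x96 is three-way XOR and 0xE8 is the bitwise majority function:

     __m256i x3 = _mm256_ternarylogic_epi64 (a, b, c, 0x96);

   computes a ^ b ^ c for arbitrary a, b and c.  */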
12806 #define _mm256_roundscale_ps(A, B) \
12807 ((__m256) __builtin_ia32_rndscaleps_256_mask ((__v8sf)(__m256)(A), \
12808 (int)(B), (__v8sf)(__m256)_mm256_setzero_ps (), (__mmask8)-1))
12810 #define _mm256_mask_roundscale_ps(W, U, A, B) \
12811 ((__m256) __builtin_ia32_rndscaleps_256_mask ((__v8sf)(__m256)(A), \
12812 (int)(B), (__v8sf)(__m256)(W), (__mmask8)(U)))
12814 #define _mm256_maskz_roundscale_ps(U, A, B) \
12815 ((__m256) __builtin_ia32_rndscaleps_256_mask ((__v8sf)(__m256)(A), \
12816 (int)(B), (__v8sf)(__m256)_mm256_setzero_ps (), (__mmask8)(U)))
12818 #define _mm256_roundscale_pd(A, B) \
12819 ((__m256d) __builtin_ia32_rndscalepd_256_mask ((__v4df)(__m256d)(A), \
12820 (int)(B), (__v4df)(__m256d)_mm256_setzero_pd (), (__mmask8)-1))
12822 #define _mm256_mask_roundscale_pd(W, U, A, B) \
12823 ((__m256d) __builtin_ia32_rndscalepd_256_mask ((__v4df)(__m256d)(A), \
12824 (int)(B), (__v4df)(__m256d)(W), (__mmask8)(U)))
12826 #define _mm256_maskz_roundscale_pd(U, A, B) \
12827 ((__m256d) __builtin_ia32_rndscalepd_256_mask ((__v4df)(__m256d)(A), \
12828 (int)(B), (__v4df)(__m256d)_mm256_setzero_pd (), (__mmask8)(U)))
12830 #define _mm_roundscale_ps(A, B) \
12831 ((__m128) __builtin_ia32_rndscaleps_128_mask ((__v4sf)(__m128)(A), \
12832 (int)(B), (__v4sf)(__m128)_mm_setzero_ps (), (__mmask8)-1))
12834 #define _mm_mask_roundscale_ps(W, U, A, B) \
12835 ((__m128) __builtin_ia32_rndscaleps_128_mask ((__v4sf)(__m128)(A), \
12836 (int)(B), (__v4sf)(__m128)(W), (__mmask8)(U)))
12838 #define _mm_maskz_roundscale_ps(U, A, B) \
12839 ((__m128) __builtin_ia32_rndscaleps_128_mask ((__v4sf)(__m128)(A), \
12840 (int)(B), (__v4sf)(__m128)_mm_setzero_ps (), (__mmask8)(U)))
12842 #define _mm_roundscale_pd(A, B) \
12843 ((__m128d) __builtin_ia32_rndscalepd_128_mask ((__v2df)(__m128d)(A), \
12844 (int)(B), (__v2df)(__m128d)_mm_setzero_pd (), (__mmask8)-1))
12846 #define _mm_mask_roundscale_pd(W, U, A, B) \
12847 ((__m128d) __builtin_ia32_rndscalepd_128_mask ((__v2df)(__m128d)(A), \
12848 (int)(B), (__v2df)(__m128d)(W), (__mmask8)(U)))
12850 #define _mm_maskz_roundscale_pd(U, A, B) \
12851 ((__m128d) __builtin_ia32_rndscalepd_128_mask ((__v2df)(__m128d)(A), \
12852 (int)(B), (__v2df)(__m128d)_mm_setzero_pd (), (__mmask8)(U)))
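/* VRNDSCALEPS/VRNDSCALEPD round each element to 2^-M precision, where M is
   the upper four bits of the immediate and the low bits select the rounding
   mode; an immediate of 0 simply rounds to the nearest integer.  For an
   arbitrary __m128d value x:

     __m128d r = _mm_roundscale_pd (x, 0);  */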
12854 #define _mm256_getmant_ps(X, B, C) \
12855 ((__m256) __builtin_ia32_getmantps256_mask ((__v8sf)(__m256) (X), \
12856 (int)(((C)<<2) | (B)), \
12857 (__v8sf)(__m256)_mm256_setzero_ps (), \
12858 (__mmask8)-1))
12860 #define _mm256_mask_getmant_ps(W, U, X, B, C) \
12861 ((__m256) __builtin_ia32_getmantps256_mask ((__v8sf)(__m256) (X), \
12862 (int)(((C)<<2) | (B)), \
12863 (__v8sf)(__m256)(W), \
12864 (__mmask8)(U)))
12866 #define _mm256_maskz_getmant_ps(U, X, B, C) \
12867 ((__m256) __builtin_ia32_getmantps256_mask ((__v8sf)(__m256) (X), \
12868 (int)(((C)<<2) | (B)), \
12869 (__v8sf)(__m256)_mm256_setzero_ps (), \
12870 (__mmask8)(U)))
12872 #define _mm_getmant_ps(X, B, C) \
12873 ((__m128) __builtin_ia32_getmantps128_mask ((__v4sf)(__m128) (X), \
12874 (int)(((C)<<2) | (B)), \
12875 (__v4sf)(__m128)_mm_setzero_ps (), \
12876 (__mmask8)-1))
12878 #define _mm_mask_getmant_ps(W, U, X, B, C) \
12879 ((__m128) __builtin_ia32_getmantps128_mask ((__v4sf)(__m128) (X), \
12880 (int)(((C)<<2) | (B)), \
12881 (__v4sf)(__m128)(W), \
12882 (__mmask8)(U)))
12884 #define _mm_maskz_getmant_ps(U, X, B, C) \
12885 ((__m128) __builtin_ia32_getmantps128_mask ((__v4sf)(__m128) (X), \
12886 (int)(((C)<<2) | (B)), \
12887 (__v4sf)(__m128)_mm_setzero_ps (), \
12888 (__mmask8)(U)))
12890 #define _mm256_getmant_pd(X, B, C) \
12891 ((__m256d) __builtin_ia32_getmantpd256_mask ((__v4df)(__m256d) (X), \
12892 (int)(((C)<<2) | (B)), \
12893 (__v4df)(__m256d)_mm256_setzero_pd (),\
12894 (__mmask8)-1))
12896 #define _mm256_mask_getmant_pd(W, U, X, B, C) \
12897 ((__m256d) __builtin_ia32_getmantpd256_mask ((__v4df)(__m256d) (X), \
12898 (int)(((C)<<2) | (B)), \
12899 (__v4df)(__m256d)(W), \
12900 (__mmask8)(U)))
12902 #define _mm256_maskz_getmant_pd(U, X, B, C) \
12903 ((__m256d) __builtin_ia32_getmantpd256_mask ((__v4df)(__m256d) (X), \
12904 (int)(((C)<<2) | (B)), \
12905 (__v4df)(__m256d)_mm256_setzero_pd (),\
12906 (__mmask8)(U)))
12908 #define _mm_getmant_pd(X, B, C) \
12909 ((__m128d) __builtin_ia32_getmantpd128_mask ((__v2df)(__m128d) (X), \
12910 (int)(((C)<<2) | (B)), \
12911 (__v2df)(__m128d)_mm_setzero_pd (), \
12912 (__mmask8)-1))
12914 #define _mm_mask_getmant_pd(W, U, X, B, C) \
12915 ((__m128d) __builtin_ia32_getmantpd128_mask ((__v2df)(__m128d) (X), \
12916 (int)(((C)<<2) | (B)), \
12917 (__v2df)(__m128d)(W), \
12918 (__mmask8)(U)))
12920 #define _mm_maskz_getmant_pd(U, X, B, C) \
12921 ((__m128d) __builtin_ia32_getmantpd128_mask ((__v2df)(__m128d) (X), \
12922 (int)(((C)<<2) | (B)), \
12923 (__v2df)(__m128d)_mm_setzero_pd (), \
12924 (__mmask8)(U)))
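/* The VGETMANT macros extract the mantissa of each element, normalised to
   the interval chosen by B and with the sign handling chosen by C (the
   _MM_MANT_NORM_* and _MM_MANT_SIGN_* enumerators); the builtin receives
   the two fields packed as ((C) << 2) | (B).  Illustrative sketch for an
   arbitrary __m128 value x:

     __m128 m = _mm_getmant_ps (x, _MM_MANT_NORM_1_2, _MM_MANT_SIGN_src);  */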
12926 #define _mm256_mmask_i32gather_ps(V1OLD, MASK, INDEX, ADDR, SCALE) \
12927 (__m256) __builtin_ia32_gather3siv8sf ((__v8sf)(__m256)V1OLD, \
12928 (void const *)ADDR, \
12929 (__v8si)(__m256i)INDEX, \
12930 (__mmask8)MASK, (int)SCALE)
12932 #define _mm_mmask_i32gather_ps(V1OLD, MASK, INDEX, ADDR, SCALE) \
12933 (__m128) __builtin_ia32_gather3siv4sf ((__v4sf)(__m128)V1OLD, \
12934 (void const *)ADDR, \
12935 (__v4si)(__m128i)INDEX, \
12936 (__mmask8)MASK, (int)SCALE)
12938 #define _mm256_mmask_i32gather_pd(V1OLD, MASK, INDEX, ADDR, SCALE) \
12939 (__m256d) __builtin_ia32_gather3siv4df ((__v4df)(__m256d)V1OLD, \
12940 (void const *)ADDR, \
12941 (__v4si)(__m128i)INDEX, \
12942 (__mmask8)MASK, (int)SCALE)
12944 #define _mm_mmask_i32gather_pd(V1OLD, MASK, INDEX, ADDR, SCALE) \
12945 (__m128d) __builtin_ia32_gather3siv2df ((__v2df)(__m128d)V1OLD, \
12946 (void const *)ADDR, \
12947 (__v4si)(__m128i)INDEX, \
12948 (__mmask8)MASK, (int)SCALE)
12950 #define _mm256_mmask_i64gather_ps(V1OLD, MASK, INDEX, ADDR, SCALE) \
12951 (__m128) __builtin_ia32_gather3div8sf ((__v4sf)(__m128)V1OLD, \
12952 (void const *)ADDR, \
12953 (__v4di)(__m256i)INDEX, \
12954 (__mmask8)MASK, (int)SCALE)
12956 #define _mm_mmask_i64gather_ps(V1OLD, MASK, INDEX, ADDR, SCALE) \
12957 (__m128) __builtin_ia32_gather3div4sf ((__v4sf)(__m128)V1OLD, \
12958 (void const *)ADDR, \
12959 (__v2di)(__m128i)INDEX, \
12960 (__mmask8)MASK, (int)SCALE)
12962 #define _mm256_mmask_i64gather_pd(V1OLD, MASK, INDEX, ADDR, SCALE) \
12963 (__m256d) __builtin_ia32_gather3div4df ((__v4df)(__m256d)V1OLD, \
12964 (void const *)ADDR, \
12965 (__v4di)(__m256i)INDEX, \
12966 (__mmask8)MASK, (int)SCALE)
12968 #define _mm_mmask_i64gather_pd(V1OLD, MASK, INDEX, ADDR, SCALE) \
12969 (__m128d) __builtin_ia32_gather3div2df ((__v2df)(__m128d)V1OLD, \
12970 (void const *)ADDR, \
12971 (__v2di)(__m128i)INDEX, \
12972 (__mmask8)MASK, (int)SCALE)
12974 #define _mm256_mmask_i32gather_epi32(V1OLD, MASK, INDEX, ADDR, SCALE) \
12975 (__m256i) __builtin_ia32_gather3siv8si ((__v8si)(__m256i)V1OLD, \
12976 (void const *)ADDR, \
12977 (__v8si)(__m256i)INDEX, \
12978 (__mmask8)MASK, (int)SCALE)
12980 #define _mm_mmask_i32gather_epi32(V1OLD, MASK, INDEX, ADDR, SCALE) \
12981 (__m128i) __builtin_ia32_gather3siv4si ((__v4si)(__m128i)V1OLD, \
12982 (void const *)ADDR, \
12983 (__v4si)(__m128i)INDEX, \
12984 (__mmask8)MASK, (int)SCALE)
12986 #define _mm256_mmask_i32gather_epi64(V1OLD, MASK, INDEX, ADDR, SCALE) \
12987 (__m256i) __builtin_ia32_gather3siv4di ((__v4di)(__m256i)V1OLD, \
12988 (void const *)ADDR, \
12989 (__v4si)(__m128i)INDEX, \
12990 (__mmask8)MASK, (int)SCALE)
12992 #define _mm_mmask_i32gather_epi64(V1OLD, MASK, INDEX, ADDR, SCALE) \
12993 (__m128i) __builtin_ia32_gather3siv2di ((__v2di)(__m128i)V1OLD, \
12994 (void const *)ADDR, \
12995 (__v4si)(__m128i)INDEX, \
12996 (__mmask8)MASK, (int)SCALE)
12998 #define _mm256_mmask_i64gather_epi32(V1OLD, MASK, INDEX, ADDR, SCALE) \
12999 (__m128i) __builtin_ia32_gather3div8si ((__v4si)(__m128i)V1OLD, \
13000 (void const *)ADDR, \
13001 (__v4di)(__m256i)INDEX, \
13002 (__mmask8)MASK, (int)SCALE)
13004 #define _mm_mmask_i64gather_epi32(V1OLD, MASK, INDEX, ADDR, SCALE) \
13005 (__m128i) __builtin_ia32_gather3div4si ((__v4si)(__m128i)V1OLD, \
13006 (void const *)ADDR, \
13007 (__v2di)(__m128i)INDEX, \
13008 (__mmask8)MASK, (int)SCALE)
13010 #define _mm256_mmask_i64gather_epi64(V1OLD, MASK, INDEX, ADDR, SCALE) \
13011 (__m256i) __builtin_ia32_gather3div4di ((__v4di)(__m256i)V1OLD, \
13012 (void const *)ADDR, \
13013 (__v4di)(__m256i)INDEX, \
13014 (__mmask8)MASK, (int)SCALE)
13016 #define _mm_mmask_i64gather_epi64(V1OLD, MASK, INDEX, ADDR, SCALE) \
13017 (__m128i) __builtin_ia32_gather3div2di ((__v2di)(__m128i)V1OLD, \
13018 (void const *)ADDR, \
13019 (__v2di)(__m128i)INDEX, \
13020 (__mmask8)MASK, (int)SCALE)
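/* The masked gather macros above load each active element from
   ADDR + INDEX[i] * SCALE, where SCALE must be 1, 2, 4 or 8, and keep the
   value from V1OLD for elements whose bit in MASK is clear.  Illustrative
   sketch, with old a __m256d, idx a __m128i of four 32-bit indices and base
   a pointer to doubles:

     __m256d r = _mm256_mmask_i32gather_pd (old, 0xf, idx, base, 8);  */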
13022 #define _mm256_i32scatter_ps(ADDR, INDEX, V1, SCALE) \
13023 __builtin_ia32_scattersiv8sf ((void *)ADDR, (__mmask8)0xFF, \
13024 (__v8si)(__m256i)INDEX, \
13025 (__v8sf)(__m256)V1, (int)SCALE)
13027 #define _mm256_mask_i32scatter_ps(ADDR, MASK, INDEX, V1, SCALE) \
13028 __builtin_ia32_scattersiv8sf ((void *)ADDR, (__mmask8)MASK, \
13029 (__v8si)(__m256i)INDEX, \
13030 (__v8sf)(__m256)V1, (int)SCALE)
13032 #define _mm_i32scatter_ps(ADDR, INDEX, V1, SCALE) \
13033 __builtin_ia32_scattersiv4sf ((void *)ADDR, (__mmask8)0xFF, \
13034 (__v4si)(__m128i)INDEX, \
13035 (__v4sf)(__m128)V1, (int)SCALE)
13037 #define _mm_mask_i32scatter_ps(ADDR, MASK, INDEX, V1, SCALE) \
13038 __builtin_ia32_scattersiv4sf ((void *)ADDR, (__mmask8)MASK, \
13039 (__v4si)(__m128i)INDEX, \
13040 (__v4sf)(__m128)V1, (int)SCALE)
13042 #define _mm256_i32scatter_pd(ADDR, INDEX, V1, SCALE) \
13043 __builtin_ia32_scattersiv4df ((void *)ADDR, (__mmask8)0xFF, \
13044 (__v4si)(__m128i)INDEX, \
13045 (__v4df)(__m256d)V1, (int)SCALE)
13047 #define _mm256_mask_i32scatter_pd(ADDR, MASK, INDEX, V1, SCALE) \
13048 __builtin_ia32_scattersiv4df ((void *)ADDR, (__mmask8)MASK, \
13049 (__v4si)(__m128i)INDEX, \
13050 (__v4df)(__m256d)V1, (int)SCALE)
13052 #define _mm_i32scatter_pd(ADDR, INDEX, V1, SCALE) \
13053 __builtin_ia32_scattersiv2df ((void *)ADDR, (__mmask8)0xFF, \
13054 (__v4si)(__m128i)INDEX, \
13055 (__v2df)(__m128d)V1, (int)SCALE)
13057 #define _mm_mask_i32scatter_pd(ADDR, MASK, INDEX, V1, SCALE) \
13058 __builtin_ia32_scattersiv2df ((void *)ADDR, (__mmask8)MASK, \
13059 (__v4si)(__m128i)INDEX, \
13060 (__v2df)(__m128d)V1, (int)SCALE)
13062 #define _mm256_i64scatter_ps(ADDR, INDEX, V1, SCALE) \
13063 __builtin_ia32_scatterdiv8sf ((void *)ADDR, (__mmask8)0xFF, \
13064 (__v4di)(__m256i)INDEX, \
13065 (__v4sf)(__m128)V1, (int)SCALE)
13067 #define _mm256_mask_i64scatter_ps(ADDR, MASK, INDEX, V1, SCALE) \
13068 __builtin_ia32_scatterdiv8sf ((void *)ADDR, (__mmask8)MASK, \
13069 (__v4di)(__m256i)INDEX, \
13070 (__v4sf)(__m128)V1, (int)SCALE)
13072 #define _mm_i64scatter_ps(ADDR, INDEX, V1, SCALE) \
13073 __builtin_ia32_scatterdiv4sf ((void *)ADDR, (__mmask8)0xFF, \
13074 (__v2di)(__m128i)INDEX, \
13075 (__v4sf)(__m128)V1, (int)SCALE)
13077 #define _mm_mask_i64scatter_ps(ADDR, MASK, INDEX, V1, SCALE) \
13078 __builtin_ia32_scatterdiv4sf ((void *)ADDR, (__mmask8)MASK, \
13079 (__v2di)(__m128i)INDEX, \
13080 (__v4sf)(__m128)V1, (int)SCALE)
13082 #define _mm256_i64scatter_pd(ADDR, INDEX, V1, SCALE) \
  __builtin_ia32_scatterdiv4df ((void *)ADDR, (__mmask8)0xFF, \
      (__v4di)(__m256i)INDEX, \
      (__v4df)(__m256d)V1, (int)SCALE)

#define _mm256_mask_i64scatter_pd(ADDR, MASK, INDEX, V1, SCALE) \
  __builtin_ia32_scatterdiv4df ((void *)ADDR, (__mmask8)MASK, \
      (__v4di)(__m256i)INDEX, \
      (__v4df)(__m256d)V1, (int)SCALE)

#define _mm_i64scatter_pd(ADDR, INDEX, V1, SCALE) \
  __builtin_ia32_scatterdiv2df ((void *)ADDR, (__mmask8)0xFF, \
      (__v2di)(__m128i)INDEX, \
      (__v2df)(__m128d)V1, (int)SCALE)

#define _mm_mask_i64scatter_pd(ADDR, MASK, INDEX, V1, SCALE) \
  __builtin_ia32_scatterdiv2df ((void *)ADDR, (__mmask8)MASK, \
      (__v2di)(__m128i)INDEX, \
      (__v2df)(__m128d)V1, (int)SCALE)

#define _mm256_i32scatter_epi32(ADDR, INDEX, V1, SCALE) \
  __builtin_ia32_scattersiv8si ((void *)ADDR, (__mmask8)0xFF, \
      (__v8si)(__m256i)INDEX, \
      (__v8si)(__m256i)V1, (int)SCALE)

#define _mm256_mask_i32scatter_epi32(ADDR, MASK, INDEX, V1, SCALE) \
  __builtin_ia32_scattersiv8si ((void *)ADDR, (__mmask8)MASK, \
      (__v8si)(__m256i)INDEX, \
      (__v8si)(__m256i)V1, (int)SCALE)

#define _mm_i32scatter_epi32(ADDR, INDEX, V1, SCALE) \
  __builtin_ia32_scattersiv4si ((void *)ADDR, (__mmask8)0xFF, \
      (__v4si)(__m128i)INDEX, \
      (__v4si)(__m128i)V1, (int)SCALE)

#define _mm_mask_i32scatter_epi32(ADDR, MASK, INDEX, V1, SCALE) \
  __builtin_ia32_scattersiv4si ((void *)ADDR, (__mmask8)MASK, \
      (__v4si)(__m128i)INDEX, \
      (__v4si)(__m128i)V1, (int)SCALE)

#define _mm256_i32scatter_epi64(ADDR, INDEX, V1, SCALE) \
  __builtin_ia32_scattersiv4di ((void *)ADDR, (__mmask8)0xFF, \
      (__v4si)(__m128i)INDEX, \
      (__v4di)(__m256i)V1, (int)SCALE)

#define _mm256_mask_i32scatter_epi64(ADDR, MASK, INDEX, V1, SCALE) \
  __builtin_ia32_scattersiv4di ((void *)ADDR, (__mmask8)MASK, \
      (__v4si)(__m128i)INDEX, \
      (__v4di)(__m256i)V1, (int)SCALE)

#define _mm_i32scatter_epi64(ADDR, INDEX, V1, SCALE) \
  __builtin_ia32_scattersiv2di ((void *)ADDR, (__mmask8)0xFF, \
      (__v4si)(__m128i)INDEX, \
      (__v2di)(__m128i)V1, (int)SCALE)

#define _mm_mask_i32scatter_epi64(ADDR, MASK, INDEX, V1, SCALE) \
  __builtin_ia32_scattersiv2di ((void *)ADDR, (__mmask8)MASK, \
      (__v4si)(__m128i)INDEX, \
      (__v2di)(__m128i)V1, (int)SCALE)

#define _mm256_i64scatter_epi32(ADDR, INDEX, V1, SCALE) \
  __builtin_ia32_scatterdiv8si ((void *)ADDR, (__mmask8)0xFF, \
      (__v4di)(__m256i)INDEX, \
      (__v4si)(__m128i)V1, (int)SCALE)

#define _mm256_mask_i64scatter_epi32(ADDR, MASK, INDEX, V1, SCALE) \
  __builtin_ia32_scatterdiv8si ((void *)ADDR, (__mmask8)MASK, \
      (__v4di)(__m256i)INDEX, \
      (__v4si)(__m128i)V1, (int)SCALE)

#define _mm_i64scatter_epi32(ADDR, INDEX, V1, SCALE) \
  __builtin_ia32_scatterdiv4si ((void *)ADDR, (__mmask8)0xFF, \
      (__v2di)(__m128i)INDEX, \
      (__v4si)(__m128i)V1, (int)SCALE)

#define _mm_mask_i64scatter_epi32(ADDR, MASK, INDEX, V1, SCALE) \
  __builtin_ia32_scatterdiv4si ((void *)ADDR, (__mmask8)MASK, \
      (__v2di)(__m128i)INDEX, \
      (__v4si)(__m128i)V1, (int)SCALE)

#define _mm256_i64scatter_epi64(ADDR, INDEX, V1, SCALE) \
  __builtin_ia32_scatterdiv4di ((void *)ADDR, (__mmask8)0xFF, \
      (__v4di)(__m256i)INDEX, \
      (__v4di)(__m256i)V1, (int)SCALE)

#define _mm256_mask_i64scatter_epi64(ADDR, MASK, INDEX, V1, SCALE) \
  __builtin_ia32_scatterdiv4di ((void *)ADDR, (__mmask8)MASK, \
      (__v4di)(__m256i)INDEX, \
      (__v4di)(__m256i)V1, (int)SCALE)

#define _mm_i64scatter_epi64(ADDR, INDEX, V1, SCALE) \
  __builtin_ia32_scatterdiv2di ((void *)ADDR, (__mmask8)0xFF, \
      (__v2di)(__m128i)INDEX, \
      (__v2di)(__m128i)V1, (int)SCALE)

#define _mm_mask_i64scatter_epi64(ADDR, MASK, INDEX, V1, SCALE) \
  __builtin_ia32_scatterdiv2di ((void *)ADDR, (__mmask8)MASK, \
      (__v2di)(__m128i)INDEX, \
      (__v2di)(__m128i)V1, (int)SCALE)
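
/* The scatter macros above store the selected elements of V1 to memory at
   ADDR + INDEX[i] * SCALE; the _mask_ variants store only the elements whose
   bit in MASK is set, and SCALE must be 1, 2, 4 or 8.  Illustrative use only
   (the local names below are hypothetical, not part of this header):

     double dst[8];
     __m256i idx = _mm256_set_epi64x (6, 4, 2, 0);
     __m256d val = _mm256_set1_pd (1.0);
     _mm256_mask_i64scatter_pd (dst, 0x5, idx, val, 8);

   writes 1.0 to dst[0] and dst[4], since only mask bits 0 and 2 are set.  */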

#define _mm256_mask_shuffle_epi32(W, U, X, C) \
  ((__m256i) __builtin_ia32_pshufd256_mask ((__v8si)(__m256i)(X), (int)(C), \
      (__v8si)(__m256i)(W), \
      (__mmask8)(U)))

#define _mm256_maskz_shuffle_epi32(U, X, C) \
  ((__m256i) __builtin_ia32_pshufd256_mask ((__v8si)(__m256i)(X), (int)(C), \
      (__v8si)(__m256i)_mm256_setzero_si256 (), \
      (__mmask8)(U)))

#define _mm_mask_shuffle_epi32(W, U, X, C) \
  ((__m128i) __builtin_ia32_pshufd128_mask ((__v4si)(__m128i)(X), (int)(C), \
      (__v4si)(__m128i)(W), \
      (__mmask8)(U)))

#define _mm_maskz_shuffle_epi32(U, X, C) \
  ((__m128i) __builtin_ia32_pshufd128_mask ((__v4si)(__m128i)(X), (int)(C), \
      (__v4si)(__m128i)_mm_setzero_si128 (), \
      (__mmask8)(U)))
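
/* The rotate macros below rotate each 64-bit element of A left (rol) or
   right (ror) by the immediate bit count B.  The _mask_ forms take the
   unselected result elements from W, the _maskz_ forms zero them.  */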

#define _mm256_rol_epi64(A, B) \
  ((__m256i)__builtin_ia32_prolq256_mask ((__v4di)(__m256i)(A), (int)(B), \
      (__v4di)(__m256i)_mm256_setzero_si256 (), \
      (__mmask8)-1))

#define _mm256_mask_rol_epi64(W, U, A, B) \
  ((__m256i)__builtin_ia32_prolq256_mask ((__v4di)(__m256i)(A), (int)(B), \
      (__v4di)(__m256i)(W), \
      (__mmask8)(U)))

#define _mm256_maskz_rol_epi64(U, A, B) \
  ((__m256i)__builtin_ia32_prolq256_mask ((__v4di)(__m256i)(A), (int)(B), \
      (__v4di)(__m256i)_mm256_setzero_si256 (), \
      (__mmask8)(U)))

#define _mm_rol_epi64(A, B) \
  ((__m128i)__builtin_ia32_prolq128_mask ((__v2di)(__m128i)(A), (int)(B), \
      (__v2di)(__m128i)_mm_setzero_si128 (), \
      (__mmask8)-1))

#define _mm_mask_rol_epi64(W, U, A, B) \
  ((__m128i)__builtin_ia32_prolq128_mask ((__v2di)(__m128i)(A), (int)(B), \
      (__v2di)(__m128i)(W), \
      (__mmask8)(U)))

#define _mm_maskz_rol_epi64(U, A, B) \
  ((__m128i)__builtin_ia32_prolq128_mask ((__v2di)(__m128i)(A), (int)(B), \
      (__v2di)(__m128i)_mm_setzero_si128 (), \
      (__mmask8)(U)))

#define _mm256_ror_epi64(A, B) \
  ((__m256i)__builtin_ia32_prorq256_mask ((__v4di)(__m256i)(A), (int)(B), \
      (__v4di)(__m256i)_mm256_setzero_si256 (), \
      (__mmask8)-1))

#define _mm256_mask_ror_epi64(W, U, A, B) \
  ((__m256i)__builtin_ia32_prorq256_mask ((__v4di)(__m256i)(A), (int)(B), \
      (__v4di)(__m256i)(W), \
      (__mmask8)(U)))

#define _mm256_maskz_ror_epi64(U, A, B) \
  ((__m256i)__builtin_ia32_prorq256_mask ((__v4di)(__m256i)(A), (int)(B), \
      (__v4di)(__m256i)_mm256_setzero_si256 (), \
      (__mmask8)(U)))

#define _mm_ror_epi64(A, B) \
  ((__m128i)__builtin_ia32_prorq128_mask ((__v2di)(__m128i)(A), (int)(B), \
      (__v2di)(__m128i)_mm_setzero_si128 (), \
      (__mmask8)-1))

#define _mm_mask_ror_epi64(W, U, A, B) \
  ((__m128i)__builtin_ia32_prorq128_mask ((__v2di)(__m128i)(A), (int)(B), \
      (__v2di)(__m128i)(W), \
      (__mmask8)(U)))

#define _mm_maskz_ror_epi64(U, A, B) \
  ((__m128i)__builtin_ia32_prorq128_mask ((__v2di)(__m128i)(A), (int)(B), \
      (__v2di)(__m128i)_mm_setzero_si128 (), \
      (__mmask8)(U)))
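
/* The same rotate pattern follows for 32-bit elements: each doubleword of A
   is rotated by the immediate B, with merge (_mask_) and zero (_maskz_)
   masking variants.  */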

#define _mm256_rol_epi32(A, B) \
  ((__m256i)__builtin_ia32_prold256_mask ((__v8si)(__m256i)(A), (int)(B), \
      (__v8si)(__m256i)_mm256_setzero_si256 (), \
      (__mmask8)-1))

#define _mm256_mask_rol_epi32(W, U, A, B) \
  ((__m256i)__builtin_ia32_prold256_mask ((__v8si)(__m256i)(A), (int)(B), \
      (__v8si)(__m256i)(W), \
      (__mmask8)(U)))

#define _mm256_maskz_rol_epi32(U, A, B) \
  ((__m256i)__builtin_ia32_prold256_mask ((__v8si)(__m256i)(A), (int)(B), \
      (__v8si)(__m256i)_mm256_setzero_si256 (), \
      (__mmask8)(U)))

#define _mm_rol_epi32(A, B) \
  ((__m128i)__builtin_ia32_prold128_mask ((__v4si)(__m128i)(A), (int)(B), \
      (__v4si)(__m128i)_mm_setzero_si128 (), \
      (__mmask8)-1))

#define _mm_mask_rol_epi32(W, U, A, B) \
  ((__m128i)__builtin_ia32_prold128_mask ((__v4si)(__m128i)(A), (int)(B), \
      (__v4si)(__m128i)(W), \
      (__mmask8)(U)))

#define _mm_maskz_rol_epi32(U, A, B) \
  ((__m128i)__builtin_ia32_prold128_mask ((__v4si)(__m128i)(A), (int)(B), \
      (__v4si)(__m128i)_mm_setzero_si128 (), \
      (__mmask8)(U)))

#define _mm256_ror_epi32(A, B) \
  ((__m256i)__builtin_ia32_prord256_mask ((__v8si)(__m256i)(A), (int)(B), \
      (__v8si)(__m256i)_mm256_setzero_si256 (), \
      (__mmask8)-1))

#define _mm256_mask_ror_epi32(W, U, A, B) \
  ((__m256i)__builtin_ia32_prord256_mask ((__v8si)(__m256i)(A), (int)(B), \
      (__v8si)(__m256i)(W), \
      (__mmask8)(U)))

#define _mm256_maskz_ror_epi32(U, A, B) \
  ((__m256i)__builtin_ia32_prord256_mask ((__v8si)(__m256i)(A), (int)(B), \
      (__v8si)(__m256i)_mm256_setzero_si256 (), \
      (__mmask8)(U)))

#define _mm_ror_epi32(A, B) \
  ((__m128i)__builtin_ia32_prord128_mask ((__v4si)(__m128i)(A), (int)(B), \
      (__v4si)(__m128i)_mm_setzero_si128 (), \
      (__mmask8)-1))

#define _mm_mask_ror_epi32(W, U, A, B) \
  ((__m128i)__builtin_ia32_prord128_mask ((__v4si)(__m128i)(A), (int)(B), \
      (__v4si)(__m128i)(W), \
      (__mmask8)(U)))

#define _mm_maskz_ror_epi32(U, A, B) \
  ((__m128i)__builtin_ia32_prord128_mask ((__v4si)(__m128i)(A), (int)(B), \
      (__v4si)(__m128i)_mm_setzero_si128 (), \
      (__mmask8)(U)))
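
/* The alignr macros below concatenate X (upper half) and Y (lower half) into
   a double-width value, shift it right by C 32-bit (alignd) or 64-bit
   (alignq) elements and return the low half; the _mask_/_maskz_ forms merge
   with W or zero the unselected result elements.  */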

#define _mm256_alignr_epi32(X, Y, C) \
  ((__m256i)__builtin_ia32_alignd256_mask ((__v8si)(__m256i)(X), \
      (__v8si)(__m256i)(Y), (int)(C), (__v8si)(__m256i)(X), (__mmask8)-1))

#define _mm256_mask_alignr_epi32(W, U, X, Y, C) \
  ((__m256i)__builtin_ia32_alignd256_mask ((__v8si)(__m256i)(X), \
      (__v8si)(__m256i)(Y), (int)(C), (__v8si)(__m256i)(W), (__mmask8)(U)))

#define _mm256_maskz_alignr_epi32(U, X, Y, C) \
  ((__m256i)__builtin_ia32_alignd256_mask ((__v8si)(__m256i)(X), \
      (__v8si)(__m256i)(Y), (int)(C), (__v8si)(__m256i)_mm256_setzero_si256 (), \
      (__mmask8)(U)))

#define _mm256_alignr_epi64(X, Y, C) \
  ((__m256i)__builtin_ia32_alignq256_mask ((__v4di)(__m256i)(X), \
      (__v4di)(__m256i)(Y), (int)(C), (__v4di)(__m256i)(X), (__mmask8)-1))

#define _mm256_mask_alignr_epi64(W, U, X, Y, C) \
  ((__m256i)__builtin_ia32_alignq256_mask ((__v4di)(__m256i)(X), \
      (__v4di)(__m256i)(Y), (int)(C), (__v4di)(__m256i)(W), (__mmask8)(U)))

#define _mm256_maskz_alignr_epi64(U, X, Y, C) \
  ((__m256i)__builtin_ia32_alignq256_mask ((__v4di)(__m256i)(X), \
      (__v4di)(__m256i)(Y), (int)(C), (__v4di)(__m256i)_mm256_setzero_si256 (), \
      (__mmask8)(U)))

#define _mm_alignr_epi32(X, Y, C) \
  ((__m128i)__builtin_ia32_alignd128_mask ((__v4si)(__m128i)(X), \
      (__v4si)(__m128i)(Y), (int)(C), (__v4si)(__m128i)(X), (__mmask8)-1))

#define _mm_mask_alignr_epi32(W, U, X, Y, C) \
  ((__m128i)__builtin_ia32_alignd128_mask ((__v4si)(__m128i)(X), \
      (__v4si)(__m128i)(Y), (int)(C), (__v4si)(__m128i)(W), (__mmask8)(U)))

#define _mm_maskz_alignr_epi32(U, X, Y, C) \
  ((__m128i)__builtin_ia32_alignd128_mask ((__v4si)(__m128i)(X), \
      (__v4si)(__m128i)(Y), (int)(C), (__v4si)(__m128i)_mm_setzero_si128 (), \
      (__mmask8)(U)))

#define _mm_alignr_epi64(X, Y, C) \
  ((__m128i)__builtin_ia32_alignq128_mask ((__v2di)(__m128i)(X), \
      (__v2di)(__m128i)(Y), (int)(C), (__v2di)(__m128i)(X), (__mmask8)-1))

#define _mm_mask_alignr_epi64(W, U, X, Y, C) \
  ((__m128i)__builtin_ia32_alignq128_mask ((__v2di)(__m128i)(X), \
      (__v2di)(__m128i)(Y), (int)(C), (__v2di)(__m128i)(W), (__mmask8)(U)))

#define _mm_maskz_alignr_epi64(U, X, Y, C) \
  ((__m128i)__builtin_ia32_alignq128_mask ((__v2di)(__m128i)(X), \
      (__v2di)(__m128i)(Y), (int)(C), (__v2di)(__m128i)_mm_setzero_si128 (), \
      (__mmask8)(U)))
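
/* The cvtps_ph macros below convert the packed single-precision values in A
   to half-precision values returned in an __m128i, using the rounding-control
   immediate I (the 128-bit source fills only the low 64 bits of the result).
   Unselected result elements come from W (_mask_) or are zeroed (_maskz_).  */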

#define _mm_mask_cvtps_ph(W, U, A, I) \
  ((__m128i) __builtin_ia32_vcvtps2ph_mask ((__v4sf)(__m128) (A), (int) (I), \
      (__v8hi)(__m128i) (W), (__mmask8) (U)))

#define _mm_maskz_cvtps_ph(U, A, I) \
  ((__m128i) __builtin_ia32_vcvtps2ph_mask ((__v4sf)(__m128) (A), (int) (I), \
      (__v8hi)(__m128i) _mm_setzero_si128 (), (__mmask8) (U)))

#define _mm256_mask_cvtps_ph(W, U, A, I) \
  ((__m128i) __builtin_ia32_vcvtps2ph256_mask ((__v8sf)(__m256) (A), (int) (I), \
      (__v8hi)(__m128i) (W), (__mmask8) (U)))

#define _mm256_maskz_cvtps_ph(U, A, I) \
  ((__m128i) __builtin_ia32_vcvtps2ph256_mask ((__v8sf)(__m256) (A), (int) (I), \
      (__v8hi)(__m128i) _mm_setzero_si128 (), (__mmask8) (U)))
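
/* The srai macros below shift each element of A right arithmetically by the
   immediate B (sign bits shift in from the left); the 64-bit element forms
   are new with AVX-512.  The _mask_/_maskz_ forms merge with W or zero the
   unselected elements, so for a vector v, _mm256_maskz_srai_epi32 (0x0f, v, 31)
   leaves 0 or -1 in the low four lanes and zeros in the rest.  */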

#define _mm256_mask_srai_epi32(W, U, A, B) \
  ((__m256i) __builtin_ia32_psradi256_mask ((__v8si)(__m256i)(A), \
      (int)(B), (__v8si)(__m256i)(W), (__mmask8)(U)))

#define _mm256_maskz_srai_epi32(U, A, B) \
  ((__m256i) __builtin_ia32_psradi256_mask ((__v8si)(__m256i)(A), \
      (int)(B), (__v8si)_mm256_setzero_si256 (), (__mmask8)(U)))

#define _mm_mask_srai_epi32(W, U, A, B) \
  ((__m128i) __builtin_ia32_psradi128_mask ((__v4si)(__m128i)(A), \
      (int)(B), (__v4si)(__m128i)(W), (__mmask8)(U)))

#define _mm_maskz_srai_epi32(U, A, B) \
  ((__m128i) __builtin_ia32_psradi128_mask ((__v4si)(__m128i)(A), \
      (int)(B), (__v4si)_mm_setzero_si128 (), (__mmask8)(U)))

#define _mm256_srai_epi64(A, B) \
  ((__m256i) __builtin_ia32_psraqi256_mask ((__v4di)(__m256i)(A), \
      (int)(B), (__v4di)_mm256_setzero_si256 (), (__mmask8)-1))

#define _mm256_mask_srai_epi64(W, U, A, B) \
  ((__m256i) __builtin_ia32_psraqi256_mask ((__v4di)(__m256i)(A), \
      (int)(B), (__v4di)(__m256i)(W), (__mmask8)(U)))

#define _mm256_maskz_srai_epi64(U, A, B) \
  ((__m256i) __builtin_ia32_psraqi256_mask ((__v4di)(__m256i)(A), \
      (int)(B), (__v4di)_mm256_setzero_si256 (), (__mmask8)(U)))

#define _mm_srai_epi64(A, B) \
  ((__m128i) __builtin_ia32_psraqi128_mask ((__v2di)(__m128i)(A), \
      (int)(B), (__v2di)_mm_setzero_si128 (), (__mmask8)-1))

#define _mm_mask_srai_epi64(W, U, A, B) \
  ((__m128i) __builtin_ia32_psraqi128_mask ((__v2di)(__m128i)(A), \
      (int)(B), (__v2di)(__m128i)(W), (__mmask8)(U)))

#define _mm_maskz_srai_epi64(U, A, B) \
  ((__m128i) __builtin_ia32_psraqi128_mask ((__v2di)(__m128i)(A), \
      (int)(B), (__v2di)_mm_setzero_si128 (), (__mmask8)(U)))
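
/* The permute macros below reorder elements under an immediate control:
   permutex_pd selects among all four doubles of the 256-bit source, while
   permute_pd/permute_ps (vpermilpd/vpermilps) shuffle within each 128-bit
   lane.  As with the other masked macros, _mask_ merges with W and _maskz_
   zeroes the unselected elements.  */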

#define _mm256_mask_permutex_pd(W, U, A, B) \
  ((__m256d) __builtin_ia32_permdf256_mask ((__v4df)(__m256d)(A), \
      (int)(B), (__v4df)(__m256d)(W), (__mmask8)(U)))

#define _mm256_maskz_permutex_pd(U, A, B) \
  ((__m256d) __builtin_ia32_permdf256_mask ((__v4df)(__m256d)(A), \
      (int)(B), (__v4df)(__m256d)_mm256_setzero_pd (), (__mmask8)(U)))

#define _mm256_mask_permute_pd(W, U, X, C) \
  ((__m256d) __builtin_ia32_vpermilpd256_mask ((__v4df)(__m256d)(X), (int)(C), \
      (__v4df)(__m256d)(W), \
      (__mmask8)(U)))

#define _mm256_maskz_permute_pd(U, X, C) \
  ((__m256d) __builtin_ia32_vpermilpd256_mask ((__v4df)(__m256d)(X), (int)(C), \
      (__v4df)(__m256d)_mm256_setzero_pd (), \
      (__mmask8)(U)))

#define _mm256_mask_permute_ps(W, U, X, C) \
  ((__m256) __builtin_ia32_vpermilps256_mask ((__v8sf)(__m256)(X), (int)(C), \
      (__v8sf)(__m256)(W), (__mmask8)(U)))

#define _mm256_maskz_permute_ps(U, X, C) \
  ((__m256) __builtin_ia32_vpermilps256_mask ((__v8sf)(__m256)(X), (int)(C), \
      (__v8sf)(__m256)_mm256_setzero_ps (), \
      (__mmask8)(U)))

#define _mm_mask_permute_pd(W, U, X, C) \
  ((__m128d) __builtin_ia32_vpermilpd_mask ((__v2df)(__m128d)(X), (int)(C), \
      (__v2df)(__m128d)(W), (__mmask8)(U)))

#define _mm_maskz_permute_pd(U, X, C) \
  ((__m128d) __builtin_ia32_vpermilpd_mask ((__v2df)(__m128d)(X), (int)(C), \
      (__v2df)(__m128d)_mm_setzero_pd (), \
      (__mmask8)(U)))

#define _mm_mask_permute_ps(W, U, X, C) \
  ((__m128) __builtin_ia32_vpermilps_mask ((__v4sf)(__m128)(X), (int)(C), \
      (__v4sf)(__m128)(W), (__mmask8)(U)))

#define _mm_maskz_permute_ps(U, X, C) \
  ((__m128) __builtin_ia32_vpermilps_mask ((__v4sf)(__m128)(X), (int)(C), \
      (__v4sf)(__m128)_mm_setzero_ps (), \
      (__mmask8)(U)))
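
/* The blend macros below select element i of the result from __W when bit i
   of the mask __U is set and from __A when it is clear.  */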

#define _mm256_mask_blend_pd(__U, __A, __W) \
  ((__m256d) __builtin_ia32_blendmpd_256_mask ((__v4df) (__A), \
      (__v4df) (__W), \
      (__mmask8) (__U)))

#define _mm256_mask_blend_ps(__U, __A, __W) \
  ((__m256) __builtin_ia32_blendmps_256_mask ((__v8sf) (__A), \
      (__v8sf) (__W), \
      (__mmask8) (__U)))

#define _mm256_mask_blend_epi64(__U, __A, __W) \
  ((__m256i) __builtin_ia32_blendmq_256_mask ((__v4di) (__A), \
      (__v4di) (__W), \
      (__mmask8) (__U)))

#define _mm256_mask_blend_epi32(__U, __A, __W) \
  ((__m256i) __builtin_ia32_blendmd_256_mask ((__v8si) (__A), \
      (__v8si) (__W), \
      (__mmask8) (__U)))

#define _mm_mask_blend_pd(__U, __A, __W) \
  ((__m128d) __builtin_ia32_blendmpd_128_mask ((__v2df) (__A), \
      (__v2df) (__W), \
      (__mmask8) (__U)))

#define _mm_mask_blend_ps(__U, __A, __W) \
  ((__m128) __builtin_ia32_blendmps_128_mask ((__v4sf) (__A), \
      (__v4sf) (__W), \
      (__mmask8) (__U)))

#define _mm_mask_blend_epi64(__U, __A, __W) \
  ((__m128i) __builtin_ia32_blendmq_128_mask ((__v2di) (__A), \
      (__v2di) (__W), \
      (__mmask8) (__U)))

#define _mm_mask_blend_epi32(__U, __A, __W) \
  ((__m128i) __builtin_ia32_blendmd_128_mask ((__v4si) (__A), \
      (__v4si) (__W), \
      (__mmask8) (__U)))
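
/* The compare macros below compare X and Y element by element according to
   the predicate immediate P (one of the _MM_CMPINT_* values for the integer
   forms, or a _CMP_* value for the pd/ps forms) and return the result as a
   bitmask; the _mask_ forms additionally AND the result with M.  A sketch of
   typical use, with hypothetical local names:

     __mmask8 k = _mm256_mask_cmp_epi32_mask (m, a, b, _MM_CMPINT_LT);

   sets a bit of k for each lane where a < b and the corresponding bit of m
   is set.  */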

#define _mm256_cmp_epu32_mask(X, Y, P) \
  ((__mmask8) __builtin_ia32_ucmpd256_mask ((__v8si)(__m256i)(X), \
      (__v8si)(__m256i)(Y), (int)(P), \
      (__mmask8)-1))

#define _mm256_cmp_epi64_mask(X, Y, P) \
  ((__mmask8) __builtin_ia32_cmpq256_mask ((__v4di)(__m256i)(X), \
      (__v4di)(__m256i)(Y), (int)(P), \
      (__mmask8)-1))

#define _mm256_cmp_epi32_mask(X, Y, P) \
  ((__mmask8) __builtin_ia32_cmpd256_mask ((__v8si)(__m256i)(X), \
      (__v8si)(__m256i)(Y), (int)(P), \
      (__mmask8)-1))

#define _mm256_cmp_epu64_mask(X, Y, P) \
  ((__mmask8) __builtin_ia32_ucmpq256_mask ((__v4di)(__m256i)(X), \
      (__v4di)(__m256i)(Y), (int)(P), \
      (__mmask8)-1))

#define _mm256_cmp_pd_mask(X, Y, P) \
  ((__mmask8) __builtin_ia32_cmppd256_mask ((__v4df)(__m256d)(X), \
      (__v4df)(__m256d)(Y), (int)(P), \
      (__mmask8)-1))

#define _mm256_cmp_ps_mask(X, Y, P) \
  ((__mmask8) __builtin_ia32_cmpps256_mask ((__v8sf)(__m256)(X), \
      (__v8sf)(__m256)(Y), (int)(P), \
      (__mmask8)-1))

#define _mm256_mask_cmp_epi64_mask(M, X, Y, P) \
  ((__mmask8) __builtin_ia32_cmpq256_mask ((__v4di)(__m256i)(X), \
      (__v4di)(__m256i)(Y), (int)(P), \
      (__mmask8)(M)))

#define _mm256_mask_cmp_epi32_mask(M, X, Y, P) \
  ((__mmask8) __builtin_ia32_cmpd256_mask ((__v8si)(__m256i)(X), \
      (__v8si)(__m256i)(Y), (int)(P), \
      (__mmask8)(M)))

#define _mm256_mask_cmp_epu64_mask(M, X, Y, P) \
  ((__mmask8) __builtin_ia32_ucmpq256_mask ((__v4di)(__m256i)(X), \
      (__v4di)(__m256i)(Y), (int)(P), \
      (__mmask8)(M)))

#define _mm256_mask_cmp_epu32_mask(M, X, Y, P) \
  ((__mmask8) __builtin_ia32_ucmpd256_mask ((__v8si)(__m256i)(X), \
      (__v8si)(__m256i)(Y), (int)(P), \
      (__mmask8)(M)))

#define _mm256_mask_cmp_pd_mask(M, X, Y, P) \
  ((__mmask8) __builtin_ia32_cmppd256_mask ((__v4df)(__m256d)(X), \
      (__v4df)(__m256d)(Y), (int)(P), \
      (__mmask8)(M)))

#define _mm256_mask_cmp_ps_mask(M, X, Y, P) \
  ((__mmask8) __builtin_ia32_cmpps256_mask ((__v8sf)(__m256)(X), \
      (__v8sf)(__m256)(Y), (int)(P), \
      (__mmask8)(M)))

#define _mm_cmp_epi64_mask(X, Y, P) \
  ((__mmask8) __builtin_ia32_cmpq128_mask ((__v2di)(__m128i)(X), \
      (__v2di)(__m128i)(Y), (int)(P), \
      (__mmask8)-1))

#define _mm_cmp_epi32_mask(X, Y, P) \
  ((__mmask8) __builtin_ia32_cmpd128_mask ((__v4si)(__m128i)(X), \
      (__v4si)(__m128i)(Y), (int)(P), \
      (__mmask8)-1))

#define _mm_cmp_epu64_mask(X, Y, P) \
  ((__mmask8) __builtin_ia32_ucmpq128_mask ((__v2di)(__m128i)(X), \
      (__v2di)(__m128i)(Y), (int)(P), \
      (__mmask8)-1))

#define _mm_cmp_epu32_mask(X, Y, P) \
  ((__mmask8) __builtin_ia32_ucmpd128_mask ((__v4si)(__m128i)(X), \
      (__v4si)(__m128i)(Y), (int)(P), \
      (__mmask8)-1))

#define _mm_cmp_pd_mask(X, Y, P) \
  ((__mmask8) __builtin_ia32_cmppd128_mask ((__v2df)(__m128d)(X), \
      (__v2df)(__m128d)(Y), (int)(P), \
      (__mmask8)-1))

#define _mm_cmp_ps_mask(X, Y, P) \
  ((__mmask8) __builtin_ia32_cmpps128_mask ((__v4sf)(__m128)(X), \
      (__v4sf)(__m128)(Y), (int)(P), \
      (__mmask8)-1))

#define _mm_mask_cmp_epi64_mask(M, X, Y, P) \
  ((__mmask8) __builtin_ia32_cmpq128_mask ((__v2di)(__m128i)(X), \
      (__v2di)(__m128i)(Y), (int)(P), \
      (__mmask8)(M)))

#define _mm_mask_cmp_epi32_mask(M, X, Y, P) \
  ((__mmask8) __builtin_ia32_cmpd128_mask ((__v4si)(__m128i)(X), \
      (__v4si)(__m128i)(Y), (int)(P), \
      (__mmask8)(M)))

#define _mm_mask_cmp_epu64_mask(M, X, Y, P) \
  ((__mmask8) __builtin_ia32_ucmpq128_mask ((__v2di)(__m128i)(X), \
      (__v2di)(__m128i)(Y), (int)(P), \
      (__mmask8)(M)))

#define _mm_mask_cmp_epu32_mask(M, X, Y, P) \
  ((__mmask8) __builtin_ia32_ucmpd128_mask ((__v4si)(__m128i)(X), \
      (__v4si)(__m128i)(Y), (int)(P), \
      (__mmask8)(M)))

#define _mm_mask_cmp_pd_mask(M, X, Y, P) \
  ((__mmask8) __builtin_ia32_cmppd128_mask ((__v2df)(__m128d)(X), \
      (__v2df)(__m128d)(Y), (int)(P), \
      (__mmask8)(M)))

#define _mm_mask_cmp_ps_mask(M, X, Y, P) \
  ((__mmask8) __builtin_ia32_cmpps128_mask ((__v4sf)(__m128)(X), \
      (__v4sf)(__m128)(Y), (int)(P), \
      (__mmask8)(M)))

#endif

#define _mm256_permutexvar_ps(A, B)  _mm256_permutevar8x32_ps ((B), (A))

#ifdef __DISABLE_AVX512VL__
#undef __DISABLE_AVX512VL__
#pragma GCC pop_options
#endif /* __DISABLE_AVX512VL__ */

#endif /* _AVX512VLINTRIN_H_INCLUDED */