/* Copyright (C) 2003-2014 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   Under Section 7 of GPL version 3, you are granted additional
   permissions described in the GCC Runtime Library Exception, version
   3.1, as published by the Free Software Foundation.

   You should have received a copy of the GNU General Public License and
   a copy of the GCC Runtime Library Exception along with this program;
   see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
   <http://www.gnu.org/licenses/>.  */

/* Implemented from the specification included in the Intel C++ Compiler
   User Guide and Reference, version 9.0.  */
#ifndef _EMMINTRIN_H_INCLUDED
#define _EMMINTRIN_H_INCLUDED

/* We need definitions from the SSE header files.  */
#include <xmmintrin.h>

#ifndef __SSE2__
#pragma GCC push_options
#pragma GCC target("sse2")
#define __DISABLE_SSE2__
#endif /* __SSE2__ */
/* SSE2 */
typedef double __v2df __attribute__ ((__vector_size__ (16)));
typedef long long __v2di __attribute__ ((__vector_size__ (16)));
typedef unsigned long long __v2du __attribute__ ((__vector_size__ (16)));
typedef int __v4si __attribute__ ((__vector_size__ (16)));
typedef unsigned int __v4su __attribute__ ((__vector_size__ (16)));
typedef short __v8hi __attribute__ ((__vector_size__ (16)));
typedef unsigned short __v8hu __attribute__ ((__vector_size__ (16)));
typedef char __v16qi __attribute__ ((__vector_size__ (16)));
typedef unsigned char __v16qu __attribute__ ((__vector_size__ (16)));

/* The Intel API is flexible enough that we must allow aliasing with other
   vector types, and their scalar components.  */
typedef long long __m128i __attribute__ ((__vector_size__ (16), __may_alias__));
typedef double __m128d __attribute__ ((__vector_size__ (16), __may_alias__));
/* Create a selector for use with the SHUFPD instruction.  */
#define _MM_SHUFFLE2(fp1,fp0) \
 (((fp1) << 1) | (fp0))
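/* Illustrative note (not part of the original header): bit 0 of the selector
   picks the element taken from the first shuffle operand and bit 1 the
   element taken from the second, so for example

     __m128d __a = _mm_set_pd (2.0, 1.0);      // __a = { 1.0, 2.0 }
     __m128d __r = _mm_shuffle_pd (__a, __a, _MM_SHUFFLE2 (0, 1));
     // _MM_SHUFFLE2 (0, 1) == 1, so __r = { 2.0, 1.0 }: the elements swapped,
     // which is exactly how _mm_loadr_pd below uses it.  */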
/* Create a vector with element 0 as F and the rest zero.  */
extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_set_sd (double __F)
{
  return __extension__ (__m128d){ __F, 0.0 };
}

/* Create a vector with both elements equal to F.  */
extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_set1_pd (double __F)
{
  return __extension__ (__m128d){ __F, __F };
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_set_pd1 (double __F)
{
  return _mm_set1_pd (__F);
}

/* Create a vector with the lower value X and upper value W.  */
extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_set_pd (double __W, double __X)
{
  return __extension__ (__m128d){ __X, __W };
}

/* Create a vector with the lower value W and upper value X.  */
extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_setr_pd (double __W, double __X)
{
  return __extension__ (__m128d){ __W, __X };
}
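/* Illustrative note (not part of the original header): the _mm_set_* argument
   order is highest element first, while _mm_setr_* is lowest element first:

     __m128d __a = _mm_set_pd  (2.0, 1.0);   // element 0 = 1.0, element 1 = 2.0
     __m128d __b = _mm_setr_pd (1.0, 2.0);   // same vector as __a  */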
/* Create an undefined vector.  */
extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_undefined_pd (void)
{
  __m128d __Y = __Y;
  return __Y;
}

/* Create a vector of zeros.  */
extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_setzero_pd (void)
{
  return __extension__ (__m128d){ 0.0, 0.0 };
}

/* Sets the low DPFP value of A from the low value of B.  */
extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_move_sd (__m128d __A, __m128d __B)
{
  return (__m128d) __builtin_ia32_movsd ((__v2df)__A, (__v2df)__B);
}

/* Load two DPFP values from P.  The address must be 16-byte aligned.  */
extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_load_pd (double const *__P)
{
  return *(__m128d *)__P;
}

/* Load two DPFP values from P.  The address need not be 16-byte aligned.  */
extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_loadu_pd (double const *__P)
{
  return __builtin_ia32_loadupd (__P);
}

/* Create a vector with all two elements equal to *P.  */
extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_load1_pd (double const *__P)
{
  return _mm_set1_pd (*__P);
}

/* Create a vector with element 0 as *P and the rest zero.  */
extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_load_sd (double const *__P)
{
  return _mm_set_sd (*__P);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_load_pd1 (double const *__P)
{
  return _mm_load1_pd (__P);
}

/* Load two DPFP values in reverse order.  The address must be aligned.  */
extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_loadr_pd (double const *__P)
{
  __m128d __tmp = _mm_load_pd (__P);
  return __builtin_ia32_shufpd (__tmp, __tmp, _MM_SHUFFLE2 (0,1));
}
/* Store two DPFP values.  The address must be 16-byte aligned.  */
extern __inline void __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_store_pd (double *__P, __m128d __A)
{
  *(__m128d *)__P = __A;
}

/* Store two DPFP values.  The address need not be 16-byte aligned.  */
extern __inline void __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_storeu_pd (double *__P, __m128d __A)
{
  __builtin_ia32_storeupd (__P, __A);
}

/* Stores the lower DPFP value.  */
extern __inline void __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_store_sd (double *__P, __m128d __A)
{
  *__P = ((__v2df)__A)[0];
}

extern __inline double __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtsd_f64 (__m128d __A)
{
  return ((__v2df)__A)[0];
}

extern __inline void __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_storel_pd (double *__P, __m128d __A)
{
  _mm_store_sd (__P, __A);
}

/* Stores the upper DPFP value.  */
extern __inline void __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_storeh_pd (double *__P, __m128d __A)
{
  *__P = ((__v2df)__A)[1];
}

/* Store the lower DPFP value across two words.
   The address must be 16-byte aligned.  */
extern __inline void __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_store1_pd (double *__P, __m128d __A)
{
  _mm_store_pd (__P, __builtin_ia32_shufpd (__A, __A, _MM_SHUFFLE2 (0,0)));
}

extern __inline void __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_store_pd1 (double *__P, __m128d __A)
{
  _mm_store1_pd (__P, __A);
}

/* Store two DPFP values in reverse order.  The address must be aligned.  */
extern __inline void __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_storer_pd (double *__P, __m128d __A)
{
  _mm_store_pd (__P, __builtin_ia32_shufpd (__A, __A, _MM_SHUFFLE2 (0,1)));
}

extern __inline int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtsi128_si32 (__m128i __A)
{
  return __builtin_ia32_vec_ext_v4si ((__v4si)__A, 0);
}

#ifdef __x86_64__
/* Intel intrinsic.  */
extern __inline long long __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtsi128_si64 (__m128i __A)
{
  return ((__v2di)__A)[0];
}

/* Microsoft intrinsic.  */
extern __inline long long __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtsi128_si64x (__m128i __A)
{
  return ((__v2di)__A)[0];
}
#endif
extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_add_pd (__m128d __A, __m128d __B)
{
  return (__m128d) ((__v2df)__A + (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_add_sd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_addsd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_sub_pd (__m128d __A, __m128d __B)
{
  return (__m128d) ((__v2df)__A - (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_sub_sd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_subsd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_mul_pd (__m128d __A, __m128d __B)
{
  return (__m128d) ((__v2df)__A * (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_mul_sd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_mulsd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_div_pd (__m128d __A, __m128d __B)
{
  return (__m128d) ((__v2df)__A / (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_div_sd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_divsd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_sqrt_pd (__m128d __A)
{
  return (__m128d)__builtin_ia32_sqrtpd ((__v2df)__A);
}

/* Return pair {sqrt (B[0]), A[1]}.  */
extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_sqrt_sd (__m128d __A, __m128d __B)
{
  __v2df __tmp = __builtin_ia32_movsd ((__v2df)__A, (__v2df)__B);
  return (__m128d)__builtin_ia32_sqrtsd ((__v2df)__tmp);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_min_pd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_minpd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_min_sd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_minsd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_max_pd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_maxpd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_max_sd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_maxsd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_and_pd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_andpd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_andnot_pd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_andnpd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_or_pd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_orpd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_xor_pd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_xorpd ((__v2df)__A, (__v2df)__B);
}
extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpeq_pd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_cmpeqpd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmplt_pd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_cmpltpd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmple_pd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_cmplepd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpgt_pd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_cmpgtpd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpge_pd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_cmpgepd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpneq_pd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_cmpneqpd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpnlt_pd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_cmpnltpd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpnle_pd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_cmpnlepd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpngt_pd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_cmpngtpd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpnge_pd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_cmpngepd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpord_pd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_cmpordpd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpunord_pd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_cmpunordpd ((__v2df)__A, (__v2df)__B);
}
extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpeq_sd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_cmpeqsd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmplt_sd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_cmpltsd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmple_sd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_cmplesd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpgt_sd (__m128d __A, __m128d __B)
{
  return (__m128d) __builtin_ia32_movsd ((__v2df) __A,
	  (__v2df) __builtin_ia32_cmpltsd ((__v2df) __B, (__v2df) __A));
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpge_sd (__m128d __A, __m128d __B)
{
  return (__m128d) __builtin_ia32_movsd ((__v2df) __A,
	  (__v2df) __builtin_ia32_cmplesd ((__v2df) __B, (__v2df) __A));
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpneq_sd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_cmpneqsd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpnlt_sd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_cmpnltsd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpnle_sd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_cmpnlesd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpngt_sd (__m128d __A, __m128d __B)
{
  return (__m128d) __builtin_ia32_movsd ((__v2df) __A,
	  (__v2df) __builtin_ia32_cmpnltsd ((__v2df) __B, (__v2df) __A));
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpnge_sd (__m128d __A, __m128d __B)
{
  return (__m128d) __builtin_ia32_movsd ((__v2df) __A,
	  (__v2df) __builtin_ia32_cmpnlesd ((__v2df) __B, (__v2df) __A));
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpord_sd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_cmpordsd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpunord_sd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_cmpunordsd ((__v2df)__A, (__v2df)__B);
}
extern __inline int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_comieq_sd (__m128d __A, __m128d __B)
{
  return __builtin_ia32_comisdeq ((__v2df)__A, (__v2df)__B);
}

extern __inline int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_comilt_sd (__m128d __A, __m128d __B)
{
  return __builtin_ia32_comisdlt ((__v2df)__A, (__v2df)__B);
}

extern __inline int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_comile_sd (__m128d __A, __m128d __B)
{
  return __builtin_ia32_comisdle ((__v2df)__A, (__v2df)__B);
}

extern __inline int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_comigt_sd (__m128d __A, __m128d __B)
{
  return __builtin_ia32_comisdgt ((__v2df)__A, (__v2df)__B);
}

extern __inline int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_comige_sd (__m128d __A, __m128d __B)
{
  return __builtin_ia32_comisdge ((__v2df)__A, (__v2df)__B);
}

extern __inline int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_comineq_sd (__m128d __A, __m128d __B)
{
  return __builtin_ia32_comisdneq ((__v2df)__A, (__v2df)__B);
}

extern __inline int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_ucomieq_sd (__m128d __A, __m128d __B)
{
  return __builtin_ia32_ucomisdeq ((__v2df)__A, (__v2df)__B);
}

extern __inline int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_ucomilt_sd (__m128d __A, __m128d __B)
{
  return __builtin_ia32_ucomisdlt ((__v2df)__A, (__v2df)__B);
}

extern __inline int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_ucomile_sd (__m128d __A, __m128d __B)
{
  return __builtin_ia32_ucomisdle ((__v2df)__A, (__v2df)__B);
}

extern __inline int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_ucomigt_sd (__m128d __A, __m128d __B)
{
  return __builtin_ia32_ucomisdgt ((__v2df)__A, (__v2df)__B);
}

extern __inline int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_ucomige_sd (__m128d __A, __m128d __B)
{
  return __builtin_ia32_ucomisdge ((__v2df)__A, (__v2df)__B);
}

extern __inline int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_ucomineq_sd (__m128d __A, __m128d __B)
{
  return __builtin_ia32_ucomisdneq ((__v2df)__A, (__v2df)__B);
}
/* Create a vector of Qi, where i is the element number.  */

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_set_epi64x (long long __q1, long long __q0)
{
  return __extension__ (__m128i)(__v2di){ __q0, __q1 };
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_set_epi64 (__m64 __q1, __m64 __q0)
{
  return _mm_set_epi64x ((long long)__q1, (long long)__q0);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_set_epi32 (int __q3, int __q2, int __q1, int __q0)
{
  return __extension__ (__m128i)(__v4si){ __q0, __q1, __q2, __q3 };
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_set_epi16 (short __q7, short __q6, short __q5, short __q4,
	       short __q3, short __q2, short __q1, short __q0)
{
  return __extension__ (__m128i)(__v8hi){
    __q0, __q1, __q2, __q3, __q4, __q5, __q6, __q7 };
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_set_epi8 (char __q15, char __q14, char __q13, char __q12,
	      char __q11, char __q10, char __q09, char __q08,
	      char __q07, char __q06, char __q05, char __q04,
	      char __q03, char __q02, char __q01, char __q00)
{
  return __extension__ (__m128i)(__v16qi){
    __q00, __q01, __q02, __q03, __q04, __q05, __q06, __q07,
    __q08, __q09, __q10, __q11, __q12, __q13, __q14, __q15
  };
}
/* Set all of the elements of the vector to A.  */

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_set1_epi64x (long long __A)
{
  return _mm_set_epi64x (__A, __A);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_set1_epi64 (__m64 __A)
{
  return _mm_set_epi64 (__A, __A);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_set1_epi32 (int __A)
{
  return _mm_set_epi32 (__A, __A, __A, __A);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_set1_epi16 (short __A)
{
  return _mm_set_epi16 (__A, __A, __A, __A, __A, __A, __A, __A);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_set1_epi8 (char __A)
{
  return _mm_set_epi8 (__A, __A, __A, __A, __A, __A, __A, __A,
		       __A, __A, __A, __A, __A, __A, __A, __A);
}

/* Create a vector of Qi, where i is the element number.
   The parameter order is reversed from the _mm_set_epi* functions.  */

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_setr_epi64 (__m64 __q0, __m64 __q1)
{
  return _mm_set_epi64 (__q1, __q0);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_setr_epi32 (int __q0, int __q1, int __q2, int __q3)
{
  return _mm_set_epi32 (__q3, __q2, __q1, __q0);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_setr_epi16 (short __q0, short __q1, short __q2, short __q3,
		short __q4, short __q5, short __q6, short __q7)
{
  return _mm_set_epi16 (__q7, __q6, __q5, __q4, __q3, __q2, __q1, __q0);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_setr_epi8 (char __q00, char __q01, char __q02, char __q03,
	       char __q04, char __q05, char __q06, char __q07,
	       char __q08, char __q09, char __q10, char __q11,
	       char __q12, char __q13, char __q14, char __q15)
{
  return _mm_set_epi8 (__q15, __q14, __q13, __q12, __q11, __q10, __q09, __q08,
		       __q07, __q06, __q05, __q04, __q03, __q02, __q01, __q00);
}
/* Create a vector with element 0 as *P and the rest zero.  */

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_load_si128 (__m128i const *__P)
{
  return *__P;
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_loadu_si128 (__m128i const *__P)
{
  return (__m128i) __builtin_ia32_loaddqu ((char const *)__P);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_loadl_epi64 (__m128i const *__P)
{
  return _mm_set_epi64 ((__m64)0LL, *(__m64 *)__P);
}

extern __inline void __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_store_si128 (__m128i *__P, __m128i __B)
{
  *__P = __B;
}

extern __inline void __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_storeu_si128 (__m128i *__P, __m128i __B)
{
  __builtin_ia32_storedqu ((char *)__P, (__v16qi)__B);
}

extern __inline void __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_storel_epi64 (__m128i *__P, __m128i __B)
{
  *(long long *)__P = ((__v2di)__B)[0];
}

extern __inline __m64 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_movepi64_pi64 (__m128i __B)
{
  return (__m64) ((__v2di)__B)[0];
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_movpi64_epi64 (__m64 __A)
{
  return _mm_set_epi64 ((__m64)0LL, __A);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_move_epi64 (__m128i __A)
{
  return (__m128i)__builtin_ia32_movq128 ((__v2di) __A);
}

/* Create an undefined vector.  */
extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_undefined_si128 (void)
{
  __m128i __Y = __Y;
  return __Y;
}

/* Create a vector of zeros.  */
extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_setzero_si128 (void)
{
  return __extension__ (__m128i)(__v4si){ 0, 0, 0, 0 };
}
extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtepi32_pd (__m128i __A)
{
  return (__m128d)__builtin_ia32_cvtdq2pd ((__v4si) __A);
}

extern __inline __m128 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtepi32_ps (__m128i __A)
{
  return (__m128)__builtin_ia32_cvtdq2ps ((__v4si) __A);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtpd_epi32 (__m128d __A)
{
  return (__m128i)__builtin_ia32_cvtpd2dq ((__v2df) __A);
}

extern __inline __m64 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtpd_pi32 (__m128d __A)
{
  return (__m64)__builtin_ia32_cvtpd2pi ((__v2df) __A);
}

extern __inline __m128 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtpd_ps (__m128d __A)
{
  return (__m128)__builtin_ia32_cvtpd2ps ((__v2df) __A);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvttpd_epi32 (__m128d __A)
{
  return (__m128i)__builtin_ia32_cvttpd2dq ((__v2df) __A);
}

extern __inline __m64 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvttpd_pi32 (__m128d __A)
{
  return (__m64)__builtin_ia32_cvttpd2pi ((__v2df) __A);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtpi32_pd (__m64 __A)
{
  return (__m128d)__builtin_ia32_cvtpi2pd ((__v2si) __A);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtps_epi32 (__m128 __A)
{
  return (__m128i)__builtin_ia32_cvtps2dq ((__v4sf) __A);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvttps_epi32 (__m128 __A)
{
  return (__m128i)__builtin_ia32_cvttps2dq ((__v4sf) __A);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtps_pd (__m128 __A)
{
  return (__m128d)__builtin_ia32_cvtps2pd ((__v4sf) __A);
}

extern __inline int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtsd_si32 (__m128d __A)
{
  return __builtin_ia32_cvtsd2si ((__v2df) __A);
}

#ifdef __x86_64__
/* Intel intrinsic.  */
extern __inline long long __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtsd_si64 (__m128d __A)
{
  return __builtin_ia32_cvtsd2si64 ((__v2df) __A);
}

/* Microsoft intrinsic.  */
extern __inline long long __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtsd_si64x (__m128d __A)
{
  return __builtin_ia32_cvtsd2si64 ((__v2df) __A);
}
#endif

extern __inline int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvttsd_si32 (__m128d __A)
{
  return __builtin_ia32_cvttsd2si ((__v2df) __A);
}

#ifdef __x86_64__
/* Intel intrinsic.  */
extern __inline long long __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvttsd_si64 (__m128d __A)
{
  return __builtin_ia32_cvttsd2si64 ((__v2df) __A);
}

/* Microsoft intrinsic.  */
extern __inline long long __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvttsd_si64x (__m128d __A)
{
  return __builtin_ia32_cvttsd2si64 ((__v2df) __A);
}
#endif

extern __inline __m128 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtsd_ss (__m128 __A, __m128d __B)
{
  return (__m128)__builtin_ia32_cvtsd2ss ((__v4sf) __A, (__v2df) __B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtsi32_sd (__m128d __A, int __B)
{
  return (__m128d)__builtin_ia32_cvtsi2sd ((__v2df) __A, __B);
}

#ifdef __x86_64__
/* Intel intrinsic.  */
extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtsi64_sd (__m128d __A, long long __B)
{
  return (__m128d)__builtin_ia32_cvtsi642sd ((__v2df) __A, __B);
}

/* Microsoft intrinsic.  */
extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtsi64x_sd (__m128d __A, long long __B)
{
  return (__m128d)__builtin_ia32_cvtsi642sd ((__v2df) __A, __B);
}
#endif

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtss_sd (__m128d __A, __m128 __B)
{
  return (__m128d)__builtin_ia32_cvtss2sd ((__v2df) __A, (__v4sf)__B);
}
#ifdef __OPTIMIZE__
extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_shuffle_pd(__m128d __A, __m128d __B, const int __mask)
{
  return (__m128d)__builtin_ia32_shufpd ((__v2df)__A, (__v2df)__B, __mask);
}
#else
#define _mm_shuffle_pd(A, B, N) \
  ((__m128d)__builtin_ia32_shufpd ((__v2df)(__m128d)(A), \
				   (__v2df)(__m128d)(B), (int)(N)))
#endif

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_unpackhi_pd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_unpckhpd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_unpacklo_pd (__m128d __A, __m128d __B)
{
  return (__m128d)__builtin_ia32_unpcklpd ((__v2df)__A, (__v2df)__B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_loadh_pd (__m128d __A, double const *__B)
{
  return (__m128d)__builtin_ia32_loadhpd ((__v2df)__A, __B);
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_loadl_pd (__m128d __A, double const *__B)
{
  return (__m128d)__builtin_ia32_loadlpd ((__v2df)__A, __B);
}

extern __inline int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_movemask_pd (__m128d __A)
{
  return __builtin_ia32_movmskpd ((__v2df)__A);
}
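/* Illustrative note (not part of the original header): _mm_movemask_pd packs
   the sign bits of the two doubles into bits 0 and 1 of the result, e.g.

     int __m = _mm_movemask_pd (_mm_set_pd (-1.0, 2.0));
     // element 0 (2.0) is non-negative, element 1 (-1.0) is negative,
     // so __m == 0x2.  */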
extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_packs_epi16 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_packsswb128 ((__v8hi)__A, (__v8hi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_packs_epi32 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_packssdw128 ((__v4si)__A, (__v4si)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_packus_epi16 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_packuswb128 ((__v8hi)__A, (__v8hi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_unpackhi_epi8 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_punpckhbw128 ((__v16qi)__A, (__v16qi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_unpackhi_epi16 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_punpckhwd128 ((__v8hi)__A, (__v8hi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_unpackhi_epi32 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_punpckhdq128 ((__v4si)__A, (__v4si)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_unpackhi_epi64 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_punpckhqdq128 ((__v2di)__A, (__v2di)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_unpacklo_epi8 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_punpcklbw128 ((__v16qi)__A, (__v16qi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_unpacklo_epi16 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_punpcklwd128 ((__v8hi)__A, (__v8hi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_unpacklo_epi32 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_punpckldq128 ((__v4si)__A, (__v4si)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_unpacklo_epi64 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_punpcklqdq128 ((__v2di)__A, (__v2di)__B);
}
extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_add_epi8 (__m128i __A, __m128i __B)
{
  return (__m128i) ((__v16qu)__A + (__v16qu)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_add_epi16 (__m128i __A, __m128i __B)
{
  return (__m128i) ((__v8hu)__A + (__v8hu)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_add_epi32 (__m128i __A, __m128i __B)
{
  return (__m128i) ((__v4su)__A + (__v4su)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_add_epi64 (__m128i __A, __m128i __B)
{
  return (__m128i) ((__v2du)__A + (__v2du)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_adds_epi8 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_paddsb128 ((__v16qi)__A, (__v16qi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_adds_epi16 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_paddsw128 ((__v8hi)__A, (__v8hi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_adds_epu8 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_paddusb128 ((__v16qi)__A, (__v16qi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_adds_epu16 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_paddusw128 ((__v8hi)__A, (__v8hi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_sub_epi8 (__m128i __A, __m128i __B)
{
  return (__m128i) ((__v16qu)__A - (__v16qu)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_sub_epi16 (__m128i __A, __m128i __B)
{
  return (__m128i) ((__v8hu)__A - (__v8hu)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_sub_epi32 (__m128i __A, __m128i __B)
{
  return (__m128i) ((__v4su)__A - (__v4su)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_sub_epi64 (__m128i __A, __m128i __B)
{
  return (__m128i) ((__v2du)__A - (__v2du)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_subs_epi8 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_psubsb128 ((__v16qi)__A, (__v16qi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_subs_epi16 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_psubsw128 ((__v8hi)__A, (__v8hi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_subs_epu8 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_psubusb128 ((__v16qi)__A, (__v16qi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_subs_epu16 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_psubusw128 ((__v8hi)__A, (__v8hi)__B);
}
extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_madd_epi16 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_pmaddwd128 ((__v8hi)__A, (__v8hi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_mulhi_epi16 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_pmulhw128 ((__v8hi)__A, (__v8hi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_mullo_epi16 (__m128i __A, __m128i __B)
{
  return (__m128i) ((__v8hu)__A * (__v8hu)__B);
}

extern __inline __m64 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_mul_su32 (__m64 __A, __m64 __B)
{
  return (__m64)__builtin_ia32_pmuludq ((__v2si)__A, (__v2si)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_mul_epu32 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_pmuludq128 ((__v4si)__A, (__v4si)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_slli_epi16 (__m128i __A, int __B)
{
  return (__m128i)__builtin_ia32_psllwi128 ((__v8hi)__A, __B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_slli_epi32 (__m128i __A, int __B)
{
  return (__m128i)__builtin_ia32_pslldi128 ((__v4si)__A, __B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_slli_epi64 (__m128i __A, int __B)
{
  return (__m128i)__builtin_ia32_psllqi128 ((__v2di)__A, __B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_srai_epi16 (__m128i __A, int __B)
{
  return (__m128i)__builtin_ia32_psrawi128 ((__v8hi)__A, __B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_srai_epi32 (__m128i __A, int __B)
{
  return (__m128i)__builtin_ia32_psradi128 ((__v4si)__A, __B);
}
#ifdef __OPTIMIZE__
extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_srli_si128 (__m128i __A, const int __N)
{
  return (__m128i)__builtin_ia32_psrldqi128 (__A, __N * 8);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_slli_si128 (__m128i __A, const int __N)
{
  return (__m128i)__builtin_ia32_pslldqi128 (__A, __N * 8);
}
#else
#define _mm_srli_si128(A, N) \
  ((__m128i)__builtin_ia32_psrldqi128 ((__m128i)(A), (int)(N) * 8))
#define _mm_slli_si128(A, N) \
  ((__m128i)__builtin_ia32_pslldqi128 ((__m128i)(A), (int)(N) * 8))
#endif
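/* Illustrative note (not part of the original header): the count for
   _mm_slli_si128/_mm_srli_si128 is in bytes; the builtin takes a bit count,
   hence the multiplication by 8 above.  For example

     __m128i __v = _mm_set_epi32 (4, 3, 2, 1);
     __m128i __r = _mm_slli_si128 (__v, 4);   // shift the whole vector left 4 bytes
     // read as four ints from element 0 upward, __r is { 0, 1, 2, 3 }.  */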
extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_srli_epi16 (__m128i __A, int __B)
{
  return (__m128i)__builtin_ia32_psrlwi128 ((__v8hi)__A, __B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_srli_epi32 (__m128i __A, int __B)
{
  return (__m128i)__builtin_ia32_psrldi128 ((__v4si)__A, __B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_srli_epi64 (__m128i __A, int __B)
{
  return (__m128i)__builtin_ia32_psrlqi128 ((__v2di)__A, __B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_sll_epi16 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_psllw128((__v8hi)__A, (__v8hi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_sll_epi32 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_pslld128((__v4si)__A, (__v4si)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_sll_epi64 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_psllq128((__v2di)__A, (__v2di)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_sra_epi16 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_psraw128 ((__v8hi)__A, (__v8hi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_sra_epi32 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_psrad128 ((__v4si)__A, (__v4si)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_srl_epi16 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_psrlw128 ((__v8hi)__A, (__v8hi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_srl_epi32 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_psrld128 ((__v4si)__A, (__v4si)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_srl_epi64 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_psrlq128 ((__v2di)__A, (__v2di)__B);
}
extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_and_si128 (__m128i __A, __m128i __B)
{
  return (__m128i) ((__v2du)__A & (__v2du)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_andnot_si128 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_pandn128 ((__v2di)__A, (__v2di)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_or_si128 (__m128i __A, __m128i __B)
{
  return (__m128i) ((__v2du)__A | (__v2du)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_xor_si128 (__m128i __A, __m128i __B)
{
  return (__m128i) ((__v2du)__A ^ (__v2du)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpeq_epi8 (__m128i __A, __m128i __B)
{
  return (__m128i) ((__v16qi)__A == (__v16qi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpeq_epi16 (__m128i __A, __m128i __B)
{
  return (__m128i) ((__v8hi)__A == (__v8hi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpeq_epi32 (__m128i __A, __m128i __B)
{
  return (__m128i) ((__v4si)__A == (__v4si)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmplt_epi8 (__m128i __A, __m128i __B)
{
  return (__m128i) ((__v16qi)__A < (__v16qi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmplt_epi16 (__m128i __A, __m128i __B)
{
  return (__m128i) ((__v8hi)__A < (__v8hi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmplt_epi32 (__m128i __A, __m128i __B)
{
  return (__m128i) ((__v4si)__A < (__v4si)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpgt_epi8 (__m128i __A, __m128i __B)
{
  return (__m128i) ((__v16qi)__A > (__v16qi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpgt_epi16 (__m128i __A, __m128i __B)
{
  return (__m128i) ((__v8hi)__A > (__v8hi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cmpgt_epi32 (__m128i __A, __m128i __B)
{
  return (__m128i) ((__v4si)__A > (__v4si)__B);
}
#ifdef __OPTIMIZE__
extern __inline int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_extract_epi16 (__m128i const __A, int const __N)
{
  return (unsigned short) __builtin_ia32_vec_ext_v8hi ((__v8hi)__A, __N);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_insert_epi16 (__m128i const __A, int const __D, int const __N)
{
  return (__m128i) __builtin_ia32_vec_set_v8hi ((__v8hi)__A, __D, __N);
}
#else
#define _mm_extract_epi16(A, N) \
  ((int) (unsigned short) __builtin_ia32_vec_ext_v8hi ((__v8hi)(__m128i)(A), (int)(N)))
#define _mm_insert_epi16(A, D, N) \
  ((__m128i) __builtin_ia32_vec_set_v8hi ((__v8hi)(__m128i)(A), \
					  (int)(D), (int)(N)))
#endif
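/* Illustrative note (not part of the original header): the element index
   must be a compile-time constant in the range 0..7, e.g.

     __m128i __v = _mm_set1_epi16 (7);
     int     __x = _mm_extract_epi16 (__v, 3);      // __x == 7, zero-extended
     __m128i __w = _mm_insert_epi16 (__v, 42, 3);   // element 3 replaced by 42  */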
extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_max_epi16 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_pmaxsw128 ((__v8hi)__A, (__v8hi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_max_epu8 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_pmaxub128 ((__v16qi)__A, (__v16qi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_min_epi16 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_pminsw128 ((__v8hi)__A, (__v8hi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_min_epu8 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_pminub128 ((__v16qi)__A, (__v16qi)__B);
}

extern __inline int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_movemask_epi8 (__m128i __A)
{
  return __builtin_ia32_pmovmskb128 ((__v16qi)__A);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_mulhi_epu16 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_pmulhuw128 ((__v8hi)__A, (__v8hi)__B);
}
#ifdef __OPTIMIZE__
extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_shufflehi_epi16 (__m128i __A, const int __mask)
{
  return (__m128i)__builtin_ia32_pshufhw ((__v8hi)__A, __mask);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_shufflelo_epi16 (__m128i __A, const int __mask)
{
  return (__m128i)__builtin_ia32_pshuflw ((__v8hi)__A, __mask);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_shuffle_epi32 (__m128i __A, const int __mask)
{
  return (__m128i)__builtin_ia32_pshufd ((__v4si)__A, __mask);
}
#else
#define _mm_shufflehi_epi16(A, N) \
  ((__m128i)__builtin_ia32_pshufhw ((__v8hi)(__m128i)(A), (int)(N)))
#define _mm_shufflelo_epi16(A, N) \
  ((__m128i)__builtin_ia32_pshuflw ((__v8hi)(__m128i)(A), (int)(N)))
#define _mm_shuffle_epi32(A, N) \
  ((__m128i)__builtin_ia32_pshufd ((__v4si)(__m128i)(A), (int)(N)))
#endif
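/* Illustrative note (not part of the original header): the shuffle mask is
   usually built with the _MM_SHUFFLE macro from <xmmintrin.h>, e.g.

     __m128i __v = _mm_set_epi32 (3, 2, 1, 0);
     __m128i __r = _mm_shuffle_epi32 (__v, _MM_SHUFFLE (0, 1, 2, 3));
     // __r is __v with its four 32-bit elements reversed.  */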
extern __inline void __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_maskmoveu_si128 (__m128i __A, __m128i __B, char *__C)
{
  __builtin_ia32_maskmovdqu ((__v16qi)__A, (__v16qi)__B, __C);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_avg_epu8 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_pavgb128 ((__v16qi)__A, (__v16qi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_avg_epu16 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_pavgw128 ((__v8hi)__A, (__v8hi)__B);
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_sad_epu8 (__m128i __A, __m128i __B)
{
  return (__m128i)__builtin_ia32_psadbw128 ((__v16qi)__A, (__v16qi)__B);
}

extern __inline void __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_stream_si32 (int *__A, int __B)
{
  __builtin_ia32_movnti (__A, __B);
}

#ifdef __x86_64__
extern __inline void __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_stream_si64 (long long int *__A, long long int __B)
{
  __builtin_ia32_movnti64 (__A, __B);
}
#endif

extern __inline void __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_stream_si128 (__m128i *__A, __m128i __B)
{
  __builtin_ia32_movntdq ((__v2di *)__A, (__v2di)__B);
}

extern __inline void __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_stream_pd (double *__A, __m128d __B)
{
  __builtin_ia32_movntpd (__A, (__v2df)__B);
}

extern __inline void __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_clflush (void const *__A)
{
  __builtin_ia32_clflush (__A);
}

extern __inline void __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_lfence (void)
{
  __builtin_ia32_lfence ();
}

extern __inline void __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_mfence (void)
{
  __builtin_ia32_mfence ();
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtsi32_si128 (int __A)
{
  return _mm_set_epi32 (0, 0, 0, __A);
}

#ifdef __x86_64__
/* Intel intrinsic.  */
extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtsi64_si128 (long long __A)
{
  return _mm_set_epi64x (0, __A);
}

/* Microsoft intrinsic.  */
extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_cvtsi64x_si128 (long long __A)
{
  return _mm_set_epi64x (0, __A);
}
#endif
/* Casts between various SP, DP, INT vector types.  Note that these do no
   conversion of values, they just change the type.  */
extern __inline __m128 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_castpd_ps(__m128d __A)
{
  return (__m128) __A;
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_castpd_si128(__m128d __A)
{
  return (__m128i) __A;
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_castps_pd(__m128 __A)
{
  return (__m128d) __A;
}

extern __inline __m128i __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_castps_si128(__m128 __A)
{
  return (__m128i) __A;
}

extern __inline __m128 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_castsi128_ps(__m128i __A)
{
  return (__m128) __A;
}

extern __inline __m128d __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm_castsi128_pd(__m128i __A)
{
  return (__m128d) __A;
}

#ifdef __DISABLE_SSE2__
#undef __DISABLE_SSE2__
#pragma GCC pop_options
#endif /* __DISABLE_SSE2__ */

#endif /* _EMMINTRIN_H_INCLUDED */