/* avx512vbmi2intrin.h: AVX512VBMI2 intrinsics for the GCC x86 back end.  */
1 /* Copyright (C) 2013-2017 Free Software Foundation, Inc.
3 This file is part of GCC.
5 GCC is free software; you can redistribute it and/or modify
6 it under the terms of the GNU General Public License as published by
7 the Free Software Foundation; either version 3, or (at your option)
8 any later version.
10 GCC is distributed in the hope that it will be useful,
11 but WITHOUT ANY WARRANTY; without even the implied warranty of
12 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 GNU General Public License for more details.
15 Under Section 7 of GPL version 3, you are granted additional
16 permissions described in the GCC Runtime Library Exception, version
17 3.1, as published by the Free Software Foundation.
19 You should have received a copy of the GNU General Public License and
20 a copy of the GCC Runtime Library Exception along with this program;
21 see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
22 <http://www.gnu.org/licenses/>. */
24 #ifndef _IMMINTRIN_H_INCLUDED
25 #error "Never use <avx512vbmi2intrin.h> directly; include <immintrin.h> instead."
26 #endif
28 #ifndef __AVX512VBMI2INTRIN_H_INCLUDED
29 #define __AVX512VBMI2INTRIN_H_INCLUDED
31 #if !defined(__AVX512VBMI2__) || !defined(__AVX512BW__)
32 #pragma GCC push_options
33 #pragma GCC target("avx512vbmi2,avx512bw")
34 #define __DISABLE_AVX512VBMI2BW__
35 #endif /* __AVX512VBMI2BW__ */
37 extern __inline __m512i
38 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
39 _mm512_mask_compress_epi8 (__m512i __A, __mmask64 __B, __m512i __C)
41 return (__m512i) __builtin_ia32_compressqi512_mask ((__v64qi)__C,
42 (__v64qi)__A, (__mmask64)__B);
46 extern __inline __m512i
47 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
48 _mm512_maskz_compress_epi8 (__mmask64 __A, __m512i __B)
50 return (__m512i) __builtin_ia32_compressqi512_mask ((__v64qi)__B,
51 (__v64qi)_mm512_setzero_si512 (), (__mmask64)__A);
55 extern __inline void
56 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
57 _mm512_mask_compressstoreu_epi8 (void * __A, __mmask64 __B, __m512i __C)
59 __builtin_ia32_compressstoreuqi512_mask ((__v64qi *) __A, (__v64qi) __C,
60 (__mmask64) __B);
63 extern __inline __m512i
64 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
65 _mm512_mask_compress_epi16 (__m512i __A, __mmask32 __B, __m512i __C)
67 return (__m512i) __builtin_ia32_compresshi512_mask ((__v32hi)__C,
68 (__v32hi)__A, (__mmask32)__B);
71 extern __inline __m512i
72 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
73 _mm512_maskz_compress_epi16 (__mmask32 __A, __m512i __B)
75 return (__m512i) __builtin_ia32_compresshi512_mask ((__v32hi)__B,
76 (__v32hi)_mm512_setzero_si512 (), (__mmask32)__A);
79 extern __inline void
80 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
81 _mm512_mask_compressstoreu_epi16 (void * __A, __mmask32 __B, __m512i __C)
83 __builtin_ia32_compressstoreuhi512_mask ((__v32hi *) __A, (__v32hi) __C,
84 (__mmask32) __B);
87 extern __inline __m512i
88 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
89 _mm512_mask_expand_epi8 (__m512i __A, __mmask64 __B, __m512i __C)
91 return (__m512i) __builtin_ia32_expandqi512_mask ((__v64qi) __C,
92 (__v64qi) __A,
93 (__mmask64) __B);
96 extern __inline __m512i
97 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
98 _mm512_maskz_expand_epi8 (__mmask64 __A, __m512i __B)
100 return (__m512i) __builtin_ia32_expandqi512_maskz ((__v64qi) __B,
101 (__v64qi) _mm512_setzero_si512 (), (__mmask64) __A);
104 extern __inline __m512i
105 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
106 _mm512_mask_expandloadu_epi8 (__m512i __A, __mmask64 __B, const void * __C)
108 return (__m512i) __builtin_ia32_expandloadqi512_mask ((const __v64qi *) __C,
109 (__v64qi) __A, (__mmask64) __B);
112 extern __inline __m512i
113 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
114 _mm512_maskz_expandloadu_epi8 (__mmask64 __A, const void * __B)
116 return (__m512i) __builtin_ia32_expandloadqi512_maskz ((const __v64qi *) __B,
117 (__v64qi) _mm512_setzero_si512 (), (__mmask64) __A);
120 extern __inline __m512i
121 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
122 _mm512_mask_expand_epi16 (__m512i __A, __mmask32 __B, __m512i __C)
124 return (__m512i) __builtin_ia32_expandhi512_mask ((__v32hi) __C,
125 (__v32hi) __A,
126 (__mmask32) __B);
129 extern __inline __m512i
130 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
131 _mm512_maskz_expand_epi16 (__mmask32 __A, __m512i __B)
133 return (__m512i) __builtin_ia32_expandhi512_maskz ((__v32hi) __B,
134 (__v32hi) _mm512_setzero_si512 (), (__mmask32) __A);
137 extern __inline __m512i
138 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
139 _mm512_mask_expandloadu_epi16 (__m512i __A, __mmask32 __B, const void * __C)
141 return (__m512i) __builtin_ia32_expandloadhi512_mask ((const __v32hi *) __C,
142 (__v32hi) __A, (__mmask32) __B);
145 extern __inline __m512i
146 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
147 _mm512_maskz_expandloadu_epi16 (__mmask32 __A, const void * __B)
149 return (__m512i) __builtin_ia32_expandloadhi512_maskz ((const __v32hi *) __B,
150 (__v32hi) _mm512_setzero_si512 (), (__mmask32) __A);
153 #ifdef __OPTIMIZE__
154 extern __inline __m512i
155 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
156 _mm512_shrdi_epi16 (__m512i __A, __m512i __B, int __C)
158 return (__m512i) __builtin_ia32_vpshrd_v32hi ((__v32hi)__A, (__v32hi) __B,
159 __C);
162 extern __inline __m512i
163 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
164 _mm512_mask_shrdi_epi16 (__m512i __A, __mmask32 __B, __m512i __C, __m512i __D,
165 int __E)
167 return (__m512i)__builtin_ia32_vpshrd_v32hi_mask ((__v32hi)__C,
168 (__v32hi) __D, __E, (__v32hi) __A, (__mmask32)__B);
171 extern __inline __m512i
172 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
173 _mm512_maskz_shrdi_epi16 (__mmask32 __A, __m512i __B, __m512i __C, int __D)
175 return (__m512i)__builtin_ia32_vpshrd_v32hi_mask ((__v32hi)__B,
176 (__v32hi) __C, __D, (__v32hi) _mm512_setzero_si512 (), (__mmask32)__A);
179 extern __inline __m512i
180 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
181 _mm512_shrdi_epi32 (__m512i __A, __m512i __B, int __C)
183 return (__m512i) __builtin_ia32_vpshrd_v16si ((__v16si)__A, (__v16si) __B,
184 __C);
187 extern __inline __m512i
188 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
189 _mm512_mask_shrdi_epi32 (__m512i __A, __mmask16 __B, __m512i __C, __m512i __D,
190 int __E)
192 return (__m512i)__builtin_ia32_vpshrd_v16si_mask ((__v16si)__C,
193 (__v16si) __D, __E, (__v16si) __A, (__mmask16)__B);
196 extern __inline __m512i
197 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
198 _mm512_maskz_shrdi_epi32 (__mmask16 __A, __m512i __B, __m512i __C, int __D)
200 return (__m512i)__builtin_ia32_vpshrd_v16si_mask ((__v16si)__B,
201 (__v16si) __C, __D, (__v16si) _mm512_setzero_si512 (), (__mmask16)__A);
204 extern __inline __m512i
205 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
206 _mm512_shrdi_epi64 (__m512i __A, __m512i __B, int __C)
208 return (__m512i) __builtin_ia32_vpshrd_v8di ((__v8di)__A, (__v8di) __B, __C);
211 extern __inline __m512i
212 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
213 _mm512_mask_shrdi_epi64 (__m512i __A, __mmask8 __B, __m512i __C, __m512i __D,
214 int __E)
216 return (__m512i)__builtin_ia32_vpshrd_v8di_mask ((__v8di)__C, (__v8di) __D,
217 __E, (__v8di) __A, (__mmask8)__B);
220 extern __inline __m512i
221 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
222 _mm512_maskz_shrdi_epi64 (__mmask8 __A, __m512i __B, __m512i __C, int __D)
224 return (__m512i)__builtin_ia32_vpshrd_v8di_mask ((__v8di)__B, (__v8di) __C,
225 __D, (__v8di) _mm512_setzero_si512 (), (__mmask8)__A);
228 extern __inline __m512i
229 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
230 _mm512_shldi_epi16 (__m512i __A, __m512i __B, int __C)
232 return (__m512i) __builtin_ia32_vpshld_v32hi ((__v32hi)__A, (__v32hi) __B,
233 __C);
236 extern __inline __m512i
237 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
238 _mm512_mask_shldi_epi16 (__m512i __A, __mmask32 __B, __m512i __C, __m512i __D,
239 int __E)
241 return (__m512i)__builtin_ia32_vpshld_v32hi_mask ((__v32hi)__C,
242 (__v32hi) __D, __E, (__v32hi) __A, (__mmask32)__B);
245 extern __inline __m512i
246 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
247 _mm512_maskz_shldi_epi16 (__mmask32 __A, __m512i __B, __m512i __C, int __D)
249 return (__m512i)__builtin_ia32_vpshld_v32hi_mask ((__v32hi)__B,
250 (__v32hi) __C, __D, (__v32hi) _mm512_setzero_si512 (), (__mmask32)__A);
253 extern __inline __m512i
254 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
255 _mm512_shldi_epi32 (__m512i __A, __m512i __B, int __C)
257 return (__m512i) __builtin_ia32_vpshld_v16si ((__v16si)__A, (__v16si) __B,
258 __C);
261 extern __inline __m512i
262 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
263 _mm512_mask_shldi_epi32 (__m512i __A, __mmask16 __B, __m512i __C, __m512i __D,
264 int __E)
266 return (__m512i)__builtin_ia32_vpshld_v16si_mask ((__v16si)__C,
267 (__v16si) __D, __E, (__v16si) __A, (__mmask16)__B);
270 extern __inline __m512i
271 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
272 _mm512_maskz_shldi_epi32 (__mmask16 __A, __m512i __B, __m512i __C, int __D)
274 return (__m512i)__builtin_ia32_vpshld_v16si_mask ((__v16si)__B,
275 (__v16si) __C, __D, (__v16si) _mm512_setzero_si512 (), (__mmask16)__A);
278 extern __inline __m512i
279 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
280 _mm512_shldi_epi64 (__m512i __A, __m512i __B, int __C)
282 return (__m512i) __builtin_ia32_vpshld_v8di ((__v8di)__A, (__v8di) __B, __C);
285 extern __inline __m512i
286 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
287 _mm512_mask_shldi_epi64 (__m512i __A, __mmask8 __B, __m512i __C, __m512i __D,
288 int __E)
290 return (__m512i)__builtin_ia32_vpshld_v8di_mask ((__v8di)__C, (__v8di) __D,
291 __E, (__v8di) __A, (__mmask8)__B);
294 extern __inline __m512i
295 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
296 _mm512_maskz_shldi_epi64 (__mmask8 __A, __m512i __B, __m512i __C, int __D)
298 return (__m512i)__builtin_ia32_vpshld_v8di_mask ((__v8di)__B, (__v8di) __C,
299 __D, (__v8di) _mm512_setzero_si512 (), (__mmask8)__A);
301 #else
/* Without __OPTIMIZE__ an inline wrapper cannot guarantee the shift count
   is a compile-time constant, so the immediate forms are macros here.
   NOTE: each macro body must be fully parenthesized — the damaged text was
   missing one closing paren per macro; restored below.  */
#define _mm512_shrdi_epi16(A, B, C) \
  ((__m512i) __builtin_ia32_vpshrd_v32hi ((__v32hi)(__m512i)(A), \
    (__v32hi)(__m512i)(B), (int)(C)))
#define _mm512_mask_shrdi_epi16(A, B, C, D, E) \
  ((__m512i) __builtin_ia32_vpshrd_v32hi_mask ((__v32hi)(__m512i)(C), \
    (__v32hi)(__m512i)(D), (int)(E), (__v32hi)(__m512i)(A), (__mmask32)(B)))
#define _mm512_maskz_shrdi_epi16(A, B, C, D) \
  ((__m512i) __builtin_ia32_vpshrd_v32hi_mask ((__v32hi)(__m512i)(B), \
    (__v32hi)(__m512i)(C), (int)(D), \
    (__v32hi)(__m512i)_mm512_setzero_si512 (), (__mmask32)(A)))
#define _mm512_shrdi_epi32(A, B, C) \
  ((__m512i) __builtin_ia32_vpshrd_v16si ((__v16si)(__m512i)(A), \
    (__v16si)(__m512i)(B), (int)(C)))
#define _mm512_mask_shrdi_epi32(A, B, C, D, E) \
  ((__m512i) __builtin_ia32_vpshrd_v16si_mask ((__v16si)(__m512i)(C), \
    (__v16si)(__m512i)(D), (int)(E), (__v16si)(__m512i)(A), (__mmask16)(B)))
#define _mm512_maskz_shrdi_epi32(A, B, C, D) \
  ((__m512i) __builtin_ia32_vpshrd_v16si_mask ((__v16si)(__m512i)(B), \
    (__v16si)(__m512i)(C), (int)(D), \
    (__v16si)(__m512i)_mm512_setzero_si512 (), (__mmask16)(A)))
#define _mm512_shrdi_epi64(A, B, C) \
  ((__m512i) __builtin_ia32_vpshrd_v8di ((__v8di)(__m512i)(A), \
    (__v8di)(__m512i)(B), (int)(C)))
#define _mm512_mask_shrdi_epi64(A, B, C, D, E) \
  ((__m512i) __builtin_ia32_vpshrd_v8di_mask ((__v8di)(__m512i)(C), \
    (__v8di)(__m512i)(D), (int)(E), (__v8di)(__m512i)(A), (__mmask8)(B)))
#define _mm512_maskz_shrdi_epi64(A, B, C, D) \
  ((__m512i) __builtin_ia32_vpshrd_v8di_mask ((__v8di)(__m512i)(B), \
    (__v8di)(__m512i)(C), (int)(D), \
    (__v8di)(__m512i)_mm512_setzero_si512 (), (__mmask8)(A)))
#define _mm512_shldi_epi16(A, B, C) \
  ((__m512i) __builtin_ia32_vpshld_v32hi ((__v32hi)(__m512i)(A), \
    (__v32hi)(__m512i)(B), (int)(C)))
#define _mm512_mask_shldi_epi16(A, B, C, D, E) \
  ((__m512i) __builtin_ia32_vpshld_v32hi_mask ((__v32hi)(__m512i)(C), \
    (__v32hi)(__m512i)(D), (int)(E), (__v32hi)(__m512i)(A), (__mmask32)(B)))
#define _mm512_maskz_shldi_epi16(A, B, C, D) \
  ((__m512i) __builtin_ia32_vpshld_v32hi_mask ((__v32hi)(__m512i)(B), \
    (__v32hi)(__m512i)(C), (int)(D), \
    (__v32hi)(__m512i)_mm512_setzero_si512 (), (__mmask32)(A)))
#define _mm512_shldi_epi32(A, B, C) \
  ((__m512i) __builtin_ia32_vpshld_v16si ((__v16si)(__m512i)(A), \
    (__v16si)(__m512i)(B), (int)(C)))
#define _mm512_mask_shldi_epi32(A, B, C, D, E) \
  ((__m512i) __builtin_ia32_vpshld_v16si_mask ((__v16si)(__m512i)(C), \
    (__v16si)(__m512i)(D), (int)(E), (__v16si)(__m512i)(A), (__mmask16)(B)))
#define _mm512_maskz_shldi_epi32(A, B, C, D) \
  ((__m512i) __builtin_ia32_vpshld_v16si_mask ((__v16si)(__m512i)(B), \
    (__v16si)(__m512i)(C), (int)(D), \
    (__v16si)(__m512i)_mm512_setzero_si512 (), (__mmask16)(A)))
#define _mm512_shldi_epi64(A, B, C) \
  ((__m512i) __builtin_ia32_vpshld_v8di ((__v8di)(__m512i)(A), \
    (__v8di)(__m512i)(B), (int)(C)))
#define _mm512_mask_shldi_epi64(A, B, C, D, E) \
  ((__m512i) __builtin_ia32_vpshld_v8di_mask ((__v8di)(__m512i)(C), \
    (__v8di)(__m512i)(D), (int)(E), (__v8di)(__m512i)(A), (__mmask8)(B)))
#define _mm512_maskz_shldi_epi64(A, B, C, D) \
  ((__m512i) __builtin_ia32_vpshld_v8di_mask ((__v8di)(__m512i)(B), \
    (__v8di)(__m512i)(C), (int)(D), \
    (__v8di)(__m512i)_mm512_setzero_si512 (), (__mmask8)(A)))
362 #endif
364 extern __inline __m512i
365 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
366 _mm512_shrdv_epi16 (__m512i __A, __m512i __B, __m512i __C)
368 return (__m512i) __builtin_ia32_vpshrdv_v32hi ((__v32hi)__A, (__v32hi) __B,
369 (__v32hi) __C);
372 extern __inline __m512i
373 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
374 _mm512_mask_shrdv_epi16 (__m512i __A, __mmask32 __B, __m512i __C, __m512i __D)
376 return (__m512i)__builtin_ia32_vpshrdv_v32hi_mask ((__v32hi)__A,
377 (__v32hi) __C, (__v32hi) __D, (__mmask32)__B);
380 extern __inline __m512i
381 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
382 _mm512_maskz_shrdv_epi16 (__mmask32 __A, __m512i __B, __m512i __C, __m512i __D)
384 return (__m512i)__builtin_ia32_vpshrdv_v32hi_maskz ((__v32hi)__B,
385 (__v32hi) __C, (__v32hi) __D, (__mmask32)__A);
388 extern __inline __m512i
389 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
390 _mm512_shrdv_epi32 (__m512i __A, __m512i __B, __m512i __C)
392 return (__m512i) __builtin_ia32_vpshrdv_v16si ((__v16si)__A, (__v16si) __B,
393 (__v16si) __C);
396 extern __inline __m512i
397 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
398 _mm512_mask_shrdv_epi32 (__m512i __A, __mmask16 __B, __m512i __C, __m512i __D)
400 return (__m512i)__builtin_ia32_vpshrdv_v16si_mask ((__v16si)__A,
401 (__v16si) __C, (__v16si) __D, (__mmask16)__B);
404 extern __inline __m512i
405 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
406 _mm512_maskz_shrdv_epi32 (__mmask16 __A, __m512i __B, __m512i __C, __m512i __D)
408 return (__m512i)__builtin_ia32_vpshrdv_v16si_maskz ((__v16si)__B,
409 (__v16si) __C, (__v16si) __D, (__mmask16)__A);
412 extern __inline __m512i
413 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
414 _mm512_shrdv_epi64 (__m512i __A, __m512i __B, __m512i __C)
416 return (__m512i) __builtin_ia32_vpshrdv_v8di ((__v8di)__A, (__v8di) __B,
417 (__v8di) __C);
420 extern __inline __m512i
421 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
422 _mm512_mask_shrdv_epi64 (__m512i __A, __mmask8 __B, __m512i __C, __m512i __D)
424 return (__m512i)__builtin_ia32_vpshrdv_v8di_mask ((__v8di)__A, (__v8di) __C,
425 (__v8di) __D, (__mmask8)__B);
428 extern __inline __m512i
429 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
430 _mm512_maskz_shrdv_epi64 (__mmask8 __A, __m512i __B, __m512i __C, __m512i __D)
432 return (__m512i)__builtin_ia32_vpshrdv_v8di_maskz ((__v8di)__B, (__v8di) __C,
433 (__v8di) __D, (__mmask8)__A);
435 extern __inline __m512i
436 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
437 _mm512_shldv_epi16 (__m512i __A, __m512i __B, __m512i __C)
439 return (__m512i) __builtin_ia32_vpshldv_v32hi ((__v32hi)__A, (__v32hi) __B,
440 (__v32hi) __C);
443 extern __inline __m512i
444 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
445 _mm512_mask_shldv_epi16 (__m512i __A, __mmask32 __B, __m512i __C, __m512i __D)
447 return (__m512i)__builtin_ia32_vpshldv_v32hi_mask ((__v32hi)__A,
448 (__v32hi) __C, (__v32hi) __D, (__mmask32)__B);
451 extern __inline __m512i
452 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
453 _mm512_maskz_shldv_epi16 (__mmask32 __A, __m512i __B, __m512i __C, __m512i __D)
455 return (__m512i)__builtin_ia32_vpshldv_v32hi_maskz ((__v32hi)__B,
456 (__v32hi) __C, (__v32hi) __D, (__mmask32)__A);
459 extern __inline __m512i
460 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
461 _mm512_shldv_epi32 (__m512i __A, __m512i __B, __m512i __C)
463 return (__m512i) __builtin_ia32_vpshldv_v16si ((__v16si)__A, (__v16si) __B,
464 (__v16si) __C);
467 extern __inline __m512i
468 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
469 _mm512_mask_shldv_epi32 (__m512i __A, __mmask16 __B, __m512i __C, __m512i __D)
471 return (__m512i)__builtin_ia32_vpshldv_v16si_mask ((__v16si)__A,
472 (__v16si) __C, (__v16si) __D, (__mmask16)__B);
475 extern __inline __m512i
476 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
477 _mm512_maskz_shldv_epi32 (__mmask16 __A, __m512i __B, __m512i __C, __m512i __D)
479 return (__m512i)__builtin_ia32_vpshldv_v16si_maskz ((__v16si)__B,
480 (__v16si) __C, (__v16si) __D, (__mmask16)__A);
483 extern __inline __m512i
484 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
485 _mm512_shldv_epi64 (__m512i __A, __m512i __B, __m512i __C)
487 return (__m512i) __builtin_ia32_vpshldv_v8di ((__v8di)__A, (__v8di) __B,
488 (__v8di) __C);
491 extern __inline __m512i
492 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
493 _mm512_mask_shldv_epi64 (__m512i __A, __mmask8 __B, __m512i __C, __m512i __D)
495 return (__m512i)__builtin_ia32_vpshldv_v8di_mask ((__v8di)__A, (__v8di) __C,
496 (__v8di) __D, (__mmask8)__B);
499 extern __inline __m512i
500 __attribute__((__gnu_inline__, __always_inline__, __artificial__))
501 _mm512_maskz_shldv_epi64 (__mmask8 __A, __m512i __B, __m512i __C, __m512i __D)
503 return (__m512i)__builtin_ia32_vpshldv_v8di_maskz ((__v8di)__B, (__v8di) __C,
504 (__v8di) __D, (__mmask8)__A);
507 #ifdef __DISABLE_AVX512VBMI2BW__
508 #undef __DISABLE_AVX512VBMI2BW__
510 #pragma GCC pop_options
511 #endif /* __DISABLE_AVX512VBMI2BW__ */
513 #endif /* __AVX512VBMI2INTRIN_H_INCLUDED */