/* Copyright (C) 2013-2018 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   Under Section 7 of GPL version 3, you are granted additional
   permissions described in the GCC Runtime Library Exception, version
   3.1, as published by the Free Software Foundation.

   You should have received a copy of the GNU General Public License and
   a copy of the GCC Runtime Library Exception along with this program;
   see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
   <http://www.gnu.org/licenses/>.  */
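
/* 512-bit intrinsics for the AVX-512 VBMI2 instructions: concatenate-and-shift
   (VPSHLD/VPSHRD and their variable-count VPSHLDV/VPSHRDV forms) and byte/word
   compress and expand (VPCOMPRESSB/W, VPEXPANDB/W).  */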

#ifndef _IMMINTRIN_H_INCLUDED
#error "Never use <avx512vbmi2intrin.h> directly; include <immintrin.h> instead."
#endif

#ifndef __AVX512VBMI2INTRIN_H_INCLUDED
#define __AVX512VBMI2INTRIN_H_INCLUDED

#if !defined(__AVX512VBMI2__)
#pragma GCC push_options
#pragma GCC target("avx512vbmi2")
#define __DISABLE_AVX512VBMI2__
#endif /* __AVX512VBMI2__ */
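
/* Concatenate-and-shift with an immediate count.  Each destination element is
   taken from the double-width concatenation of the two source elements shifted
   by a constant bit count: the SHRD forms keep the low half of a right shift,
   the SHLD forms keep the high half of a left shift.  The _mask forms merge
   inactive lanes from the first argument, the _maskz forms zero them.  */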
#ifdef __OPTIMIZE__
extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shrdi_epi16 (__m512i __A, __m512i __B, int __C)
{
  return (__m512i) __builtin_ia32_vpshrd_v32hi ((__v32hi)__A, (__v32hi) __B,
						__C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shrdi_epi32 (__m512i __A, __m512i __B, int __C)
{
  return (__m512i) __builtin_ia32_vpshrd_v16si ((__v16si)__A, (__v16si) __B,
						__C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shrdi_epi32 (__m512i __A, __mmask16 __B, __m512i __C, __m512i __D,
			 int __E)
{
  return (__m512i)__builtin_ia32_vpshrd_v16si_mask ((__v16si)__C,
		(__v16si) __D, __E, (__v16si) __A, (__mmask16)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shrdi_epi32 (__mmask16 __A, __m512i __B, __m512i __C, int __D)
{
  return (__m512i)__builtin_ia32_vpshrd_v16si_mask ((__v16si)__B,
	(__v16si) __C, __D, (__v16si) _mm512_setzero_si512 (), (__mmask16)__A);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shrdi_epi64 (__m512i __A, __m512i __B, int __C)
{
  return (__m512i) __builtin_ia32_vpshrd_v8di ((__v8di)__A, (__v8di) __B, __C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shrdi_epi64 (__m512i __A, __mmask8 __B, __m512i __C, __m512i __D,
			 int __E)
{
  return (__m512i)__builtin_ia32_vpshrd_v8di_mask ((__v8di)__C, (__v8di) __D,
		__E, (__v8di) __A, (__mmask8)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shrdi_epi64 (__mmask8 __A, __m512i __B, __m512i __C, int __D)
{
  return (__m512i)__builtin_ia32_vpshrd_v8di_mask ((__v8di)__B, (__v8di) __C,
		__D, (__v8di) _mm512_setzero_si512 (), (__mmask8)__A);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shldi_epi16 (__m512i __A, __m512i __B, int __C)
{
  return (__m512i) __builtin_ia32_vpshld_v32hi ((__v32hi)__A, (__v32hi) __B,
						__C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shldi_epi32 (__m512i __A, __m512i __B, int __C)
{
  return (__m512i) __builtin_ia32_vpshld_v16si ((__v16si)__A, (__v16si) __B,
						__C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shldi_epi32 (__m512i __A, __mmask16 __B, __m512i __C, __m512i __D,
			 int __E)
{
  return (__m512i)__builtin_ia32_vpshld_v16si_mask ((__v16si)__C,
		(__v16si) __D, __E, (__v16si) __A, (__mmask16)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shldi_epi32 (__mmask16 __A, __m512i __B, __m512i __C, int __D)
{
  return (__m512i)__builtin_ia32_vpshld_v16si_mask ((__v16si)__B,
	(__v16si) __C, __D, (__v16si) _mm512_setzero_si512 (), (__mmask16)__A);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shldi_epi64 (__m512i __A, __m512i __B, int __C)
{
  return (__m512i) __builtin_ia32_vpshld_v8di ((__v8di)__A, (__v8di) __B, __C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shldi_epi64 (__m512i __A, __mmask8 __B, __m512i __C, __m512i __D,
			 int __E)
{
  return (__m512i)__builtin_ia32_vpshld_v8di_mask ((__v8di)__C, (__v8di) __D,
		__E, (__v8di) __A, (__mmask8)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shldi_epi64 (__mmask8 __A, __m512i __B, __m512i __C, int __D)
{
  return (__m512i)__builtin_ia32_vpshld_v8di_mask ((__v8di)__B, (__v8di) __C,
		__D, (__v8di) _mm512_setzero_si512 (), (__mmask8)__A);
}
#else
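/* When not optimizing, these intrinsics are provided as macros instead of
   inline functions so that the shift count reaches the builtin as an integer
   constant expression.  */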
#define _mm512_shrdi_epi16(A, B, C) \
  ((__m512i) __builtin_ia32_vpshrd_v32hi ((__v32hi)(__m512i)(A), \
					  (__v32hi)(__m512i)(B), (int)(C)))
#define _mm512_shrdi_epi32(A, B, C) \
  ((__m512i) __builtin_ia32_vpshrd_v16si ((__v16si)(__m512i)(A), \
					  (__v16si)(__m512i)(B), (int)(C)))
#define _mm512_mask_shrdi_epi32(A, B, C, D, E) \
  ((__m512i) __builtin_ia32_vpshrd_v16si_mask ((__v16si)(__m512i)(C), \
    (__v16si)(__m512i)(D), (int)(E), (__v16si)(__m512i)(A), (__mmask16)(B)))
#define _mm512_maskz_shrdi_epi32(A, B, C, D) \
  ((__m512i) __builtin_ia32_vpshrd_v16si_mask ((__v16si)(__m512i)(B), \
    (__v16si)(__m512i)(C), (int)(D), \
    (__v16si)(__m512i)_mm512_setzero_si512 (), (__mmask16)(A)))
#define _mm512_shrdi_epi64(A, B, C) \
  ((__m512i) __builtin_ia32_vpshrd_v8di ((__v8di)(__m512i)(A), \
					 (__v8di)(__m512i)(B), (int)(C)))
#define _mm512_mask_shrdi_epi64(A, B, C, D, E) \
  ((__m512i) __builtin_ia32_vpshrd_v8di_mask ((__v8di)(__m512i)(C), \
    (__v8di)(__m512i)(D), (int)(E), (__v8di)(__m512i)(A), (__mmask8)(B)))
#define _mm512_maskz_shrdi_epi64(A, B, C, D) \
  ((__m512i) __builtin_ia32_vpshrd_v8di_mask ((__v8di)(__m512i)(B), \
    (__v8di)(__m512i)(C), (int)(D), \
    (__v8di)(__m512i)_mm512_setzero_si512 (), (__mmask8)(A)))
#define _mm512_shldi_epi16(A, B, C) \
  ((__m512i) __builtin_ia32_vpshld_v32hi ((__v32hi)(__m512i)(A), \
					  (__v32hi)(__m512i)(B), (int)(C)))
#define _mm512_shldi_epi32(A, B, C) \
  ((__m512i) __builtin_ia32_vpshld_v16si ((__v16si)(__m512i)(A), \
					  (__v16si)(__m512i)(B), (int)(C)))
#define _mm512_mask_shldi_epi32(A, B, C, D, E) \
  ((__m512i) __builtin_ia32_vpshld_v16si_mask ((__v16si)(__m512i)(C), \
    (__v16si)(__m512i)(D), (int)(E), (__v16si)(__m512i)(A), (__mmask16)(B)))
#define _mm512_maskz_shldi_epi32(A, B, C, D) \
  ((__m512i) __builtin_ia32_vpshld_v16si_mask ((__v16si)(__m512i)(B), \
    (__v16si)(__m512i)(C), (int)(D), \
    (__v16si)(__m512i)_mm512_setzero_si512 (), (__mmask16)(A)))
#define _mm512_shldi_epi64(A, B, C) \
  ((__m512i) __builtin_ia32_vpshld_v8di ((__v8di)(__m512i)(A), \
					 (__v8di)(__m512i)(B), (int)(C)))
#define _mm512_mask_shldi_epi64(A, B, C, D, E) \
  ((__m512i) __builtin_ia32_vpshld_v8di_mask ((__v8di)(__m512i)(C), \
    (__v8di)(__m512i)(D), (int)(E), (__v8di)(__m512i)(A), (__mmask8)(B)))
#define _mm512_maskz_shldi_epi64(A, B, C, D) \
  ((__m512i) __builtin_ia32_vpshld_v8di_mask ((__v8di)(__m512i)(B), \
    (__v8di)(__m512i)(C), (int)(D), \
    (__v8di)(__m512i)_mm512_setzero_si512 (), (__mmask8)(A)))
#endif
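
/* Concatenate-and-shift with per-element variable counts (VPSHRDV/VPSHLDV).
   The per-lane shift count is taken from the last vector operand; the _mask
   forms merge inactive lanes from the first operand, the _maskz forms zero
   them.  */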
extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shrdv_epi16 (__m512i __A, __m512i __B, __m512i __C)
{
  return (__m512i) __builtin_ia32_vpshrdv_v32hi ((__v32hi)__A, (__v32hi) __B,
						 (__v32hi) __C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shrdv_epi32 (__m512i __A, __m512i __B, __m512i __C)
{
  return (__m512i) __builtin_ia32_vpshrdv_v16si ((__v16si)__A, (__v16si) __B,
						 (__v16si) __C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shrdv_epi32 (__m512i __A, __mmask16 __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshrdv_v16si_mask ((__v16si)__A,
		(__v16si) __C, (__v16si) __D, (__mmask16)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shrdv_epi32 (__mmask16 __A, __m512i __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshrdv_v16si_maskz ((__v16si)__B,
		(__v16si) __C, (__v16si) __D, (__mmask16)__A);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shrdv_epi64 (__m512i __A, __m512i __B, __m512i __C)
{
  return (__m512i) __builtin_ia32_vpshrdv_v8di ((__v8di)__A, (__v8di) __B,
						(__v8di) __C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shrdv_epi64 (__m512i __A, __mmask8 __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshrdv_v8di_mask ((__v8di)__A, (__v8di) __C,
						    (__v8di) __D, (__mmask8)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shrdv_epi64 (__mmask8 __A, __m512i __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshrdv_v8di_maskz ((__v8di)__B, (__v8di) __C,
						     (__v8di) __D, (__mmask8)__A);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shldv_epi16 (__m512i __A, __m512i __B, __m512i __C)
{
  return (__m512i) __builtin_ia32_vpshldv_v32hi ((__v32hi)__A, (__v32hi) __B,
						 (__v32hi) __C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shldv_epi32 (__m512i __A, __m512i __B, __m512i __C)
{
  return (__m512i) __builtin_ia32_vpshldv_v16si ((__v16si)__A, (__v16si) __B,
						 (__v16si) __C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shldv_epi32 (__m512i __A, __mmask16 __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshldv_v16si_mask ((__v16si)__A,
		(__v16si) __C, (__v16si) __D, (__mmask16)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shldv_epi32 (__mmask16 __A, __m512i __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshldv_v16si_maskz ((__v16si)__B,
		(__v16si) __C, (__v16si) __D, (__mmask16)__A);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shldv_epi64 (__m512i __A, __m512i __B, __m512i __C)
{
  return (__m512i) __builtin_ia32_vpshldv_v8di ((__v8di)__A, (__v8di) __B,
						(__v8di) __C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shldv_epi64 (__m512i __A, __mmask8 __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshldv_v8di_mask ((__v8di)__A, (__v8di) __C,
						    (__v8di) __D, (__mmask8)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shldv_epi64 (__mmask8 __A, __m512i __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshldv_v8di_maskz ((__v8di)__B, (__v8di) __C,
						     (__v8di) __D, (__mmask8)__A);
}

#ifdef __DISABLE_AVX512VBMI2__
#undef __DISABLE_AVX512VBMI2__
#pragma GCC pop_options
#endif /* __DISABLE_AVX512VBMI2__ */

#if !defined(__AVX512VBMI2__) || !defined(__AVX512BW__)
#pragma GCC push_options
#pragma GCC target("avx512vbmi2,avx512bw")
#define __DISABLE_AVX512VBMI2BW__
#endif /* __AVX512VBMI2BW__ */
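
/* The remaining intrinsics operate on 8-bit and 16-bit elements under
   __mmask64/__mmask32 write-masks and therefore require AVX512BW in addition
   to AVX512VBMI2.  Compress packs the active elements of a vector contiguously
   toward the low end (or stores them to memory for the compressstoreu forms);
   inactive destination lanes are merged from the first argument or zeroed.  */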
extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_compress_epi8 (__m512i __A, __mmask64 __B, __m512i __C)
{
  return (__m512i) __builtin_ia32_compressqi512_mask ((__v64qi)__C,
						      (__v64qi)__A, (__mmask64)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_compress_epi8 (__mmask64 __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_compressqi512_mask ((__v64qi)__B,
			(__v64qi)_mm512_setzero_si512 (), (__mmask64)__A);
}

extern __inline void
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_compressstoreu_epi8 (void * __A, __mmask64 __B, __m512i __C)
{
  __builtin_ia32_compressstoreuqi512_mask ((__v64qi *) __A, (__v64qi) __C,
					   (__mmask64) __B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_compress_epi16 (__m512i __A, __mmask32 __B, __m512i __C)
{
  return (__m512i) __builtin_ia32_compresshi512_mask ((__v32hi)__C,
						      (__v32hi)__A, (__mmask32)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_compress_epi16 (__mmask32 __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_compresshi512_mask ((__v32hi)__B,
			(__v32hi)_mm512_setzero_si512 (), (__mmask32)__A);
}

extern __inline void
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_compressstoreu_epi16 (void * __A, __mmask32 __B, __m512i __C)
{
  __builtin_ia32_compressstoreuhi512_mask ((__v32hi *) __A, (__v32hi) __C,
					   (__mmask32) __B);
}
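
/* Expand is the inverse of compress: successive elements of the source vector
   (or of unaligned memory for the expandloadu forms) are scattered into the
   lanes selected by the write-mask, with the remaining lanes merged from the
   first argument or zeroed.  */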
extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_expand_epi8 (__m512i __A, __mmask64 __B, __m512i __C)
{
  return (__m512i) __builtin_ia32_expandqi512_mask ((__v64qi) __C,
						    (__v64qi) __A,
						    (__mmask64) __B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_expand_epi8 (__mmask64 __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_expandqi512_maskz ((__v64qi) __B,
			(__v64qi) _mm512_setzero_si512 (), (__mmask64) __A);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_expandloadu_epi8 (__m512i __A, __mmask64 __B, const void * __C)
{
  return (__m512i) __builtin_ia32_expandloadqi512_mask ((const __v64qi *) __C,
					(__v64qi) __A, (__mmask64) __B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_expandloadu_epi8 (__mmask64 __A, const void * __B)
{
  return (__m512i) __builtin_ia32_expandloadqi512_maskz ((const __v64qi *) __B,
			(__v64qi) _mm512_setzero_si512 (), (__mmask64) __A);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_expand_epi16 (__m512i __A, __mmask32 __B, __m512i __C)
{
  return (__m512i) __builtin_ia32_expandhi512_mask ((__v32hi) __C,
						    (__v32hi) __A,
						    (__mmask32) __B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_expand_epi16 (__mmask32 __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_expandhi512_maskz ((__v32hi) __B,
			(__v32hi) _mm512_setzero_si512 (), (__mmask32) __A);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_expandloadu_epi16 (__m512i __A, __mmask32 __B, const void * __C)
{
  return (__m512i) __builtin_ia32_expandloadhi512_mask ((const __v32hi *) __C,
					(__v32hi) __A, (__mmask32) __B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_expandloadu_epi16 (__mmask32 __A, const void * __B)
{
  return (__m512i) __builtin_ia32_expandloadhi512_maskz ((const __v32hi *) __B,
			(__v32hi) _mm512_setzero_si512 (), (__mmask32) __A);
}
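
/* Masked and zero-masked forms of the immediate and variable
   concatenate-and-shift intrinsics on 16-bit elements; they use __mmask32
   write-masks and so also belong to the AVX512VBMI2+AVX512BW section.  */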
#ifdef __OPTIMIZE__
extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shrdi_epi16 (__m512i __A, __mmask32 __B, __m512i __C, __m512i __D,
			 int __E)
{
  return (__m512i)__builtin_ia32_vpshrd_v32hi_mask ((__v32hi)__C,
		(__v32hi) __D, __E, (__v32hi) __A, (__mmask32)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shrdi_epi16 (__mmask32 __A, __m512i __B, __m512i __C, int __D)
{
  return (__m512i)__builtin_ia32_vpshrd_v32hi_mask ((__v32hi)__B,
	(__v32hi) __C, __D, (__v32hi) _mm512_setzero_si512 (), (__mmask32)__A);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shldi_epi16 (__m512i __A, __mmask32 __B, __m512i __C, __m512i __D,
			 int __E)
{
  return (__m512i)__builtin_ia32_vpshld_v32hi_mask ((__v32hi)__C,
		(__v32hi) __D, __E, (__v32hi) __A, (__mmask32)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shldi_epi16 (__mmask32 __A, __m512i __B, __m512i __C, int __D)
{
  return (__m512i)__builtin_ia32_vpshld_v32hi_mask ((__v32hi)__B,
	(__v32hi) __C, __D, (__v32hi) _mm512_setzero_si512 (), (__mmask32)__A);
}
#else
#define _mm512_mask_shrdi_epi16(A, B, C, D, E) \
  ((__m512i) __builtin_ia32_vpshrd_v32hi_mask ((__v32hi)(__m512i)(C), \
    (__v32hi)(__m512i)(D), (int)(E), (__v32hi)(__m512i)(A), (__mmask32)(B)))
#define _mm512_maskz_shrdi_epi16(A, B, C, D) \
  ((__m512i) __builtin_ia32_vpshrd_v32hi_mask ((__v32hi)(__m512i)(B), \
    (__v32hi)(__m512i)(C), (int)(D), \
    (__v32hi)(__m512i)_mm512_setzero_si512 (), (__mmask32)(A)))
#define _mm512_mask_shldi_epi16(A, B, C, D, E) \
  ((__m512i) __builtin_ia32_vpshld_v32hi_mask ((__v32hi)(__m512i)(C), \
    (__v32hi)(__m512i)(D), (int)(E), (__v32hi)(__m512i)(A), (__mmask32)(B)))
#define _mm512_maskz_shldi_epi16(A, B, C, D) \
  ((__m512i) __builtin_ia32_vpshld_v32hi_mask ((__v32hi)(__m512i)(B), \
    (__v32hi)(__m512i)(C), (int)(D), \
    (__v32hi)(__m512i)_mm512_setzero_si512 (), (__mmask32)(A)))
#endif

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shrdv_epi16 (__m512i __A, __mmask32 __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshrdv_v32hi_mask ((__v32hi)__A,
		(__v32hi) __C, (__v32hi) __D, (__mmask32)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shrdv_epi16 (__mmask32 __A, __m512i __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshrdv_v32hi_maskz ((__v32hi)__B,
		(__v32hi) __C, (__v32hi) __D, (__mmask32)__A);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shldv_epi16 (__m512i __A, __mmask32 __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshldv_v32hi_mask ((__v32hi)__A,
		(__v32hi) __C, (__v32hi) __D, (__mmask32)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shldv_epi16 (__mmask32 __A, __m512i __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshldv_v32hi_maskz ((__v32hi)__B,
		(__v32hi) __C, (__v32hi) __D, (__mmask32)__A);
}

#ifdef __DISABLE_AVX512VBMI2BW__
#undef __DISABLE_AVX512VBMI2BW__
#pragma GCC pop_options
#endif /* __DISABLE_AVX512VBMI2BW__ */

#endif /* __AVX512VBMI2INTRIN_H_INCLUDED */