[AArch64_be] Don't fold reduction intrinsics.
[official-gcc.git] / gcc / config / aarch64 / aarch64-builtins.c
/* Builtins' description for AArch64 SIMD architecture.
   Copyright (C) 2011-2014 Free Software Foundation, Inc.
   Contributed by ARM Ltd.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "calls.h"
#include "expr.h"
#include "tm_p.h"
#include "recog.h"
#include "langhooks.h"
#include "diagnostic-core.h"
#include "optabs.h"
#include "pointer-set.h"
#include "hash-table.h"
#include "vec.h"
#include "ggc.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
enum aarch64_simd_builtin_type_mode
{
  T_V8QI,
  T_V4HI,
  T_V2SI,
  T_V2SF,
  T_V1DF,
  T_DI,
  T_DF,
  T_V16QI,
  T_V8HI,
  T_V4SI,
  T_V4SF,
  T_V2DI,
  T_V2DF,
  T_TI,
  T_EI,
  T_OI,
  T_XI,
  T_SI,
  T_SF,
  T_HI,
  T_QI,
  T_MAX
};
#define v8qi_UP  T_V8QI
#define v4hi_UP  T_V4HI
#define v2si_UP  T_V2SI
#define v2sf_UP  T_V2SF
#define v1df_UP  T_V1DF
#define di_UP    T_DI
#define df_UP    T_DF
#define v16qi_UP T_V16QI
#define v8hi_UP  T_V8HI
#define v4si_UP  T_V4SI
#define v4sf_UP  T_V4SF
#define v2di_UP  T_V2DI
#define v2df_UP  T_V2DF
#define ti_UP    T_TI
#define ei_UP    T_EI
#define oi_UP    T_OI
#define xi_UP    T_XI
#define si_UP    T_SI
#define sf_UP    T_SF
#define hi_UP    T_HI
#define qi_UP    T_QI

#define UP(X) X##_UP

#define SIMD_MAX_BUILTIN_ARGS 5
enum aarch64_type_qualifiers
{
  /* T foo.  */
  qualifier_none = 0x0,
  /* unsigned T foo.  */
  qualifier_unsigned = 0x1, /* 1 << 0 */
  /* const T foo.  */
  qualifier_const = 0x2, /* 1 << 1 */
  /* T *foo.  */
  qualifier_pointer = 0x4, /* 1 << 2 */
  /* const T *foo.  */
  qualifier_const_pointer = 0x6, /* qualifier_const | qualifier_pointer */
  /* Used when expanding arguments if an operand could
     be an immediate.  */
  qualifier_immediate = 0x8, /* 1 << 3 */
  qualifier_maybe_immediate = 0x10, /* 1 << 4 */
  /* void foo (...).  */
  qualifier_void = 0x20, /* 1 << 5 */
  /* Some patterns may have internal operands, this qualifier is an
     instruction to the initialisation code to skip this operand.  */
  qualifier_internal = 0x40, /* 1 << 6 */
  /* Some builtins should use the T_*mode* encoded in a simd_builtin_datum
     rather than using the type of the operand.  */
  qualifier_map_mode = 0x80, /* 1 << 7 */
  /* qualifier_pointer | qualifier_map_mode */
  qualifier_pointer_map_mode = 0x84,
  /* qualifier_const_pointer | qualifier_map_mode */
  qualifier_const_pointer_map_mode = 0x86,
  /* Polynomial types.  */
  qualifier_poly = 0x100
};
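
/* A minimal sketch (disabled, not part of the build) of how these flags
   compose: the composite values above are plain bitwise ORs of the basic
   flags, so code reading the qualifiers can test individual properties
   with a mask.  The helper name here is hypothetical.  */
#if 0
static bool
example_qualifier_is_const_pointer (enum aarch64_type_qualifiers q)
{
  /* 0x6 == qualifier_const (0x2) | qualifier_pointer (0x4).  */
  return (q & qualifier_const_pointer) == qualifier_const_pointer;
}
#endif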
typedef struct
{
  const char *name;
  enum aarch64_simd_builtin_type_mode mode;
  const enum insn_code code;
  unsigned int fcode;
  enum aarch64_type_qualifiers *qualifiers;
} aarch64_simd_builtin_datum;
/* The qualifier_internal allows generation of a unary builtin from
   a pattern with a third pseudo-operand such as a match_scratch.  */
static enum aarch64_type_qualifiers
aarch64_types_unop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_internal };
#define TYPES_UNOP (aarch64_types_unop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned };
#define TYPES_UNOPU (aarch64_types_unopu_qualifiers)
#define TYPES_CREATE (aarch64_types_unop_qualifiers)
#define TYPES_REINTERP_SS (aarch64_types_unop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unop_su_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned };
#define TYPES_REINTERP_SU (aarch64_types_unop_su_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unop_sp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_poly };
#define TYPES_REINTERP_SP (aarch64_types_unop_sp_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unop_us_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none };
#define TYPES_REINTERP_US (aarch64_types_unop_us_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unop_ps_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_none };
#define TYPES_REINTERP_PS (aarch64_types_unop_ps_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_maybe_immediate };
#define TYPES_BINOP (aarch64_types_binop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopv_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_none, qualifier_none };
#define TYPES_BINOPV (aarch64_types_binopv_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned };
#define TYPES_BINOPU (aarch64_types_binopu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_uus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_none };
#define TYPES_BINOP_UUS (aarch64_types_binop_uus_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_ssu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned };
#define TYPES_BINOP_SSU (aarch64_types_binop_ssu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_poly };
#define TYPES_BINOPP (aarch64_types_binopp_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_ternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
#define TYPES_TERNOP (aarch64_types_ternop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_unsigned };
#define TYPES_TERNOPU (aarch64_types_ternopu_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_quadop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_none, qualifier_none };
#define TYPES_QUADOP (aarch64_types_quadop_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_getlane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_immediate };
#define TYPES_GETLANE (aarch64_types_getlane_qualifiers)
#define TYPES_SHIFTIMM (aarch64_types_getlane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_shift_to_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none, qualifier_immediate };
#define TYPES_SHIFTIMM_USS (aarch64_types_shift_to_unsigned_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unsigned_shift_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate };
#define TYPES_USHIFTIMM (aarch64_types_unsigned_shift_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_setlane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate };
#define TYPES_SETLANE (aarch64_types_setlane_qualifiers)
#define TYPES_SHIFTINSERT (aarch64_types_setlane_qualifiers)
#define TYPES_SHIFTACC (aarch64_types_setlane_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_unsigned_shiftacc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_immediate };
#define TYPES_USHIFTACC (aarch64_types_unsigned_shiftacc_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_combine_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none };
#define TYPES_COMBINE (aarch64_types_combine_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_load1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_const_pointer_map_mode };
#define TYPES_LOAD1 (aarch64_types_load1_qualifiers)
#define TYPES_LOADSTRUCT (aarch64_types_load1_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_bsl_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_unsigned,
      qualifier_poly, qualifier_poly };
#define TYPES_BSL_P (aarch64_types_bsl_p_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_bsl_s_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned,
      qualifier_none, qualifier_none };
#define TYPES_BSL_S (aarch64_types_bsl_s_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_bsl_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_unsigned };
#define TYPES_BSL_U (aarch64_types_bsl_u_qualifiers)

/* The first argument (return type) of a store should be void type,
   which we represent with qualifier_void.  Their first operand will be
   a DImode pointer to the location to store to, so we must use
   qualifier_map_mode | qualifier_pointer to build a pointer to the
   element type of the vector.  */
static enum aarch64_type_qualifiers
aarch64_types_store1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode, qualifier_none };
#define TYPES_STORE1 (aarch64_types_store1_qualifiers)
#define TYPES_STORESTRUCT (aarch64_types_store1_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_storestruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode,
      qualifier_none, qualifier_none };
#define TYPES_STORESTRUCT_LANE (aarch64_types_storestruct_lane_qualifiers)
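
/* As a concrete illustration (a sketch; the exact vector typedef names
   live in arm_neon.h, not here), TYPES_STORE1 at V4SI produces a
   user-facing prototype along the lines of
     void __builtin_aarch64_st1v4si (__builtin_aarch64_simd_si *, int32x4_t);
   i.e. a pointer to the element type plus the vector to be stored.  */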
#define CF0(N, X)   CODE_FOR_aarch64_##N##X
#define CF1(N, X)   CODE_FOR_##N##X##1
#define CF2(N, X)   CODE_FOR_##N##X##2
#define CF3(N, X)   CODE_FOR_##N##X##3
#define CF4(N, X)   CODE_FOR_##N##X##4
#define CF10(N, X)  CODE_FOR_##N##X

#define VAR1(T, N, MAP, A) \
  {#N, UP (A), CF##MAP (N, A), 0, TYPES_##T},
#define VAR2(T, N, MAP, A, B) \
  VAR1 (T, N, MAP, A) \
  VAR1 (T, N, MAP, B)
#define VAR3(T, N, MAP, A, B, C) \
  VAR2 (T, N, MAP, A, B) \
  VAR1 (T, N, MAP, C)
#define VAR4(T, N, MAP, A, B, C, D) \
  VAR3 (T, N, MAP, A, B, C) \
  VAR1 (T, N, MAP, D)
#define VAR5(T, N, MAP, A, B, C, D, E) \
  VAR4 (T, N, MAP, A, B, C, D) \
  VAR1 (T, N, MAP, E)
#define VAR6(T, N, MAP, A, B, C, D, E, F) \
  VAR5 (T, N, MAP, A, B, C, D, E) \
  VAR1 (T, N, MAP, F)
#define VAR7(T, N, MAP, A, B, C, D, E, F, G) \
  VAR6 (T, N, MAP, A, B, C, D, E, F) \
  VAR1 (T, N, MAP, G)
#define VAR8(T, N, MAP, A, B, C, D, E, F, G, H) \
  VAR7 (T, N, MAP, A, B, C, D, E, F, G) \
  VAR1 (T, N, MAP, H)
#define VAR9(T, N, MAP, A, B, C, D, E, F, G, H, I) \
  VAR8 (T, N, MAP, A, B, C, D, E, F, G, H) \
  VAR1 (T, N, MAP, I)
#define VAR10(T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
  VAR9 (T, N, MAP, A, B, C, D, E, F, G, H, I) \
  VAR1 (T, N, MAP, J)
#define VAR11(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
  VAR10 (T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
  VAR1 (T, N, MAP, K)
#define VAR12(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR11 (T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
  VAR1 (T, N, MAP, L)
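
/* Worked example of the expansion (a sketch, assuming an "addv4si3"
   style pattern exists in the .md files):
     VAR2 (BINOP, add, 3, v4si, v2si)
   becomes two initialisers for the table below:
     {"add", T_V4SI, CODE_FOR_addv4si3, 0, TYPES_BINOP},
     {"add", T_V2SI, CODE_FOR_addv2si3, 0, TYPES_BINOP},
   with the zero fcode filled in at initialisation time.  */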
/* BUILTIN_<ITERATOR> macros should expand to cover the same range of
   modes as is given for each define_mode_iterator in
   config/aarch64/iterators.md.  */

#define BUILTIN_DX(T, N, MAP) \
  VAR2 (T, N, MAP, di, df)
#define BUILTIN_GPF(T, N, MAP) \
  VAR2 (T, N, MAP, sf, df)
#define BUILTIN_SDQ_I(T, N, MAP) \
  VAR4 (T, N, MAP, qi, hi, si, di)
#define BUILTIN_SD_HSI(T, N, MAP) \
  VAR2 (T, N, MAP, hi, si)
#define BUILTIN_V2F(T, N, MAP) \
  VAR2 (T, N, MAP, v2sf, v2df)
#define BUILTIN_VALL(T, N, MAP) \
  VAR10 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, \
	 v4si, v2di, v2sf, v4sf, v2df)
#define BUILTIN_VALLDI(T, N, MAP) \
  VAR11 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, \
	 v4si, v2di, v2sf, v4sf, v2df, di)
#define BUILTIN_VALLDIF(T, N, MAP) \
  VAR12 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, \
	 v4si, v2di, v2sf, v4sf, v2df, di, df)
#define BUILTIN_VB(T, N, MAP) \
  VAR2 (T, N, MAP, v8qi, v16qi)
#define BUILTIN_VD(T, N, MAP) \
  VAR4 (T, N, MAP, v8qi, v4hi, v2si, v2sf)
#define BUILTIN_VD1(T, N, MAP) \
  VAR5 (T, N, MAP, v8qi, v4hi, v2si, v2sf, v1df)
#define BUILTIN_VDC(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v4hi, v2si, v2sf, di, df)
#define BUILTIN_VDIC(T, N, MAP) \
  VAR3 (T, N, MAP, v8qi, v4hi, v2si)
#define BUILTIN_VDN(T, N, MAP) \
  VAR3 (T, N, MAP, v4hi, v2si, di)
#define BUILTIN_VDQ(T, N, MAP) \
  VAR7 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di)
#define BUILTIN_VDQF(T, N, MAP) \
  VAR3 (T, N, MAP, v2sf, v4sf, v2df)
#define BUILTIN_VDQF_DF(T, N, MAP) \
  VAR4 (T, N, MAP, v2sf, v4sf, v2df, df)
#define BUILTIN_VDQH(T, N, MAP) \
  VAR2 (T, N, MAP, v4hi, v8hi)
#define BUILTIN_VDQHS(T, N, MAP) \
  VAR4 (T, N, MAP, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VDQIF(T, N, MAP) \
  VAR9 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2sf, v4sf, v2df)
#define BUILTIN_VDQM(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VDQV(T, N, MAP) \
  VAR5 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v4si)
#define BUILTIN_VDQQH(T, N, MAP) \
  VAR4 (T, N, MAP, v8qi, v16qi, v4hi, v8hi)
#define BUILTIN_VDQ_BHSI(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VDQ_I(T, N, MAP) \
  VAR7 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di)
#define BUILTIN_VDW(T, N, MAP) \
  VAR3 (T, N, MAP, v8qi, v4hi, v2si)
#define BUILTIN_VD_BHSI(T, N, MAP) \
  VAR3 (T, N, MAP, v8qi, v4hi, v2si)
#define BUILTIN_VD_HSI(T, N, MAP) \
  VAR2 (T, N, MAP, v4hi, v2si)
#define BUILTIN_VQ(T, N, MAP) \
  VAR6 (T, N, MAP, v16qi, v8hi, v4si, v2di, v4sf, v2df)
#define BUILTIN_VQN(T, N, MAP) \
  VAR3 (T, N, MAP, v8hi, v4si, v2di)
#define BUILTIN_VQW(T, N, MAP) \
  VAR3 (T, N, MAP, v16qi, v8hi, v4si)
#define BUILTIN_VQ_HSI(T, N, MAP) \
  VAR2 (T, N, MAP, v8hi, v4si)
#define BUILTIN_VQ_S(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VSDQ_HSI(T, N, MAP) \
  VAR6 (T, N, MAP, v4hi, v8hi, v2si, v4si, hi, si)
#define BUILTIN_VSDQ_I(T, N, MAP) \
  VAR11 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, qi, hi, si, di)
#define BUILTIN_VSDQ_I_BHSI(T, N, MAP) \
  VAR10 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, qi, hi, si)
#define BUILTIN_VSDQ_I_DI(T, N, MAP) \
  VAR8 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, di)
#define BUILTIN_VSD_HSI(T, N, MAP) \
  VAR4 (T, N, MAP, v4hi, v2si, hi, si)
#define BUILTIN_VSQN_HSDI(T, N, MAP) \
  VAR6 (T, N, MAP, v8hi, v4si, v2di, hi, si, di)
#define BUILTIN_VSTRUCT(T, N, MAP) \
  VAR3 (T, N, MAP, oi, ci, xi)
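
/* For instance (a sketch; the exact entry lives in
   aarch64-simd-builtins.def), something like
     BUILTIN_VDQF (UNOP, floor, 2)
   is exactly VAR3 (UNOP, floor, 2, v2sf, v4sf, v2df), registering the
   three variants that the vectorizer hook below looks up as
   AARCH64_SIMD_BUILTIN_UNOP_floorv2sf, ..._floorv4sf and
   ..._floorv2df.  */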
static aarch64_simd_builtin_datum aarch64_simd_builtin_data[] = {
#include "aarch64-simd-builtins.def"
};

/* There are only 8 CRC32 builtins.  Probably not worth their own .def file.  */
#define AARCH64_CRC32_BUILTINS \
  CRC32_BUILTIN (crc32b, QI) \
  CRC32_BUILTIN (crc32h, HI) \
  CRC32_BUILTIN (crc32w, SI) \
  CRC32_BUILTIN (crc32x, DI) \
  CRC32_BUILTIN (crc32cb, QI) \
  CRC32_BUILTIN (crc32ch, HI) \
  CRC32_BUILTIN (crc32cw, SI) \
  CRC32_BUILTIN (crc32cx, DI)
typedef struct
{
  const char *name;
  enum machine_mode mode;
  const enum insn_code icode;
  unsigned int fcode;
} aarch64_crc_builtin_datum;

#define CRC32_BUILTIN(N, M) \
  AARCH64_BUILTIN_##N,
#undef VAR1
#define VAR1(T, N, MAP, A) \
  AARCH64_SIMD_BUILTIN_##T##_##N##A,

enum aarch64_builtins
{
  AARCH64_BUILTIN_MIN,

  AARCH64_BUILTIN_GET_FPCR,
  AARCH64_BUILTIN_SET_FPCR,
  AARCH64_BUILTIN_GET_FPSR,
  AARCH64_BUILTIN_SET_FPSR,

  AARCH64_SIMD_BUILTIN_BASE,
#include "aarch64-simd-builtins.def"
  AARCH64_SIMD_BUILTIN_MAX = AARCH64_SIMD_BUILTIN_BASE
			     + ARRAY_SIZE (aarch64_simd_builtin_data),
  AARCH64_CRC32_BUILTIN_BASE,
  AARCH64_CRC32_BUILTINS
  AARCH64_CRC32_BUILTIN_MAX,
  AARCH64_BUILTIN_MAX
};

#undef CRC32_BUILTIN
#define CRC32_BUILTIN(N, M) \
  {"__builtin_aarch64_"#N, M##mode, CODE_FOR_aarch64_##N, AARCH64_BUILTIN_##N},

static aarch64_crc_builtin_datum aarch64_crc_builtin_data[] = {
  AARCH64_CRC32_BUILTINS
};

#undef CRC32_BUILTIN
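
/* Sketch of what the two CRC32_BUILTIN definitions produce for the
   crc32b entry: the first pass contributes the enumerator
   AARCH64_BUILTIN_crc32b above, the second the table row
     {"__builtin_aarch64_crc32b", QImode, CODE_FOR_aarch64_crc32b,
      AARCH64_BUILTIN_crc32b},
   so the one AARCH64_CRC32_BUILTINS list drives both the enum and the
   data.  */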
static GTY(()) tree aarch64_builtin_decls[AARCH64_BUILTIN_MAX];

#define NUM_DREG_TYPES 6
#define NUM_QREG_TYPES 6
/* Return a tree for a signed or unsigned argument of either
   the mode specified by MODE, or the inner mode of MODE.  */
tree
aarch64_build_scalar_type (enum machine_mode mode,
			   bool unsigned_p,
			   bool poly_p)
{
#undef INT_TYPES
#define INT_TYPES \
  AARCH64_TYPE_BUILDER (QI) \
  AARCH64_TYPE_BUILDER (HI) \
  AARCH64_TYPE_BUILDER (SI) \
  AARCH64_TYPE_BUILDER (DI) \
  AARCH64_TYPE_BUILDER (EI) \
  AARCH64_TYPE_BUILDER (OI) \
  AARCH64_TYPE_BUILDER (CI) \
  AARCH64_TYPE_BUILDER (XI) \
  AARCH64_TYPE_BUILDER (TI) \

/* Statically declare all the possible types we might need.  */
#undef AARCH64_TYPE_BUILDER
#define AARCH64_TYPE_BUILDER(X) \
  static tree X##_aarch64_type_node_p = NULL; \
  static tree X##_aarch64_type_node_s = NULL; \
  static tree X##_aarch64_type_node_u = NULL;

  INT_TYPES

  static tree float_aarch64_type_node = NULL;
  static tree double_aarch64_type_node = NULL;

  gcc_assert (!VECTOR_MODE_P (mode));

/* If we've already initialised this type, don't initialise it again,
   otherwise ask for a new type of the correct size.  */
#undef AARCH64_TYPE_BUILDER
#define AARCH64_TYPE_BUILDER(X) \
  case X##mode: \
    if (unsigned_p) \
      return (X##_aarch64_type_node_u \
	      ? X##_aarch64_type_node_u \
	      : X##_aarch64_type_node_u \
		  = make_unsigned_type (GET_MODE_PRECISION (mode))); \
    else if (poly_p) \
      return (X##_aarch64_type_node_p \
	      ? X##_aarch64_type_node_p \
	      : X##_aarch64_type_node_p \
		  = make_unsigned_type (GET_MODE_PRECISION (mode))); \
    else \
      return (X##_aarch64_type_node_s \
	      ? X##_aarch64_type_node_s \
	      : X##_aarch64_type_node_s \
		  = make_signed_type (GET_MODE_PRECISION (mode))); \
    break;

  switch (mode)
    {
      INT_TYPES
      case SFmode:
	if (!float_aarch64_type_node)
	  {
	    float_aarch64_type_node = make_node (REAL_TYPE);
	    TYPE_PRECISION (float_aarch64_type_node) = FLOAT_TYPE_SIZE;
	    layout_type (float_aarch64_type_node);
	  }
	return float_aarch64_type_node;
	break;
      case DFmode:
	if (!double_aarch64_type_node)
	  {
	    double_aarch64_type_node = make_node (REAL_TYPE);
	    TYPE_PRECISION (double_aarch64_type_node) = DOUBLE_TYPE_SIZE;
	    layout_type (double_aarch64_type_node);
	  }
	return double_aarch64_type_node;
	break;
      default:
	gcc_unreachable ();
    }
}
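
/* For instance, aarch64_build_scalar_type (SImode, true, false) builds
   a 32-bit unsigned node on first use, caches it in
   SI_aarch64_type_node_u and returns that same node on every later
   call.  */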
tree
aarch64_build_vector_type (enum machine_mode mode,
			   bool unsigned_p,
			   bool poly_p)
{
  tree eltype;

#define VECTOR_TYPES \
  AARCH64_TYPE_BUILDER (V16QI) \
  AARCH64_TYPE_BUILDER (V8HI) \
  AARCH64_TYPE_BUILDER (V4SI) \
  AARCH64_TYPE_BUILDER (V2DI) \
  AARCH64_TYPE_BUILDER (V8QI) \
  AARCH64_TYPE_BUILDER (V4HI) \
  AARCH64_TYPE_BUILDER (V2SI) \
 \
  AARCH64_TYPE_BUILDER (V4SF) \
  AARCH64_TYPE_BUILDER (V2DF) \
  AARCH64_TYPE_BUILDER (V2SF) \
/* Declare our "cache" of values.  */
#undef AARCH64_TYPE_BUILDER
#define AARCH64_TYPE_BUILDER(X) \
  static tree X##_aarch64_type_node_s = NULL; \
  static tree X##_aarch64_type_node_u = NULL; \
  static tree X##_aarch64_type_node_p = NULL;

  VECTOR_TYPES

  gcc_assert (VECTOR_MODE_P (mode));

#undef AARCH64_TYPE_BUILDER
#define AARCH64_TYPE_BUILDER(X) \
  case X##mode: \
    if (unsigned_p) \
      return X##_aarch64_type_node_u \
	     ? X##_aarch64_type_node_u \
	     : X##_aarch64_type_node_u \
		= build_vector_type_for_mode (aarch64_build_scalar_type \
					      (GET_MODE_INNER (mode), \
					       unsigned_p, poly_p), mode); \
    else if (poly_p) \
      return X##_aarch64_type_node_p \
	     ? X##_aarch64_type_node_p \
	     : X##_aarch64_type_node_p \
		= build_vector_type_for_mode (aarch64_build_scalar_type \
					      (GET_MODE_INNER (mode), \
					       unsigned_p, poly_p), mode); \
    else \
      return X##_aarch64_type_node_s \
	     ? X##_aarch64_type_node_s \
	     : X##_aarch64_type_node_s \
		= build_vector_type_for_mode (aarch64_build_scalar_type \
					      (GET_MODE_INNER (mode), \
					       unsigned_p, poly_p), mode); \
    break;

  switch (mode)
    {
      default:
	eltype = aarch64_build_scalar_type (GET_MODE_INNER (mode),
					    unsigned_p, poly_p);
	return build_vector_type_for_mode (eltype, mode);
	break;
      VECTOR_TYPES
    }
}
tree
aarch64_build_type (enum machine_mode mode, bool unsigned_p, bool poly_p)
{
  if (VECTOR_MODE_P (mode))
    return aarch64_build_vector_type (mode, unsigned_p, poly_p);
  else
    return aarch64_build_scalar_type (mode, unsigned_p, poly_p);
}

tree
aarch64_build_signed_type (enum machine_mode mode)
{
  return aarch64_build_type (mode, false, false);
}

tree
aarch64_build_unsigned_type (enum machine_mode mode)
{
  return aarch64_build_type (mode, true, false);
}

tree
aarch64_build_poly_type (enum machine_mode mode)
{
  return aarch64_build_type (mode, false, true);
}
static void
aarch64_init_simd_builtins (void)
{
  unsigned int i, fcode = AARCH64_SIMD_BUILTIN_BASE + 1;

  /* Signed scalar type nodes.  */
  tree aarch64_simd_intQI_type_node = aarch64_build_signed_type (QImode);
  tree aarch64_simd_intHI_type_node = aarch64_build_signed_type (HImode);
  tree aarch64_simd_intSI_type_node = aarch64_build_signed_type (SImode);
  tree aarch64_simd_intDI_type_node = aarch64_build_signed_type (DImode);
  tree aarch64_simd_intTI_type_node = aarch64_build_signed_type (TImode);
  tree aarch64_simd_intEI_type_node = aarch64_build_signed_type (EImode);
  tree aarch64_simd_intOI_type_node = aarch64_build_signed_type (OImode);
  tree aarch64_simd_intCI_type_node = aarch64_build_signed_type (CImode);
  tree aarch64_simd_intXI_type_node = aarch64_build_signed_type (XImode);

  /* Unsigned scalar type nodes.  */
  tree aarch64_simd_intUQI_type_node = aarch64_build_unsigned_type (QImode);
  tree aarch64_simd_intUHI_type_node = aarch64_build_unsigned_type (HImode);
  tree aarch64_simd_intUSI_type_node = aarch64_build_unsigned_type (SImode);
  tree aarch64_simd_intUDI_type_node = aarch64_build_unsigned_type (DImode);

  /* Poly scalar type nodes.  */
  tree aarch64_simd_polyQI_type_node = aarch64_build_poly_type (QImode);
  tree aarch64_simd_polyHI_type_node = aarch64_build_poly_type (HImode);
  tree aarch64_simd_polyDI_type_node = aarch64_build_poly_type (DImode);
  tree aarch64_simd_polyTI_type_node = aarch64_build_poly_type (TImode);

  /* Float type nodes.  */
  tree aarch64_simd_float_type_node = aarch64_build_signed_type (SFmode);
  tree aarch64_simd_double_type_node = aarch64_build_signed_type (DFmode);

  /* Define typedefs which exactly correspond to the modes we are basing vector
     types on.  If you change these names you'll need to change
     the table used by aarch64_mangle_type too.  */
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intQI_type_node,
					     "__builtin_aarch64_simd_qi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intHI_type_node,
					     "__builtin_aarch64_simd_hi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intSI_type_node,
					     "__builtin_aarch64_simd_si");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_float_type_node,
					     "__builtin_aarch64_simd_sf");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intDI_type_node,
					     "__builtin_aarch64_simd_di");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_double_type_node,
					     "__builtin_aarch64_simd_df");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_polyQI_type_node,
					     "__builtin_aarch64_simd_poly8");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_polyHI_type_node,
					     "__builtin_aarch64_simd_poly16");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_polyDI_type_node,
					     "__builtin_aarch64_simd_poly64");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_polyTI_type_node,
					     "__builtin_aarch64_simd_poly128");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intTI_type_node,
					     "__builtin_aarch64_simd_ti");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intEI_type_node,
					     "__builtin_aarch64_simd_ei");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intOI_type_node,
					     "__builtin_aarch64_simd_oi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intCI_type_node,
					     "__builtin_aarch64_simd_ci");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intXI_type_node,
					     "__builtin_aarch64_simd_xi");

  /* Unsigned integer types for various mode sizes.  */
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intUQI_type_node,
					     "__builtin_aarch64_simd_uqi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intUHI_type_node,
					     "__builtin_aarch64_simd_uhi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intUSI_type_node,
					     "__builtin_aarch64_simd_usi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intUDI_type_node,
					     "__builtin_aarch64_simd_udi");

  for (i = 0; i < ARRAY_SIZE (aarch64_simd_builtin_data); i++, fcode++)
    {
      bool print_type_signature_p = false;
      char type_signature[SIMD_MAX_BUILTIN_ARGS] = { 0 };
      aarch64_simd_builtin_datum *d = &aarch64_simd_builtin_data[i];
      const char *const modenames[] =
	{
	  "v8qi", "v4hi", "v2si", "v2sf", "v1df", "di", "df",
	  "v16qi", "v8hi", "v4si", "v4sf", "v2di", "v2df",
	  "ti", "ei", "oi", "xi", "si", "sf", "hi", "qi"
	};
      const enum machine_mode modes[] =
	{
	  V8QImode, V4HImode, V2SImode, V2SFmode, V1DFmode, DImode, DFmode,
	  V16QImode, V8HImode, V4SImode, V4SFmode, V2DImode,
	  V2DFmode, TImode, EImode, OImode, XImode, SImode,
	  SFmode, HImode, QImode
	};
      char namebuf[60];
      tree ftype = NULL;
      tree fndecl = NULL;

      gcc_assert (ARRAY_SIZE (modenames) == T_MAX);

      d->fcode = fcode;

      /* We must track two variables here.  op_num is
	 the operand number as in the RTL pattern.  This is
	 required to access the mode (e.g. V4SF mode) of the
	 argument, from which the base type can be derived.
	 arg_num is an index in to the qualifiers data, which
	 gives qualifiers to the type (e.g. const unsigned).
	 The reason these two variables may differ by one is the
	 void return type.  While all return types take the 0th entry
	 in the qualifiers array, there is no operand for them in the
	 RTL pattern.  */
      int op_num = insn_data[d->code].n_operands - 1;
      int arg_num = d->qualifiers[0] & qualifier_void
		      ? op_num + 1
		      : op_num;
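      /* For example, a vst1-style store pattern has two RTL operands
	 (the memory destination and the value), so op_num starts at 1,
	 while its qualifiers are {void, pointer, value} and arg_num
	 starts at 2; arg_num never reaches 0, so return_type below is
	 left as void_type_node.  */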
      tree return_type = void_type_node, args = void_list_node;
      tree eltype;

      /* Build a function type directly from the insn_data for this
	 builtin.  The build_function_type () function takes care of
	 removing duplicates for us.  */
      for (; op_num >= 0; arg_num--, op_num--)
	{
	  enum machine_mode op_mode = insn_data[d->code].operand[op_num].mode;
	  enum aarch64_type_qualifiers qualifiers = d->qualifiers[arg_num];

	  if (qualifiers & qualifier_unsigned)
	    {
	      type_signature[arg_num] = 'u';
	      print_type_signature_p = true;
	    }
	  else if (qualifiers & qualifier_poly)
	    {
	      type_signature[arg_num] = 'p';
	      print_type_signature_p = true;
	    }
	  else
	    type_signature[arg_num] = 's';

	  /* Skip an internal operand for vget_{low, high}.  */
	  if (qualifiers & qualifier_internal)
	    continue;

	  /* Some builtins have different user-facing types
	     for certain arguments, encoded in d->mode.  */
	  if (qualifiers & qualifier_map_mode)
	    op_mode = modes[d->mode];

	  /* For pointers, we want a pointer to the basic type
	     of the vector.  */
	  if (qualifiers & qualifier_pointer && VECTOR_MODE_P (op_mode))
	    op_mode = GET_MODE_INNER (op_mode);

	  eltype = aarch64_build_type (op_mode,
				       qualifiers & qualifier_unsigned,
				       qualifiers & qualifier_poly);

	  /* Add qualifiers.  */
	  if (qualifiers & qualifier_const)
	    eltype = build_qualified_type (eltype, TYPE_QUAL_CONST);

	  if (qualifiers & qualifier_pointer)
	    eltype = build_pointer_type (eltype);

	  /* If we have reached arg_num == 0, we are at a non-void
	     return type.  Otherwise, we are still processing
	     arguments.  */
	  if (arg_num == 0)
	    return_type = eltype;
	  else
	    args = tree_cons (NULL_TREE, eltype, args);
	}

      ftype = build_function_type (return_type, args);

      gcc_assert (ftype != NULL);

      if (print_type_signature_p)
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s%s_%s",
		  d->name, modenames[d->mode], type_signature);
      else
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s%s",
		  d->name, modenames[d->mode]);

      fndecl = add_builtin_function (namebuf, ftype, fcode, BUILT_IN_MD,
				     NULL, NULL_TREE);
      aarch64_builtin_decls[fcode] = fndecl;
    }
}
static void
aarch64_init_crc32_builtins ()
{
  tree usi_type = aarch64_build_unsigned_type (SImode);
  unsigned int i = 0;

  for (i = 0; i < ARRAY_SIZE (aarch64_crc_builtin_data); ++i)
    {
      aarch64_crc_builtin_datum* d = &aarch64_crc_builtin_data[i];
      tree argtype = aarch64_build_unsigned_type (d->mode);
      tree ftype = build_function_type_list (usi_type, usi_type, argtype,
					     NULL_TREE);
      tree fndecl = add_builtin_function (d->name, ftype, d->fcode,
					  BUILT_IN_MD, NULL, NULL_TREE);

      aarch64_builtin_decls[d->fcode] = fndecl;
    }
}
void
aarch64_init_builtins (void)
{
  tree ftype_set_fpr
    = build_function_type_list (void_type_node, unsigned_type_node, NULL);
  tree ftype_get_fpr
    = build_function_type_list (unsigned_type_node, NULL);

  aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR]
    = add_builtin_function ("__builtin_aarch64_get_fpcr", ftype_get_fpr,
			    AARCH64_BUILTIN_GET_FPCR, BUILT_IN_MD, NULL, NULL_TREE);
  aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR]
    = add_builtin_function ("__builtin_aarch64_set_fpcr", ftype_set_fpr,
			    AARCH64_BUILTIN_SET_FPCR, BUILT_IN_MD, NULL, NULL_TREE);
  aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR]
    = add_builtin_function ("__builtin_aarch64_get_fpsr", ftype_get_fpr,
			    AARCH64_BUILTIN_GET_FPSR, BUILT_IN_MD, NULL, NULL_TREE);
  aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR]
    = add_builtin_function ("__builtin_aarch64_set_fpsr", ftype_set_fpr,
			    AARCH64_BUILTIN_SET_FPSR, BUILT_IN_MD, NULL, NULL_TREE);

  if (TARGET_SIMD)
    aarch64_init_simd_builtins ();
  if (TARGET_CRC32)
    aarch64_init_crc32_builtins ();
}

tree
aarch64_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
{
  if (code >= AARCH64_BUILTIN_MAX)
    return error_mark_node;

  return aarch64_builtin_decls[code];
}
typedef enum
{
  SIMD_ARG_COPY_TO_REG,
  SIMD_ARG_CONSTANT,
  SIMD_ARG_STOP
} builtin_simd_arg;

static rtx
aarch64_simd_expand_args (rtx target, int icode, int have_retval,
			  tree exp, ...)
{
  va_list ap;
  rtx pat;
  tree arg[SIMD_MAX_BUILTIN_ARGS];
  rtx op[SIMD_MAX_BUILTIN_ARGS];
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode[SIMD_MAX_BUILTIN_ARGS];
  int argc = 0;

  if (have_retval
      && (!target
	  || GET_MODE (target) != tmode
	  || !(*insn_data[icode].operand[0].predicate) (target, tmode)))
    target = gen_reg_rtx (tmode);

  va_start (ap, exp);

  for (;;)
    {
      builtin_simd_arg thisarg = (builtin_simd_arg) va_arg (ap, int);

      if (thisarg == SIMD_ARG_STOP)
	break;
      else
	{
	  arg[argc] = CALL_EXPR_ARG (exp, argc);
	  op[argc] = expand_normal (arg[argc]);
	  mode[argc] = insn_data[icode].operand[argc + have_retval].mode;

	  switch (thisarg)
	    {
	    case SIMD_ARG_COPY_TO_REG:
	      if (POINTER_TYPE_P (TREE_TYPE (arg[argc])))
		op[argc] = convert_memory_address (Pmode, op[argc]);
	      /*gcc_assert (GET_MODE (op[argc]) == mode[argc]); */
	      if (!(*insn_data[icode].operand[argc + have_retval].predicate)
		  (op[argc], mode[argc]))
		op[argc] = copy_to_mode_reg (mode[argc], op[argc]);
	      break;

	    case SIMD_ARG_CONSTANT:
	      if (!(*insn_data[icode].operand[argc + have_retval].predicate)
		  (op[argc], mode[argc]))
		error_at (EXPR_LOCATION (exp), "incompatible type for argument %d, "
			  "expected %<const int%>", argc + 1);
	      break;

	    case SIMD_ARG_STOP:
	      gcc_unreachable ();
	    }

	  argc++;
	}
    }

  va_end (ap);

  if (have_retval)
    switch (argc)
      {
      case 1:
	pat = GEN_FCN (icode) (target, op[0]);
	break;

      case 2:
	pat = GEN_FCN (icode) (target, op[0], op[1]);
	break;

      case 3:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2]);
	break;

      case 4:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3]);
	break;

      case 5:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3], op[4]);
	break;

      default:
	gcc_unreachable ();
      }
  else
    switch (argc)
      {
      case 1:
	pat = GEN_FCN (icode) (op[0]);
	break;

      case 2:
	pat = GEN_FCN (icode) (op[0], op[1]);
	break;

      case 3:
	pat = GEN_FCN (icode) (op[0], op[1], op[2]);
	break;

      case 4:
	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
	break;

      case 5:
	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
	break;

      default:
	gcc_unreachable ();
      }

  if (!pat)
    return 0;

  emit_insn (pat);

  return target;
}
/* Expand an AArch64 AdvSIMD builtin (intrinsic).  */
rtx
aarch64_simd_expand_builtin (int fcode, tree exp, rtx target)
{
  aarch64_simd_builtin_datum *d =
    &aarch64_simd_builtin_data[fcode - (AARCH64_SIMD_BUILTIN_BASE + 1)];
  enum insn_code icode = d->code;
  builtin_simd_arg args[SIMD_MAX_BUILTIN_ARGS];
  int num_args = insn_data[d->code].n_operands;
  int is_void = 0;
  int k;

  is_void = !!(d->qualifiers[0] & qualifier_void);

  num_args += is_void;

  for (k = 1; k < num_args; k++)
    {
      /* We have four arrays of data, each indexed in a different fashion.
	 qualifiers - element 0 always describes the function return type.
	 operands - element 0 is either the operand for return value (if
	   the function has a non-void return type) or the operand for the
	   first argument.
	 expr_args - element 0 always holds the first argument.
	 args - element 0 is always used for the return type.  */
      int qualifiers_k = k;
      int operands_k = k - is_void;
      int expr_args_k = k - 1;
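      /* E.g. for a non-void binary builtin at k == 1: qualifiers[1]
	 describes the first argument, RTL operand 1 holds it (operand 0
	 being the return value), and CALL_EXPR argument 0 is its tree.  */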
      if (d->qualifiers[qualifiers_k] & qualifier_immediate)
	args[k] = SIMD_ARG_CONSTANT;
      else if (d->qualifiers[qualifiers_k] & qualifier_maybe_immediate)
	{
	  rtx arg
	    = expand_normal (CALL_EXPR_ARG (exp,
					    (expr_args_k)));
	  /* Handle constants only if the predicate allows it.  */
	  bool op_const_int_p =
	    (CONST_INT_P (arg)
	     && (*insn_data[icode].operand[operands_k].predicate)
		(arg, insn_data[icode].operand[operands_k].mode));
	  args[k] = op_const_int_p ? SIMD_ARG_CONSTANT : SIMD_ARG_COPY_TO_REG;
	}
      else
	args[k] = SIMD_ARG_COPY_TO_REG;
    }

  args[k] = SIMD_ARG_STOP;

  /* The interface to aarch64_simd_expand_args expects a 0 if
     the function is void, and a 1 if it is not.  */
  return aarch64_simd_expand_args
	  (target, icode, !is_void, exp,
	   args[1],
	   args[2],
	   args[3],
	   args[4],
	   SIMD_ARG_STOP);
}
static rtx
aarch64_crc32_expand_builtin (int fcode, tree exp, rtx target)
{
  rtx pat;
  aarch64_crc_builtin_datum *d
    = &aarch64_crc_builtin_data[fcode - (AARCH64_CRC32_BUILTIN_BASE + 1)];
  enum insn_code icode = d->icode;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  gcc_assert ((GET_MODE (op0) == mode0 || GET_MODE (op0) == VOIDmode)
	      && (GET_MODE (op1) == mode1 || GET_MODE (op1) == VOIDmode));

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  pat = GEN_FCN (icode) (target, op0, op1);
  if (! pat)
    return 0;
  emit_insn (pat);
  return target;
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient.  */
rtx
aarch64_expand_builtin (tree exp,
			rtx target,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  int fcode = DECL_FUNCTION_CODE (fndecl);
  int icode;
  rtx pat, op0;
  tree arg0;

  switch (fcode)
    {
    case AARCH64_BUILTIN_GET_FPCR:
    case AARCH64_BUILTIN_SET_FPCR:
    case AARCH64_BUILTIN_GET_FPSR:
    case AARCH64_BUILTIN_SET_FPSR:
      if ((fcode == AARCH64_BUILTIN_GET_FPCR)
	  || (fcode == AARCH64_BUILTIN_GET_FPSR))
	{
	  icode = (fcode == AARCH64_BUILTIN_GET_FPSR) ?
	    CODE_FOR_get_fpsr : CODE_FOR_get_fpcr;
	  target = gen_reg_rtx (SImode);
	  pat = GEN_FCN (icode) (target);
	}
      else
	{
	  target = NULL_RTX;
	  icode = (fcode == AARCH64_BUILTIN_SET_FPSR) ?
	    CODE_FOR_set_fpsr : CODE_FOR_set_fpcr;
	  arg0 = CALL_EXPR_ARG (exp, 0);
	  op0 = expand_normal (arg0);
	  pat = GEN_FCN (icode) (op0);
	}
      emit_insn (pat);
      return target;
    }

  if (fcode >= AARCH64_SIMD_BUILTIN_BASE && fcode <= AARCH64_SIMD_BUILTIN_MAX)
    return aarch64_simd_expand_builtin (fcode, exp, target);
  else if (fcode >= AARCH64_CRC32_BUILTIN_BASE
	   && fcode <= AARCH64_CRC32_BUILTIN_MAX)
    return aarch64_crc32_expand_builtin (fcode, exp, target);

  return NULL_RTX;
}
tree
aarch64_builtin_vectorized_function (tree fndecl, tree type_out, tree type_in)
{
  enum machine_mode in_mode, out_mode;
  int in_n, out_n;

  if (TREE_CODE (type_out) != VECTOR_TYPE
      || TREE_CODE (type_in) != VECTOR_TYPE)
    return NULL_TREE;

  out_mode = TYPE_MODE (TREE_TYPE (type_out));
  out_n = TYPE_VECTOR_SUBPARTS (type_out);
  in_mode = TYPE_MODE (TREE_TYPE (type_in));
  in_n = TYPE_VECTOR_SUBPARTS (type_in);

#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) 1
#define AARCH64_FIND_FRINT_VARIANT(N) \
  (AARCH64_CHECK_BUILTIN_MODE (2, D) \
    ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2df] \
    : (AARCH64_CHECK_BUILTIN_MODE (4, S) \
	? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v4sf] \
	: (AARCH64_CHECK_BUILTIN_MODE (2, S) \
	   ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2sf] \
	   : NULL_TREE)))
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      enum built_in_function fn = DECL_FUNCTION_CODE (fndecl);
      switch (fn)
	{
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Fmode && out_n == C \
   && in_mode == N##Fmode && in_n == C)
	case BUILT_IN_FLOOR:
	case BUILT_IN_FLOORF:
	  return AARCH64_FIND_FRINT_VARIANT (floor);
	case BUILT_IN_CEIL:
	case BUILT_IN_CEILF:
	  return AARCH64_FIND_FRINT_VARIANT (ceil);
	case BUILT_IN_TRUNC:
	case BUILT_IN_TRUNCF:
	  return AARCH64_FIND_FRINT_VARIANT (btrunc);
	case BUILT_IN_ROUND:
	case BUILT_IN_ROUNDF:
	  return AARCH64_FIND_FRINT_VARIANT (round);
	case BUILT_IN_NEARBYINT:
	case BUILT_IN_NEARBYINTF:
	  return AARCH64_FIND_FRINT_VARIANT (nearbyint);
	case BUILT_IN_SQRT:
	case BUILT_IN_SQRTF:
	  return AARCH64_FIND_FRINT_VARIANT (sqrt);
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == SImode && out_n == C \
   && in_mode == N##Imode && in_n == C)
	case BUILT_IN_CLZ:
	  {
	    if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_clzv4si];
	    return NULL_TREE;
	  }
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Imode && out_n == C \
   && in_mode == N##Fmode && in_n == C)
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOOR:
	case BUILT_IN_IFLOORF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }
	case BUILT_IN_LCEIL:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEIL:
	case BUILT_IN_ICEILF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }
	case BUILT_IN_LROUND:
	case BUILT_IN_IROUNDF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }
	case BUILT_IN_BSWAP16:
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Imode && out_n == C \
   && in_mode == N##Imode && in_n == C)
	  if (AARCH64_CHECK_BUILTIN_MODE (4, H))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv4hi];
	  else if (AARCH64_CHECK_BUILTIN_MODE (8, H))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv8hi];
	  else
	    return NULL_TREE;
	case BUILT_IN_BSWAP32:
	  if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv2si];
	  else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv4si];
	  else
	    return NULL_TREE;
	case BUILT_IN_BSWAP64:
	  if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv2di];
	  else
	    return NULL_TREE;
	default:
	  return NULL_TREE;
	}
    }

  return NULL_TREE;
}
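
/* Sketch of the hook in action: when the vectorizer asks for floorf
   over four floats (type_out == type_in == a 4 x float vector type),
   the (4, S) check fires and the decl registered for
   AARCH64_SIMD_BUILTIN_UNOP_floorv4sf is returned; any mode or width
   mismatch falls through to NULL_TREE and the call stays scalar.  */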
#undef VAR1
#define VAR1(T, N, MAP, A) \
  case AARCH64_SIMD_BUILTIN_##T##_##N##A:

tree
aarch64_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *args,
		      bool ignore ATTRIBUTE_UNUSED)
{
  int fcode = DECL_FUNCTION_CODE (fndecl);
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  switch (fcode)
    {
      BUILTIN_VALLDI (UNOP, abs, 2)
	return fold_build1 (ABS_EXPR, type, args[0]);
	break;
      BUILTIN_VALLDI (BINOP, cmge, 0)
	return fold_build2 (GE_EXPR, type, args[0], args[1]);
	break;
      BUILTIN_VALLDI (BINOP, cmgt, 0)
	return fold_build2 (GT_EXPR, type, args[0], args[1]);
	break;
      BUILTIN_VALLDI (BINOP, cmeq, 0)
	return fold_build2 (EQ_EXPR, type, args[0], args[1]);
	break;
      BUILTIN_VSDQ_I_DI (BINOP, cmtst, 0)
	{
	  tree and_node = fold_build2 (BIT_AND_EXPR, type, args[0], args[1]);
	  tree vec_zero_node = build_zero_cst (type);
	  return fold_build2 (NE_EXPR, type, and_node, vec_zero_node);
	  break;
	}
      VAR1 (REINTERP_SS, reinterpretdi, 0, v1df)
      VAR1 (REINTERP_SS, reinterpretv8qi, 0, v1df)
      VAR1 (REINTERP_SS, reinterpretv4hi, 0, v1df)
      VAR1 (REINTERP_SS, reinterpretv2si, 0, v1df)
      VAR1 (REINTERP_SS, reinterpretv2sf, 0, v1df)
      BUILTIN_VD (REINTERP_SS, reinterpretv1df, 0)
      BUILTIN_VD (REINTERP_SU, reinterpretv1df, 0)
      VAR1 (REINTERP_US, reinterpretdi, 0, v1df)
      VAR1 (REINTERP_US, reinterpretv8qi, 0, v1df)
      VAR1 (REINTERP_US, reinterpretv4hi, 0, v1df)
      VAR1 (REINTERP_US, reinterpretv2si, 0, v1df)
      VAR1 (REINTERP_US, reinterpretv2sf, 0, v1df)
      BUILTIN_VD (REINTERP_SP, reinterpretv1df, 0)
      VAR1 (REINTERP_PS, reinterpretdi, 0, v1df)
      VAR1 (REINTERP_PS, reinterpretv8qi, 0, v1df)
      VAR1 (REINTERP_PS, reinterpretv4hi, 0, v1df)
      VAR1 (REINTERP_PS, reinterpretv2sf, 0, v1df)
	return fold_build1 (VIEW_CONVERT_EXPR, type, args[0]);
      VAR1 (UNOP, floatv2si, 2, v2sf)
      VAR1 (UNOP, floatv4si, 2, v4sf)
      VAR1 (UNOP, floatv2di, 2, v2df)
	return fold_build1 (FLOAT_EXPR, type, args[0]);
      default:
	break;
    }

  return NULL_TREE;
}
bool
aarch64_gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  tree call = gimple_call_fn (stmt);
  tree fndecl;
  gimple new_stmt = NULL;

  /* The operations folded below are reduction operations.  These are
     defined to leave their result in the 0'th element (from the perspective
     of GCC).  The architectural instruction we are folding will leave the
     result in the 0'th element (from the perspective of the architecture).
     For big-endian systems, these perspectives are not aligned.

     It is therefore wrong to perform this fold on big-endian.  There
     are some tricks we could play with shuffling, but the mid-end is
     inconsistent in the way it treats reduction operations, so we will
     end up in difficulty.  Until we fix the ambiguity - just bail out.  */
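  /* Concretely, for a V4SI sum the instruction leaves the scalar in
     architectural lane 0, but on big-endian that is not the lane GCC
     numbers as element 0, so the element-0 extract the mid-end later
     emits for REDUC_PLUS_EXPR would read a lane the result is not in.  */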
  if (BYTES_BIG_ENDIAN)
    return false;

  if (call)
    {
      fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  int fcode = DECL_FUNCTION_CODE (fndecl);
	  int nargs = gimple_call_num_args (stmt);
	  tree *args = (nargs > 0
			? gimple_call_arg_ptr (stmt, 0)
			: &error_mark_node);

	  switch (fcode)
	    {
	      BUILTIN_VALL (UNOP, reduc_splus_, 10)
		new_stmt = gimple_build_assign_with_ops (
						REDUC_PLUS_EXPR,
						gimple_call_lhs (stmt),
						args[0],
						NULL_TREE);
		break;
	      BUILTIN_VDQIF (UNOP, reduc_smax_, 10)
		new_stmt = gimple_build_assign_with_ops (
						REDUC_MAX_EXPR,
						gimple_call_lhs (stmt),
						args[0],
						NULL_TREE);
		break;
	      BUILTIN_VDQIF (UNOP, reduc_smin_, 10)
		new_stmt = gimple_build_assign_with_ops (
						REDUC_MIN_EXPR,
						gimple_call_lhs (stmt),
						args[0],
						NULL_TREE);
		break;

	      default:
		break;
	    }
	}
    }

  if (new_stmt)
    {
      gsi_replace (gsi, new_stmt, true);
      changed = true;
    }

  return changed;
}
void
aarch64_atomic_assign_expand_fenv (tree *hold, tree *clear, tree *update)
{
  const unsigned AARCH64_FE_INVALID = 1;
  const unsigned AARCH64_FE_DIVBYZERO = 2;
  const unsigned AARCH64_FE_OVERFLOW = 4;
  const unsigned AARCH64_FE_UNDERFLOW = 8;
  const unsigned AARCH64_FE_INEXACT = 16;
  const unsigned HOST_WIDE_INT AARCH64_FE_ALL_EXCEPT = (AARCH64_FE_INVALID
							| AARCH64_FE_DIVBYZERO
							| AARCH64_FE_OVERFLOW
							| AARCH64_FE_UNDERFLOW
							| AARCH64_FE_INEXACT);
  const unsigned HOST_WIDE_INT AARCH64_FE_EXCEPT_SHIFT = 8;
  tree fenv_cr, fenv_sr, get_fpcr, set_fpcr, mask_cr, mask_sr;
  tree ld_fenv_cr, ld_fenv_sr, masked_fenv_cr, masked_fenv_sr, hold_fnclex_cr;
  tree hold_fnclex_sr, new_fenv_var, reload_fenv, restore_fnenv;
  tree get_fpsr, set_fpsr;
  tree update_call, atomic_feraiseexcept, hold_fnclex, masked_fenv, ld_fenv;

  /* Generate the equivalent of:
       unsigned int fenv_cr;
       fenv_cr = __builtin_aarch64_get_fpcr ();

       unsigned int fenv_sr;
       fenv_sr = __builtin_aarch64_get_fpsr ();

     Now set all exceptions to non-stop:
       unsigned int mask_cr
	 = ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT);
       unsigned int masked_cr;
       masked_cr = fenv_cr & mask_cr;

     And clear all exception flags:
       unsigned int mask_sr = ~AARCH64_FE_ALL_EXCEPT;
       unsigned int masked_sr;
       masked_sr = fenv_sr & mask_sr;

       __builtin_aarch64_set_fpcr (masked_cr);
       __builtin_aarch64_set_fpsr (masked_sr);  */

  fenv_cr = create_tmp_var (unsigned_type_node, NULL);
  fenv_sr = create_tmp_var (unsigned_type_node, NULL);

  get_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR];
  set_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR];
  get_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR];
  set_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR];

  mask_cr = build_int_cst (unsigned_type_node,
			   ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT));
  mask_sr = build_int_cst (unsigned_type_node,
			   ~(AARCH64_FE_ALL_EXCEPT));

  ld_fenv_cr = build2 (MODIFY_EXPR, unsigned_type_node,
		       fenv_cr, build_call_expr (get_fpcr, 0));
  ld_fenv_sr = build2 (MODIFY_EXPR, unsigned_type_node,
		       fenv_sr, build_call_expr (get_fpsr, 0));

  masked_fenv_cr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_cr, mask_cr);
  masked_fenv_sr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_sr, mask_sr);

  hold_fnclex_cr = build_call_expr (set_fpcr, 1, masked_fenv_cr);
  hold_fnclex_sr = build_call_expr (set_fpsr, 1, masked_fenv_sr);

  hold_fnclex = build2 (COMPOUND_EXPR, void_type_node, hold_fnclex_cr,
			hold_fnclex_sr);
  masked_fenv = build2 (COMPOUND_EXPR, void_type_node, masked_fenv_cr,
			masked_fenv_sr);
  ld_fenv = build2 (COMPOUND_EXPR, void_type_node, ld_fenv_cr, ld_fenv_sr);

  *hold = build2 (COMPOUND_EXPR, void_type_node,
		  build2 (COMPOUND_EXPR, void_type_node, masked_fenv, ld_fenv),
		  hold_fnclex);

  /* Store the value of masked_fenv to clear the exceptions:
       __builtin_aarch64_set_fpsr (masked_fenv_sr);  */

  *clear = build_call_expr (set_fpsr, 1, masked_fenv_sr);

  /* Generate the equivalent of:
       unsigned int new_fenv_var;
       new_fenv_var = __builtin_aarch64_get_fpsr ();

       __builtin_aarch64_set_fpsr (fenv_sr);

       __atomic_feraiseexcept (new_fenv_var);  */

  new_fenv_var = create_tmp_var (unsigned_type_node, NULL);
  reload_fenv = build2 (MODIFY_EXPR, unsigned_type_node,
			new_fenv_var, build_call_expr (get_fpsr, 0));
  restore_fnenv = build_call_expr (set_fpsr, 1, fenv_sr);
  atomic_feraiseexcept = builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT);
  update_call = build_call_expr (atomic_feraiseexcept, 1,
				 fold_convert (integer_type_node, new_fenv_var));
  *update = build2 (COMPOUND_EXPR, void_type_node,
		    build2 (COMPOUND_EXPR, void_type_node,
			    reload_fenv, restore_fnenv), update_call);
}
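
/* A usage note (paraphrasing the generic hook's contract, not code from
   this file): the three trees above feed the middle-end machinery for
   C11 atomic compound assignment on floating types.  *hold runs before
   the compare-and-exchange loop to save state and silence traps,
   *clear re-clears the flags when the exchange fails and the loop
   retries, and *update restores the saved state and re-raises any
   exceptions the successful operation set.  */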
#undef AARCH64_CHECK_BUILTIN_MODE
#undef AARCH64_FIND_FRINT_VARIANT
#undef BUILTIN_DX
#undef BUILTIN_SDQ_I
#undef BUILTIN_SD_HSI
#undef BUILTIN_V2F
#undef BUILTIN_VALL
#undef BUILTIN_VB
#undef BUILTIN_VD
#undef BUILTIN_VD1
#undef BUILTIN_VDC
#undef BUILTIN_VDIC
#undef BUILTIN_VDN
#undef BUILTIN_VDQ
#undef BUILTIN_VDQF
#undef BUILTIN_VDQH
#undef BUILTIN_VDQHS
#undef BUILTIN_VDQIF
#undef BUILTIN_VDQM
#undef BUILTIN_VDQV
#undef BUILTIN_VDQ_BHSI
#undef BUILTIN_VDQ_I
#undef BUILTIN_VDW
#undef BUILTIN_VD_BHSI
#undef BUILTIN_VD_HSI
#undef BUILTIN_VQ
#undef BUILTIN_VQN
#undef BUILTIN_VQW
#undef BUILTIN_VQ_HSI
#undef BUILTIN_VQ_S
#undef BUILTIN_VSDQ_HSI
#undef BUILTIN_VSDQ_I
#undef BUILTIN_VSDQ_I_BHSI
#undef BUILTIN_VSDQ_I_DI
#undef BUILTIN_VSD_HSI
#undef BUILTIN_VSQN_HSDI
#undef BUILTIN_VSTRUCT
#undef CF0
#undef CF1
#undef CF2
#undef CF3
#undef CF4
#undef CF10
#undef VAR1
#undef VAR2
#undef VAR3
#undef VAR4
#undef VAR5
#undef VAR6
#undef VAR7
#undef VAR8
#undef VAR9
#undef VAR10
#undef VAR11