1 /* Builtins' description for AArch64 SIMD architecture.
2 Copyright (C) 2011-2013 Free Software Foundation, Inc.
3 Contributed by ARM Ltd.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
30 #include "langhooks.h"
31 #include "diagnostic-core.h"
/* Index of the machine mode / element type a builtin variant operates on
   (T_V8QI, T_V4HI, ...).
   NOTE(review): the enumerator list itself is not visible in this
   extract -- confirm against the full source.  */
35 enum aarch64_simd_builtin_type_mode
/* Shorthand "<mode>_UP" macros mapping a lower-case mode name onto its
   T_* enumerator; consumed via the UP (A) expansion inside VAR1 below.  */
/* 64-bit (doubleword) vector modes.  */
60 #define v8qi_UP T_V8QI
61 #define v4hi_UP T_V4HI
62 #define v2si_UP T_V2SI
63 #define v2sf_UP T_V2SF
/* 128-bit (quadword) vector modes.  */
66 #define v16qi_UP T_V16QI
67 #define v8hi_UP T_V8HI
68 #define v4si_UP T_V4SI
69 #define v4sf_UP T_V4SF
70 #define v2di_UP T_V2DI
71 #define v2df_UP T_V2DF
/* Classification of each AdvSIMD builtin by the shape of its prototype
   (operand count, load/store-ness, lane/scalar variants, ...); driven by
   the switches in aarch64_init_simd_builtins and
   aarch64_simd_expand_builtin below.
   NOTE(review): the enum head and the earlier enumerators (UNOP, BINOP,
   TERNOP, ... referenced by those switches) are missing from this
   extract -- confirm against the full source.  */
97 AARCH64_SIMD_LANEMULL
,
98 AARCH64_SIMD_LANEMULH
,
100 AARCH64_SIMD_SCALARMUL
,
101 AARCH64_SIMD_SCALARMULL
,
102 AARCH64_SIMD_SCALARMULH
,
103 AARCH64_SIMD_SCALARMAC
,
104 AARCH64_SIMD_CONVERT
,
105 AARCH64_SIMD_FIXCONV
,
107 AARCH64_SIMD_RESULTPAIR
,
108 AARCH64_SIMD_REINTERP
,
/* Memory-access variants.  */
112 AARCH64_SIMD_LOAD1LANE
,
114 AARCH64_SIMD_STORE1LANE
,
115 AARCH64_SIMD_LOADSTRUCT
,
116 AARCH64_SIMD_LOADSTRUCTLANE
,
117 AARCH64_SIMD_STORESTRUCT
,
118 AARCH64_SIMD_STORESTRUCTLANE
,
/* Logic and shift variants.  */
119 AARCH64_SIMD_LOGICBINOP
,
120 AARCH64_SIMD_SHIFTINSERT
,
121 AARCH64_SIMD_SHIFTIMM
,
122 AARCH64_SIMD_SHIFTACC
123 } aarch64_simd_itype
;
/* Per-builtin record: prototype class (itype), type/mode index and the
   generated insn code to expand to.
   NOTE(review): the struct head and its leading "name" field are missing
   from this extract -- confirm against the full source.  */
128 const aarch64_simd_itype itype
;
129 enum aarch64_simd_builtin_type_mode mode
;
130 const enum insn_code code
;
132 } aarch64_simd_builtin_datum
;
/* CF<MAP> macros select how a builtin's insn_code identifier is mangled:
   CF0 prefixes "aarch64_", CF1..CF4 append a numeric suffix, and CF10
   uses the name/mode pair unchanged.  The MAP argument of VARn picks
   one of these.  */
134 #define CF0(N, X) CODE_FOR_aarch64_##N##X
135 #define CF1(N, X) CODE_FOR_##N##X##1
136 #define CF2(N, X) CODE_FOR_##N##X##2
137 #define CF3(N, X) CODE_FOR_##N##X##3
138 #define CF4(N, X) CODE_FOR_##N##X##4
139 #define CF10(N, X) CODE_FOR_##N##X
/* VAR1 emits one aarch64_simd_builtin_datum initializer; VAR2..VAR12
   chain down to VAR1, producing one table entry per mode argument.
   NOTE(review): the trailing continuation line of each of VAR2..VAR12
   (the "VAR1 (T, N, MAP, <last-mode>)" part) is missing from this
   extract -- each definition below ends in a dangling '\'.  Confirm
   against the full source before editing.  */
141 #define VAR1(T, N, MAP, A) \
142 {#N, AARCH64_SIMD_##T, UP (A), CF##MAP (N, A), 0},
143 #define VAR2(T, N, MAP, A, B) \
144 VAR1 (T, N, MAP, A) \
146 #define VAR3(T, N, MAP, A, B, C) \
147 VAR2 (T, N, MAP, A, B) \
149 #define VAR4(T, N, MAP, A, B, C, D) \
150 VAR3 (T, N, MAP, A, B, C) \
152 #define VAR5(T, N, MAP, A, B, C, D, E) \
153 VAR4 (T, N, MAP, A, B, C, D) \
155 #define VAR6(T, N, MAP, A, B, C, D, E, F) \
156 VAR5 (T, N, MAP, A, B, C, D, E) \
158 #define VAR7(T, N, MAP, A, B, C, D, E, F, G) \
159 VAR6 (T, N, MAP, A, B, C, D, E, F) \
161 #define VAR8(T, N, MAP, A, B, C, D, E, F, G, H) \
162 VAR7 (T, N, MAP, A, B, C, D, E, F, G) \
164 #define VAR9(T, N, MAP, A, B, C, D, E, F, G, H, I) \
165 VAR8 (T, N, MAP, A, B, C, D, E, F, G, H) \
167 #define VAR10(T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
168 VAR9 (T, N, MAP, A, B, C, D, E, F, G, H, I) \
170 #define VAR11(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
171 VAR10 (T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
173 #define VAR12(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K, L) \
174 VAR11 (T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
177 /* BUILTIN_<ITERATOR> macros should expand to cover the same range of
178 modes as is given for each define_mode_iterator in
179 config/aarch64/iterators.md. */
/* Each BUILTIN_* macro below expands to one VARn invocation whose mode
   list mirrors the identically-named iterator; the suffix encodes the
   mode set (D = doubleword, Q = quadword, S = scalar, etc.).  */
181 #define BUILTIN_DX(T, N, MAP) \
182 VAR2 (T, N, MAP, di, df)
183 #define BUILTIN_GPF(T, N, MAP) \
184 VAR2 (T, N, MAP, sf, df)
185 #define BUILTIN_SDQ_I(T, N, MAP) \
186 VAR4 (T, N, MAP, qi, hi, si, di)
187 #define BUILTIN_SD_HSI(T, N, MAP) \
188 VAR2 (T, N, MAP, hi, si)
189 #define BUILTIN_V2F(T, N, MAP) \
190 VAR2 (T, N, MAP, v2sf, v2df)
191 #define BUILTIN_VALL(T, N, MAP) \
192 VAR10 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, \
193 v4si, v2di, v2sf, v4sf, v2df)
194 #define BUILTIN_VALLDI(T, N, MAP) \
195 VAR11 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, \
196 v4si, v2di, v2sf, v4sf, v2df, di)
197 #define BUILTIN_VB(T, N, MAP) \
198 VAR2 (T, N, MAP, v8qi, v16qi)
199 #define BUILTIN_VD(T, N, MAP) \
200 VAR4 (T, N, MAP, v8qi, v4hi, v2si, v2sf)
201 #define BUILTIN_VDC(T, N, MAP) \
202 VAR6 (T, N, MAP, v8qi, v4hi, v2si, v2sf, di, df)
203 #define BUILTIN_VDIC(T, N, MAP) \
204 VAR3 (T, N, MAP, v8qi, v4hi, v2si)
205 #define BUILTIN_VDN(T, N, MAP) \
206 VAR3 (T, N, MAP, v4hi, v2si, di)
207 #define BUILTIN_VDQ(T, N, MAP) \
208 VAR7 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di)
209 #define BUILTIN_VDQF(T, N, MAP) \
210 VAR3 (T, N, MAP, v2sf, v4sf, v2df)
211 #define BUILTIN_VDQH(T, N, MAP) \
212 VAR2 (T, N, MAP, v4hi, v8hi)
213 #define BUILTIN_VDQHS(T, N, MAP) \
214 VAR4 (T, N, MAP, v4hi, v8hi, v2si, v4si)
215 #define BUILTIN_VDQIF(T, N, MAP) \
216 VAR9 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2sf, v4sf, v2df)
217 #define BUILTIN_VDQM(T, N, MAP) \
218 VAR6 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
219 #define BUILTIN_VDQV(T, N, MAP) \
220 VAR5 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v4si)
221 #define BUILTIN_VDQ_BHSI(T, N, MAP) \
222 VAR6 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
223 #define BUILTIN_VDQ_I(T, N, MAP) \
224 VAR7 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di)
225 #define BUILTIN_VDW(T, N, MAP) \
226 VAR3 (T, N, MAP, v8qi, v4hi, v2si)
227 #define BUILTIN_VD_BHSI(T, N, MAP) \
228 VAR3 (T, N, MAP, v8qi, v4hi, v2si)
229 #define BUILTIN_VD_HSI(T, N, MAP) \
230 VAR2 (T, N, MAP, v4hi, v2si)
231 #define BUILTIN_VD_RE(T, N, MAP) \
232 VAR6 (T, N, MAP, v8qi, v4hi, v2si, v2sf, di, df)
233 #define BUILTIN_VQ(T, N, MAP) \
234 VAR6 (T, N, MAP, v16qi, v8hi, v4si, v2di, v4sf, v2df)
235 #define BUILTIN_VQN(T, N, MAP) \
236 VAR3 (T, N, MAP, v8hi, v4si, v2di)
237 #define BUILTIN_VQW(T, N, MAP) \
238 VAR3 (T, N, MAP, v16qi, v8hi, v4si)
239 #define BUILTIN_VQ_HSI(T, N, MAP) \
240 VAR2 (T, N, MAP, v8hi, v4si)
241 #define BUILTIN_VQ_S(T, N, MAP) \
242 VAR6 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
243 #define BUILTIN_VSDQ_HSI(T, N, MAP) \
244 VAR6 (T, N, MAP, v4hi, v8hi, v2si, v4si, hi, si)
245 #define BUILTIN_VSDQ_I(T, N, MAP) \
246 VAR11 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, qi, hi, si, di)
247 #define BUILTIN_VSDQ_I_BHSI(T, N, MAP) \
248 VAR10 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, qi, hi, si)
249 #define BUILTIN_VSDQ_I_DI(T, N, MAP) \
250 VAR8 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, di)
251 #define BUILTIN_VSD_HSI(T, N, MAP) \
252 VAR4 (T, N, MAP, v4hi, v2si, hi, si)
253 #define BUILTIN_VSQN_HSDI(T, N, MAP) \
254 VAR6 (T, N, MAP, v8hi, v4si, v2di, hi, si, di)
/* Opaque struct-of-vector modes (2, 3 and 4 vectors).  */
255 #define BUILTIN_VSTRUCT(T, N, MAP) \
256 VAR3 (T, N, MAP, oi, ci, xi)
/* Table of all AdvSIMD builtins, populated by expanding the BUILTIN_*
   macros over aarch64-simd-builtins.def.
   NOTE(review): the table's closing "};" is not visible in this
   extract -- confirm against the full source.  */
258 static aarch64_simd_builtin_datum aarch64_simd_builtin_data
[] = {
259 #include "aarch64-simd-builtins.def"
/* Re-purpose VAR1 so the same .def file now generates one enumerator
   (function code) per builtin variant instead of a table entry.  */
263 #define VAR1(T, N, MAP, A) \
264 AARCH64_SIMD_BUILTIN_##N##A,
266 enum aarch64_builtins
269 AARCH64_SIMD_BUILTIN_BASE
,
270 #include "aarch64-simd-builtins.def"
271 AARCH64_SIMD_BUILTIN_MAX
= AARCH64_SIMD_BUILTIN_BASE
272 + ARRAY_SIZE (aarch64_simd_builtin_data
),
/* Decls for all builtins, indexed by function code; GC-rooted.  */
276 static GTY(()) tree aarch64_builtin_decls
[AARCH64_BUILTIN_MAX
];
/* Number of distinct doubleword/quadword register element types used by
   the reinterpret-cast builtin tables below.  */
278 #define NUM_DREG_TYPES 6
279 #define NUM_QREG_TYPES 6
282 aarch64_init_simd_builtins (void)
284 unsigned int i
, fcode
= AARCH64_SIMD_BUILTIN_BASE
+ 1;
286 /* Scalar type nodes. */
287 tree aarch64_simd_intQI_type_node
;
288 tree aarch64_simd_intHI_type_node
;
289 tree aarch64_simd_polyQI_type_node
;
290 tree aarch64_simd_polyHI_type_node
;
291 tree aarch64_simd_intSI_type_node
;
292 tree aarch64_simd_intDI_type_node
;
293 tree aarch64_simd_float_type_node
;
294 tree aarch64_simd_double_type_node
;
296 /* Pointer to scalar type nodes. */
297 tree intQI_pointer_node
;
298 tree intHI_pointer_node
;
299 tree intSI_pointer_node
;
300 tree intDI_pointer_node
;
301 tree float_pointer_node
;
302 tree double_pointer_node
;
304 /* Const scalar type nodes. */
305 tree const_intQI_node
;
306 tree const_intHI_node
;
307 tree const_intSI_node
;
308 tree const_intDI_node
;
309 tree const_float_node
;
310 tree const_double_node
;
312 /* Pointer to const scalar type nodes. */
313 tree const_intQI_pointer_node
;
314 tree const_intHI_pointer_node
;
315 tree const_intSI_pointer_node
;
316 tree const_intDI_pointer_node
;
317 tree const_float_pointer_node
;
318 tree const_double_pointer_node
;
320 /* Vector type nodes. */
325 tree V16QI_type_node
;
332 /* Scalar unsigned type nodes. */
333 tree intUQI_type_node
;
334 tree intUHI_type_node
;
335 tree intUSI_type_node
;
336 tree intUDI_type_node
;
338 /* Opaque integer types for structures of vectors. */
339 tree intEI_type_node
;
340 tree intOI_type_node
;
341 tree intCI_type_node
;
342 tree intXI_type_node
;
344 /* Pointer to vector type nodes. */
345 tree V8QI_pointer_node
;
346 tree V4HI_pointer_node
;
347 tree V2SI_pointer_node
;
348 tree V2SF_pointer_node
;
349 tree V16QI_pointer_node
;
350 tree V8HI_pointer_node
;
351 tree V4SI_pointer_node
;
352 tree V4SF_pointer_node
;
353 tree V2DI_pointer_node
;
354 tree V2DF_pointer_node
;
356 /* Operations which return results as pairs. */
357 tree void_ftype_pv8qi_v8qi_v8qi
;
358 tree void_ftype_pv4hi_v4hi_v4hi
;
359 tree void_ftype_pv2si_v2si_v2si
;
360 tree void_ftype_pv2sf_v2sf_v2sf
;
361 tree void_ftype_pdi_di_di
;
362 tree void_ftype_pv16qi_v16qi_v16qi
;
363 tree void_ftype_pv8hi_v8hi_v8hi
;
364 tree void_ftype_pv4si_v4si_v4si
;
365 tree void_ftype_pv4sf_v4sf_v4sf
;
366 tree void_ftype_pv2di_v2di_v2di
;
367 tree void_ftype_pv2df_v2df_v2df
;
369 tree reinterp_ftype_dreg
[NUM_DREG_TYPES
][NUM_DREG_TYPES
];
370 tree reinterp_ftype_qreg
[NUM_QREG_TYPES
][NUM_QREG_TYPES
];
371 tree dreg_types
[NUM_DREG_TYPES
], qreg_types
[NUM_QREG_TYPES
];
373 /* Create distinguished type nodes for AARCH64_SIMD vector element types,
374 and pointers to values of such types, so we can detect them later. */
375 aarch64_simd_intQI_type_node
=
376 make_signed_type (GET_MODE_PRECISION (QImode
));
377 aarch64_simd_intHI_type_node
=
378 make_signed_type (GET_MODE_PRECISION (HImode
));
379 aarch64_simd_polyQI_type_node
=
380 make_signed_type (GET_MODE_PRECISION (QImode
));
381 aarch64_simd_polyHI_type_node
=
382 make_signed_type (GET_MODE_PRECISION (HImode
));
383 aarch64_simd_intSI_type_node
=
384 make_signed_type (GET_MODE_PRECISION (SImode
));
385 aarch64_simd_intDI_type_node
=
386 make_signed_type (GET_MODE_PRECISION (DImode
));
387 aarch64_simd_float_type_node
= make_node (REAL_TYPE
);
388 aarch64_simd_double_type_node
= make_node (REAL_TYPE
);
389 TYPE_PRECISION (aarch64_simd_float_type_node
) = FLOAT_TYPE_SIZE
;
390 TYPE_PRECISION (aarch64_simd_double_type_node
) = DOUBLE_TYPE_SIZE
;
391 layout_type (aarch64_simd_float_type_node
);
392 layout_type (aarch64_simd_double_type_node
);
394 /* Define typedefs which exactly correspond to the modes we are basing vector
395 types on. If you change these names you'll need to change
396 the table used by aarch64_mangle_type too. */
397 (*lang_hooks
.types
.register_builtin_type
) (aarch64_simd_intQI_type_node
,
398 "__builtin_aarch64_simd_qi");
399 (*lang_hooks
.types
.register_builtin_type
) (aarch64_simd_intHI_type_node
,
400 "__builtin_aarch64_simd_hi");
401 (*lang_hooks
.types
.register_builtin_type
) (aarch64_simd_intSI_type_node
,
402 "__builtin_aarch64_simd_si");
403 (*lang_hooks
.types
.register_builtin_type
) (aarch64_simd_float_type_node
,
404 "__builtin_aarch64_simd_sf");
405 (*lang_hooks
.types
.register_builtin_type
) (aarch64_simd_intDI_type_node
,
406 "__builtin_aarch64_simd_di");
407 (*lang_hooks
.types
.register_builtin_type
) (aarch64_simd_double_type_node
,
408 "__builtin_aarch64_simd_df");
409 (*lang_hooks
.types
.register_builtin_type
) (aarch64_simd_polyQI_type_node
,
410 "__builtin_aarch64_simd_poly8");
411 (*lang_hooks
.types
.register_builtin_type
) (aarch64_simd_polyHI_type_node
,
412 "__builtin_aarch64_simd_poly16");
414 intQI_pointer_node
= build_pointer_type (aarch64_simd_intQI_type_node
);
415 intHI_pointer_node
= build_pointer_type (aarch64_simd_intHI_type_node
);
416 intSI_pointer_node
= build_pointer_type (aarch64_simd_intSI_type_node
);
417 intDI_pointer_node
= build_pointer_type (aarch64_simd_intDI_type_node
);
418 float_pointer_node
= build_pointer_type (aarch64_simd_float_type_node
);
419 double_pointer_node
= build_pointer_type (aarch64_simd_double_type_node
);
421 /* Next create constant-qualified versions of the above types. */
422 const_intQI_node
= build_qualified_type (aarch64_simd_intQI_type_node
,
424 const_intHI_node
= build_qualified_type (aarch64_simd_intHI_type_node
,
426 const_intSI_node
= build_qualified_type (aarch64_simd_intSI_type_node
,
428 const_intDI_node
= build_qualified_type (aarch64_simd_intDI_type_node
,
430 const_float_node
= build_qualified_type (aarch64_simd_float_type_node
,
432 const_double_node
= build_qualified_type (aarch64_simd_double_type_node
,
435 const_intQI_pointer_node
= build_pointer_type (const_intQI_node
);
436 const_intHI_pointer_node
= build_pointer_type (const_intHI_node
);
437 const_intSI_pointer_node
= build_pointer_type (const_intSI_node
);
438 const_intDI_pointer_node
= build_pointer_type (const_intDI_node
);
439 const_float_pointer_node
= build_pointer_type (const_float_node
);
440 const_double_pointer_node
= build_pointer_type (const_double_node
);
442 /* Now create vector types based on our AARCH64 SIMD element types. */
443 /* 64-bit vectors. */
445 build_vector_type_for_mode (aarch64_simd_intQI_type_node
, V8QImode
);
447 build_vector_type_for_mode (aarch64_simd_intHI_type_node
, V4HImode
);
449 build_vector_type_for_mode (aarch64_simd_intSI_type_node
, V2SImode
);
451 build_vector_type_for_mode (aarch64_simd_float_type_node
, V2SFmode
);
452 /* 128-bit vectors. */
454 build_vector_type_for_mode (aarch64_simd_intQI_type_node
, V16QImode
);
456 build_vector_type_for_mode (aarch64_simd_intHI_type_node
, V8HImode
);
458 build_vector_type_for_mode (aarch64_simd_intSI_type_node
, V4SImode
);
460 build_vector_type_for_mode (aarch64_simd_float_type_node
, V4SFmode
);
462 build_vector_type_for_mode (aarch64_simd_intDI_type_node
, V2DImode
);
464 build_vector_type_for_mode (aarch64_simd_double_type_node
, V2DFmode
);
466 /* Unsigned integer types for various mode sizes. */
467 intUQI_type_node
= make_unsigned_type (GET_MODE_PRECISION (QImode
));
468 intUHI_type_node
= make_unsigned_type (GET_MODE_PRECISION (HImode
));
469 intUSI_type_node
= make_unsigned_type (GET_MODE_PRECISION (SImode
));
470 intUDI_type_node
= make_unsigned_type (GET_MODE_PRECISION (DImode
));
472 (*lang_hooks
.types
.register_builtin_type
) (intUQI_type_node
,
473 "__builtin_aarch64_simd_uqi");
474 (*lang_hooks
.types
.register_builtin_type
) (intUHI_type_node
,
475 "__builtin_aarch64_simd_uhi");
476 (*lang_hooks
.types
.register_builtin_type
) (intUSI_type_node
,
477 "__builtin_aarch64_simd_usi");
478 (*lang_hooks
.types
.register_builtin_type
) (intUDI_type_node
,
479 "__builtin_aarch64_simd_udi");
481 /* Opaque integer types for structures of vectors. */
482 intEI_type_node
= make_signed_type (GET_MODE_PRECISION (EImode
));
483 intOI_type_node
= make_signed_type (GET_MODE_PRECISION (OImode
));
484 intCI_type_node
= make_signed_type (GET_MODE_PRECISION (CImode
));
485 intXI_type_node
= make_signed_type (GET_MODE_PRECISION (XImode
));
487 (*lang_hooks
.types
.register_builtin_type
) (intTI_type_node
,
488 "__builtin_aarch64_simd_ti");
489 (*lang_hooks
.types
.register_builtin_type
) (intEI_type_node
,
490 "__builtin_aarch64_simd_ei");
491 (*lang_hooks
.types
.register_builtin_type
) (intOI_type_node
,
492 "__builtin_aarch64_simd_oi");
493 (*lang_hooks
.types
.register_builtin_type
) (intCI_type_node
,
494 "__builtin_aarch64_simd_ci");
495 (*lang_hooks
.types
.register_builtin_type
) (intXI_type_node
,
496 "__builtin_aarch64_simd_xi");
498 /* Pointers to vector types. */
499 V8QI_pointer_node
= build_pointer_type (V8QI_type_node
);
500 V4HI_pointer_node
= build_pointer_type (V4HI_type_node
);
501 V2SI_pointer_node
= build_pointer_type (V2SI_type_node
);
502 V2SF_pointer_node
= build_pointer_type (V2SF_type_node
);
503 V16QI_pointer_node
= build_pointer_type (V16QI_type_node
);
504 V8HI_pointer_node
= build_pointer_type (V8HI_type_node
);
505 V4SI_pointer_node
= build_pointer_type (V4SI_type_node
);
506 V4SF_pointer_node
= build_pointer_type (V4SF_type_node
);
507 V2DI_pointer_node
= build_pointer_type (V2DI_type_node
);
508 V2DF_pointer_node
= build_pointer_type (V2DF_type_node
);
510 /* Operations which return results as pairs. */
511 void_ftype_pv8qi_v8qi_v8qi
=
512 build_function_type_list (void_type_node
, V8QI_pointer_node
,
513 V8QI_type_node
, V8QI_type_node
, NULL
);
514 void_ftype_pv4hi_v4hi_v4hi
=
515 build_function_type_list (void_type_node
, V4HI_pointer_node
,
516 V4HI_type_node
, V4HI_type_node
, NULL
);
517 void_ftype_pv2si_v2si_v2si
=
518 build_function_type_list (void_type_node
, V2SI_pointer_node
,
519 V2SI_type_node
, V2SI_type_node
, NULL
);
520 void_ftype_pv2sf_v2sf_v2sf
=
521 build_function_type_list (void_type_node
, V2SF_pointer_node
,
522 V2SF_type_node
, V2SF_type_node
, NULL
);
523 void_ftype_pdi_di_di
=
524 build_function_type_list (void_type_node
, intDI_pointer_node
,
525 aarch64_simd_intDI_type_node
,
526 aarch64_simd_intDI_type_node
, NULL
);
527 void_ftype_pv16qi_v16qi_v16qi
=
528 build_function_type_list (void_type_node
, V16QI_pointer_node
,
529 V16QI_type_node
, V16QI_type_node
, NULL
);
530 void_ftype_pv8hi_v8hi_v8hi
=
531 build_function_type_list (void_type_node
, V8HI_pointer_node
,
532 V8HI_type_node
, V8HI_type_node
, NULL
);
533 void_ftype_pv4si_v4si_v4si
=
534 build_function_type_list (void_type_node
, V4SI_pointer_node
,
535 V4SI_type_node
, V4SI_type_node
, NULL
);
536 void_ftype_pv4sf_v4sf_v4sf
=
537 build_function_type_list (void_type_node
, V4SF_pointer_node
,
538 V4SF_type_node
, V4SF_type_node
, NULL
);
539 void_ftype_pv2di_v2di_v2di
=
540 build_function_type_list (void_type_node
, V2DI_pointer_node
,
541 V2DI_type_node
, V2DI_type_node
, NULL
);
542 void_ftype_pv2df_v2df_v2df
=
543 build_function_type_list (void_type_node
, V2DF_pointer_node
,
544 V2DF_type_node
, V2DF_type_node
, NULL
);
546 dreg_types
[0] = V8QI_type_node
;
547 dreg_types
[1] = V4HI_type_node
;
548 dreg_types
[2] = V2SI_type_node
;
549 dreg_types
[3] = V2SF_type_node
;
550 dreg_types
[4] = aarch64_simd_intDI_type_node
;
551 dreg_types
[5] = aarch64_simd_double_type_node
;
553 qreg_types
[0] = V16QI_type_node
;
554 qreg_types
[1] = V8HI_type_node
;
555 qreg_types
[2] = V4SI_type_node
;
556 qreg_types
[3] = V4SF_type_node
;
557 qreg_types
[4] = V2DI_type_node
;
558 qreg_types
[5] = V2DF_type_node
;
560 /* If NUM_DREG_TYPES != NUM_QREG_TYPES, we will need separate nested loops
561 for qreg and dreg reinterp inits. */
562 for (i
= 0; i
< NUM_DREG_TYPES
; i
++)
565 for (j
= 0; j
< NUM_DREG_TYPES
; j
++)
567 reinterp_ftype_dreg
[i
][j
]
568 = build_function_type_list (dreg_types
[i
], dreg_types
[j
], NULL
);
569 reinterp_ftype_qreg
[i
][j
]
570 = build_function_type_list (qreg_types
[i
], qreg_types
[j
], NULL
);
574 for (i
= 0; i
< ARRAY_SIZE (aarch64_simd_builtin_data
); i
++, fcode
++)
576 aarch64_simd_builtin_datum
*d
= &aarch64_simd_builtin_data
[i
];
577 const char *const modenames
[] =
579 "v8qi", "v4hi", "v2si", "v2sf", "di", "df",
580 "v16qi", "v8hi", "v4si", "v4sf", "v2di", "v2df",
581 "ti", "ei", "oi", "xi", "si", "sf", "hi", "qi"
589 gcc_assert (ARRAY_SIZE (modenames
) == T_MAX
);
595 case AARCH64_SIMD_LOAD1
:
596 case AARCH64_SIMD_LOAD1LANE
:
597 case AARCH64_SIMD_LOADSTRUCT
:
598 case AARCH64_SIMD_LOADSTRUCTLANE
:
601 case AARCH64_SIMD_STORE1
:
602 case AARCH64_SIMD_STORE1LANE
:
603 case AARCH64_SIMD_STORESTRUCT
:
604 case AARCH64_SIMD_STORESTRUCTLANE
:
608 case AARCH64_SIMD_UNOP
:
609 case AARCH64_SIMD_BINOP
:
610 case AARCH64_SIMD_TERNOP
:
611 case AARCH64_SIMD_QUADOP
:
612 case AARCH64_SIMD_COMBINE
:
613 case AARCH64_SIMD_CONVERT
:
614 case AARCH64_SIMD_CREATE
:
615 case AARCH64_SIMD_DUP
:
616 case AARCH64_SIMD_DUPLANE
:
617 case AARCH64_SIMD_FIXCONV
:
618 case AARCH64_SIMD_GETLANE
:
619 case AARCH64_SIMD_LANEMAC
:
620 case AARCH64_SIMD_LANEMUL
:
621 case AARCH64_SIMD_LANEMULH
:
622 case AARCH64_SIMD_LANEMULL
:
623 case AARCH64_SIMD_LOGICBINOP
:
624 case AARCH64_SIMD_SCALARMAC
:
625 case AARCH64_SIMD_SCALARMUL
:
626 case AARCH64_SIMD_SCALARMULH
:
627 case AARCH64_SIMD_SCALARMULL
:
628 case AARCH64_SIMD_SELECT
:
629 case AARCH64_SIMD_SETLANE
:
630 case AARCH64_SIMD_SHIFTACC
:
631 case AARCH64_SIMD_SHIFTIMM
:
632 case AARCH64_SIMD_SHIFTINSERT
:
633 case AARCH64_SIMD_SPLIT
:
634 case AARCH64_SIMD_VTBL
:
635 case AARCH64_SIMD_VTBX
:
638 tree return_type
= void_type_node
, args
= void_list_node
;
640 /* Build a function type directly from the insn_data for this
641 builtin. The build_function_type () function takes care of
642 removing duplicates for us. */
644 for (k
= insn_data
[d
->code
].n_operands
-1; k
>= 0; k
--)
646 /* Skip an internal operand for vget_{low, high}. */
647 if (k
== 2 && d
->itype
== AARCH64_SIMD_SPLIT
)
650 if (is_load
&& k
== 1)
652 /* AdvSIMD load patterns always have the memory operand
653 (a DImode pointer) in the operand 1 position. We
654 want a const pointer to the element type in that
656 gcc_assert (insn_data
[d
->code
].operand
[k
].mode
== DImode
);
662 eltype
= const_intQI_pointer_node
;
667 eltype
= const_intHI_pointer_node
;
672 eltype
= const_intSI_pointer_node
;
677 eltype
= const_float_pointer_node
;
682 eltype
= const_intDI_pointer_node
;
687 eltype
= const_double_pointer_node
;
694 else if (is_store
&& k
== 0)
696 /* Similarly, AdvSIMD store patterns use operand 0 as
697 the memory location to store to (a DImode pointer).
698 Use a pointer to the element type of the store in
700 gcc_assert (insn_data
[d
->code
].operand
[k
].mode
== DImode
);
706 eltype
= intQI_pointer_node
;
711 eltype
= intHI_pointer_node
;
716 eltype
= intSI_pointer_node
;
721 eltype
= float_pointer_node
;
726 eltype
= intDI_pointer_node
;
731 eltype
= double_pointer_node
;
740 switch (insn_data
[d
->code
].operand
[k
].mode
)
743 eltype
= void_type_node
;
747 eltype
= aarch64_simd_intQI_type_node
;
750 eltype
= aarch64_simd_intHI_type_node
;
753 eltype
= aarch64_simd_intSI_type_node
;
756 eltype
= aarch64_simd_float_type_node
;
759 eltype
= aarch64_simd_double_type_node
;
762 eltype
= aarch64_simd_intDI_type_node
;
765 eltype
= intTI_type_node
;
768 eltype
= intEI_type_node
;
771 eltype
= intOI_type_node
;
774 eltype
= intCI_type_node
;
777 eltype
= intXI_type_node
;
779 /* 64-bit vectors. */
781 eltype
= V8QI_type_node
;
784 eltype
= V4HI_type_node
;
787 eltype
= V2SI_type_node
;
790 eltype
= V2SF_type_node
;
792 /* 128-bit vectors. */
794 eltype
= V16QI_type_node
;
797 eltype
= V8HI_type_node
;
800 eltype
= V4SI_type_node
;
803 eltype
= V4SF_type_node
;
806 eltype
= V2DI_type_node
;
809 eltype
= V2DF_type_node
;
816 if (k
== 0 && !is_store
)
817 return_type
= eltype
;
819 args
= tree_cons (NULL_TREE
, eltype
, args
);
821 ftype
= build_function_type (return_type
, args
);
825 case AARCH64_SIMD_RESULTPAIR
:
827 switch (insn_data
[d
->code
].operand
[1].mode
)
830 ftype
= void_ftype_pv8qi_v8qi_v8qi
;
833 ftype
= void_ftype_pv4hi_v4hi_v4hi
;
836 ftype
= void_ftype_pv2si_v2si_v2si
;
839 ftype
= void_ftype_pv2sf_v2sf_v2sf
;
842 ftype
= void_ftype_pdi_di_di
;
845 ftype
= void_ftype_pv16qi_v16qi_v16qi
;
848 ftype
= void_ftype_pv8hi_v8hi_v8hi
;
851 ftype
= void_ftype_pv4si_v4si_v4si
;
854 ftype
= void_ftype_pv4sf_v4sf_v4sf
;
857 ftype
= void_ftype_pv2di_v2di_v2di
;
860 ftype
= void_ftype_pv2df_v2df_v2df
;
868 case AARCH64_SIMD_REINTERP
:
870 /* We iterate over 6 doubleword types, then 6 quadword
872 int rhs_d
= d
->mode
% NUM_DREG_TYPES
;
873 int rhs_q
= (d
->mode
- NUM_DREG_TYPES
) % NUM_QREG_TYPES
;
874 switch (insn_data
[d
->code
].operand
[0].mode
)
877 ftype
= reinterp_ftype_dreg
[0][rhs_d
];
880 ftype
= reinterp_ftype_dreg
[1][rhs_d
];
883 ftype
= reinterp_ftype_dreg
[2][rhs_d
];
886 ftype
= reinterp_ftype_dreg
[3][rhs_d
];
889 ftype
= reinterp_ftype_dreg
[4][rhs_d
];
892 ftype
= reinterp_ftype_dreg
[5][rhs_d
];
895 ftype
= reinterp_ftype_qreg
[0][rhs_q
];
898 ftype
= reinterp_ftype_qreg
[1][rhs_q
];
901 ftype
= reinterp_ftype_qreg
[2][rhs_q
];
904 ftype
= reinterp_ftype_qreg
[3][rhs_q
];
907 ftype
= reinterp_ftype_qreg
[4][rhs_q
];
910 ftype
= reinterp_ftype_qreg
[5][rhs_q
];
921 gcc_assert (ftype
!= NULL
);
923 snprintf (namebuf
, sizeof (namebuf
), "__builtin_aarch64_%s%s",
924 d
->name
, modenames
[d
->mode
]);
926 fndecl
= add_builtin_function (namebuf
, ftype
, fcode
, BUILT_IN_MD
,
928 aarch64_builtin_decls
[fcode
] = fndecl
;
/* Set up all target-specific builtins; currently only the AdvSIMD set.
   NOTE(review): the return type and surrounding braces of this function
   are missing from this extract.  */
933 aarch64_init_builtins (void)
936 aarch64_init_simd_builtins ();
/* Map a builtin function CODE back to its decl from
   aarch64_builtin_decls, or return error_mark_node for an
   out-of-range code.  INITIALIZE_P is unused.
   NOTE(review): the return type and braces are missing from this
   extract -- presumably a tree-returning TARGET_BUILTIN_DECL hook;
   confirm against the full source.  */
940 aarch64_builtin_decl (unsigned code
, bool initialize_p ATTRIBUTE_UNUSED
)
942 if (code
>= AARCH64_BUILTIN_MAX
)
943 return error_mark_node
;
945 return aarch64_builtin_decls
[code
];
/* How each argument of a builtin call should be prepared before
   expansion (see aarch64_simd_expand_args below).
   NOTE(review): the enum head and the other enumerators
   (e.g. SIMD_ARG_CONSTANT, SIMD_ARG_STOP, both used later) are missing
   from this extract.  */
950 SIMD_ARG_COPY_TO_REG
,
/* Upper bound on builtin arguments handled by the expander's
   fixed-size arg/op/mode arrays.  */
955 #define SIMD_MAX_BUILTIN_ARGS 5
958 aarch64_simd_expand_args (rtx target
, int icode
, int have_retval
,
963 tree arg
[SIMD_MAX_BUILTIN_ARGS
];
964 rtx op
[SIMD_MAX_BUILTIN_ARGS
];
965 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
966 enum machine_mode mode
[SIMD_MAX_BUILTIN_ARGS
];
971 || GET_MODE (target
) != tmode
972 || !(*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
)))
973 target
= gen_reg_rtx (tmode
);
979 builtin_simd_arg thisarg
= (builtin_simd_arg
) va_arg (ap
, int);
981 if (thisarg
== SIMD_ARG_STOP
)
985 arg
[argc
] = CALL_EXPR_ARG (exp
, argc
);
986 op
[argc
] = expand_normal (arg
[argc
]);
987 mode
[argc
] = insn_data
[icode
].operand
[argc
+ have_retval
].mode
;
991 case SIMD_ARG_COPY_TO_REG
:
992 /*gcc_assert (GET_MODE (op[argc]) == mode[argc]); */
993 if (!(*insn_data
[icode
].operand
[argc
+ have_retval
].predicate
)
994 (op
[argc
], mode
[argc
]))
995 op
[argc
] = copy_to_mode_reg (mode
[argc
], op
[argc
]);
998 case SIMD_ARG_CONSTANT
:
999 if (!(*insn_data
[icode
].operand
[argc
+ have_retval
].predicate
)
1000 (op
[argc
], mode
[argc
]))
1001 error_at (EXPR_LOCATION (exp
), "incompatible type for argument %d, "
1002 "expected %<const int%>", argc
+ 1);
1019 pat
= GEN_FCN (icode
) (target
, op
[0]);
1023 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1]);
1027 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1], op
[2]);
1031 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1], op
[2], op
[3]);
1035 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1], op
[2], op
[3], op
[4]);
1045 pat
= GEN_FCN (icode
) (op
[0]);
1049 pat
= GEN_FCN (icode
) (op
[0], op
[1]);
1053 pat
= GEN_FCN (icode
) (op
[0], op
[1], op
[2]);
1057 pat
= GEN_FCN (icode
) (op
[0], op
[1], op
[2], op
[3]);
1061 pat
= GEN_FCN (icode
) (op
[0], op
[1], op
[2], op
[3], op
[4]);
1076 /* Expand an AArch64 AdvSIMD builtin(intrinsic). */
1078 aarch64_simd_expand_builtin (int fcode
, tree exp
, rtx target
)
1080 aarch64_simd_builtin_datum
*d
=
1081 &aarch64_simd_builtin_data
[fcode
- (AARCH64_SIMD_BUILTIN_BASE
+ 1)];
1082 aarch64_simd_itype itype
= d
->itype
;
1083 enum insn_code icode
= d
->code
;
1087 case AARCH64_SIMD_UNOP
:
1088 return aarch64_simd_expand_args (target
, icode
, 1, exp
,
1089 SIMD_ARG_COPY_TO_REG
,
1092 case AARCH64_SIMD_BINOP
:
1094 rtx arg2
= expand_normal (CALL_EXPR_ARG (exp
, 1));
1095 /* Handle constants only if the predicate allows it. */
1096 bool op1_const_int_p
=
1098 && (*insn_data
[icode
].operand
[2].predicate
)
1099 (arg2
, insn_data
[icode
].operand
[2].mode
));
1100 return aarch64_simd_expand_args
1101 (target
, icode
, 1, exp
,
1102 SIMD_ARG_COPY_TO_REG
,
1103 op1_const_int_p
? SIMD_ARG_CONSTANT
: SIMD_ARG_COPY_TO_REG
,
1107 case AARCH64_SIMD_TERNOP
:
1108 return aarch64_simd_expand_args (target
, icode
, 1, exp
,
1109 SIMD_ARG_COPY_TO_REG
,
1110 SIMD_ARG_COPY_TO_REG
,
1111 SIMD_ARG_COPY_TO_REG
,
1114 case AARCH64_SIMD_QUADOP
:
1115 return aarch64_simd_expand_args (target
, icode
, 1, exp
,
1116 SIMD_ARG_COPY_TO_REG
,
1117 SIMD_ARG_COPY_TO_REG
,
1118 SIMD_ARG_COPY_TO_REG
,
1119 SIMD_ARG_COPY_TO_REG
,
1121 case AARCH64_SIMD_LOAD1
:
1122 case AARCH64_SIMD_LOADSTRUCT
:
1123 return aarch64_simd_expand_args (target
, icode
, 1, exp
,
1124 SIMD_ARG_COPY_TO_REG
, SIMD_ARG_STOP
);
1126 case AARCH64_SIMD_STORESTRUCT
:
1127 return aarch64_simd_expand_args (target
, icode
, 0, exp
,
1128 SIMD_ARG_COPY_TO_REG
,
1129 SIMD_ARG_COPY_TO_REG
, SIMD_ARG_STOP
);
1131 case AARCH64_SIMD_REINTERP
:
1132 return aarch64_simd_expand_args (target
, icode
, 1, exp
,
1133 SIMD_ARG_COPY_TO_REG
, SIMD_ARG_STOP
);
1135 case AARCH64_SIMD_CREATE
:
1136 return aarch64_simd_expand_args (target
, icode
, 1, exp
,
1137 SIMD_ARG_COPY_TO_REG
, SIMD_ARG_STOP
);
1139 case AARCH64_SIMD_COMBINE
:
1140 return aarch64_simd_expand_args (target
, icode
, 1, exp
,
1141 SIMD_ARG_COPY_TO_REG
,
1142 SIMD_ARG_COPY_TO_REG
, SIMD_ARG_STOP
);
1144 case AARCH64_SIMD_GETLANE
:
1145 return aarch64_simd_expand_args (target
, icode
, 1, exp
,
1146 SIMD_ARG_COPY_TO_REG
,
1150 case AARCH64_SIMD_SETLANE
:
1151 return aarch64_simd_expand_args (target
, icode
, 1, exp
,
1152 SIMD_ARG_COPY_TO_REG
,
1153 SIMD_ARG_COPY_TO_REG
,
1157 case AARCH64_SIMD_SHIFTIMM
:
1158 return aarch64_simd_expand_args (target
, icode
, 1, exp
,
1159 SIMD_ARG_COPY_TO_REG
,
1163 case AARCH64_SIMD_SHIFTACC
:
1164 case AARCH64_SIMD_SHIFTINSERT
:
1165 return aarch64_simd_expand_args (target
, icode
, 1, exp
,
1166 SIMD_ARG_COPY_TO_REG
,
1167 SIMD_ARG_COPY_TO_REG
,
1176 /* Expand an expression EXP that calls a built-in function,
1177 with result going to TARGET if that's convenient. */
1179 aarch64_expand_builtin (tree exp
,
1181 rtx subtarget ATTRIBUTE_UNUSED
,
1182 enum machine_mode mode ATTRIBUTE_UNUSED
,
1183 int ignore ATTRIBUTE_UNUSED
)
1185 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
1186 int fcode
= DECL_FUNCTION_CODE (fndecl
);
1188 if (fcode
>= AARCH64_SIMD_BUILTIN_BASE
)
1189 return aarch64_simd_expand_builtin (fcode
, exp
, target
);
1195 aarch64_builtin_vectorized_function (tree fndecl
, tree type_out
, tree type_in
)
1197 enum machine_mode in_mode
, out_mode
;
1200 if (TREE_CODE (type_out
) != VECTOR_TYPE
1201 || TREE_CODE (type_in
) != VECTOR_TYPE
)
1204 out_mode
= TYPE_MODE (TREE_TYPE (type_out
));
1205 out_n
= TYPE_VECTOR_SUBPARTS (type_out
);
1206 in_mode
= TYPE_MODE (TREE_TYPE (type_in
));
1207 in_n
= TYPE_VECTOR_SUBPARTS (type_in
);
1209 #undef AARCH64_CHECK_BUILTIN_MODE
1210 #define AARCH64_CHECK_BUILTIN_MODE(C, N) 1
1211 #define AARCH64_FIND_FRINT_VARIANT(N) \
1212 (AARCH64_CHECK_BUILTIN_MODE (2, D) \
1213 ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_##N##v2df] \
1214 : (AARCH64_CHECK_BUILTIN_MODE (4, S) \
1215 ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_##N##v4sf] \
1216 : (AARCH64_CHECK_BUILTIN_MODE (2, S) \
1217 ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_##N##v2sf] \
1219 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
1221 enum built_in_function fn
= DECL_FUNCTION_CODE (fndecl
);
1224 #undef AARCH64_CHECK_BUILTIN_MODE
1225 #define AARCH64_CHECK_BUILTIN_MODE(C, N) \
1226 (out_mode == N##Fmode && out_n == C \
1227 && in_mode == N##Fmode && in_n == C)
1228 case BUILT_IN_FLOOR
:
1229 case BUILT_IN_FLOORF
:
1230 return AARCH64_FIND_FRINT_VARIANT (floor
);
1232 case BUILT_IN_CEILF
:
1233 return AARCH64_FIND_FRINT_VARIANT (ceil
);
1234 case BUILT_IN_TRUNC
:
1235 case BUILT_IN_TRUNCF
:
1236 return AARCH64_FIND_FRINT_VARIANT (btrunc
);
1237 case BUILT_IN_ROUND
:
1238 case BUILT_IN_ROUNDF
:
1239 return AARCH64_FIND_FRINT_VARIANT (round
);
1240 case BUILT_IN_NEARBYINT
:
1241 case BUILT_IN_NEARBYINTF
:
1242 return AARCH64_FIND_FRINT_VARIANT (nearbyint
);
1244 case BUILT_IN_SQRTF
:
1245 return AARCH64_FIND_FRINT_VARIANT (sqrt
);
1246 #undef AARCH64_CHECK_BUILTIN_MODE
1247 #define AARCH64_CHECK_BUILTIN_MODE(C, N) \
1248 (out_mode == N##Imode && out_n == C \
1249 && in_mode == N##Fmode && in_n == C)
1250 case BUILT_IN_LFLOOR
:
1251 case BUILT_IN_IFLOORF
:
1253 tree new_tree
= NULL_TREE
;
1254 if (AARCH64_CHECK_BUILTIN_MODE (2, D
))
1256 aarch64_builtin_decls
[AARCH64_SIMD_BUILTIN_lfloorv2dfv2di
];
1257 else if (AARCH64_CHECK_BUILTIN_MODE (4, S
))
1259 aarch64_builtin_decls
[AARCH64_SIMD_BUILTIN_lfloorv4sfv4si
];
1260 else if (AARCH64_CHECK_BUILTIN_MODE (2, S
))
1262 aarch64_builtin_decls
[AARCH64_SIMD_BUILTIN_lfloorv2sfv2si
];
1265 case BUILT_IN_LCEIL
:
1266 case BUILT_IN_ICEILF
:
1268 tree new_tree
= NULL_TREE
;
1269 if (AARCH64_CHECK_BUILTIN_MODE (2, D
))
1271 aarch64_builtin_decls
[AARCH64_SIMD_BUILTIN_lceilv2dfv2di
];
1272 else if (AARCH64_CHECK_BUILTIN_MODE (4, S
))
1274 aarch64_builtin_decls
[AARCH64_SIMD_BUILTIN_lceilv4sfv4si
];
1275 else if (AARCH64_CHECK_BUILTIN_MODE (2, S
))
1277 aarch64_builtin_decls
[AARCH64_SIMD_BUILTIN_lceilv2sfv2si
];
1280 case BUILT_IN_LROUND
:
1281 case BUILT_IN_IROUNDF
:
1283 tree new_tree
= NULL_TREE
;
1284 if (AARCH64_CHECK_BUILTIN_MODE (2, D
))
1286 aarch64_builtin_decls
[AARCH64_SIMD_BUILTIN_lroundv2dfv2di
];
1287 else if (AARCH64_CHECK_BUILTIN_MODE (4, S
))
1289 aarch64_builtin_decls
[AARCH64_SIMD_BUILTIN_lroundv4sfv4si
];
1290 else if (AARCH64_CHECK_BUILTIN_MODE (2, S
))
1292 aarch64_builtin_decls
[AARCH64_SIMD_BUILTIN_lroundv2sfv2si
];
1305 #define VAR1(T, N, MAP, A) \
1306 case AARCH64_SIMD_BUILTIN_##N##A:
1309 aarch64_fold_builtin (tree fndecl
, int n_args ATTRIBUTE_UNUSED
, tree
*args
,
1310 bool ignore ATTRIBUTE_UNUSED
)
1312 int fcode
= DECL_FUNCTION_CODE (fndecl
);
1313 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
1317 BUILTIN_VDQF (UNOP
, abs
, 2)
1318 return fold_build1 (ABS_EXPR
, type
, args
[0]);
1320 BUILTIN_VALLDI (BINOP
, cmge
, 0)
1321 return fold_build2 (GE_EXPR
, type
, args
[0], args
[1]);
1323 BUILTIN_VALLDI (BINOP
, cmgt
, 0)
1324 return fold_build2 (GT_EXPR
, type
, args
[0], args
[1]);
1326 BUILTIN_VALLDI (BINOP
, cmeq
, 0)
1327 return fold_build2 (EQ_EXPR
, type
, args
[0], args
[1]);
1329 BUILTIN_VSDQ_I_DI (BINOP
, cmtst
, 0)
1331 tree and_node
= fold_build2 (BIT_AND_EXPR
, type
, args
[0], args
[1]);
1332 tree vec_zero_node
= build_zero_cst (type
);
1333 return fold_build2 (NE_EXPR
, type
, and_node
, vec_zero_node
);
1336 VAR1 (UNOP
, floatv2si
, 2, v2sf
)
1337 VAR1 (UNOP
, floatv4si
, 2, v4sf
)
1338 VAR1 (UNOP
, floatv2di
, 2, v2df
)
1339 return fold_build1 (FLOAT_EXPR
, type
, args
[0]);
1348 aarch64_gimple_fold_builtin (gimple_stmt_iterator
*gsi
)
1350 bool changed
= false;
1351 gimple stmt
= gsi_stmt (*gsi
);
1352 tree call
= gimple_call_fn (stmt
);
1354 gimple new_stmt
= NULL
;
1357 fndecl
= gimple_call_fndecl (stmt
);
1360 int fcode
= DECL_FUNCTION_CODE (fndecl
);
1361 int nargs
= gimple_call_num_args (stmt
);
1362 tree
*args
= (nargs
> 0
1363 ? gimple_call_arg_ptr (stmt
, 0)
1364 : &error_mark_node
);
1368 BUILTIN_VALL (UNOP
, reduc_splus_
, 10)
1369 new_stmt
= gimple_build_assign_with_ops (
1371 gimple_call_lhs (stmt
),
1375 BUILTIN_VDQIF (UNOP
, reduc_smax_
, 10)
1376 new_stmt
= gimple_build_assign_with_ops (
1378 gimple_call_lhs (stmt
),
1382 BUILTIN_VDQIF (UNOP
, reduc_smin_
, 10)
1383 new_stmt
= gimple_build_assign_with_ops (
1385 gimple_call_lhs (stmt
),
1398 gsi_replace (gsi
, new_stmt
, true);
/* Tear down the local convenience macros so they do not leak into other
   files that include this one.
   NOTE(review): several #undef lines (e.g. BUILTIN_DX, BUILTIN_VALL,
   VAR1..VAR12) are missing from this extract -- confirm against the
   full source.  */
1405 #undef AARCH64_CHECK_BUILTIN_MODE
1406 #undef AARCH64_FIND_FRINT_VARIANT
1408 #undef BUILTIN_SDQ_I
1409 #undef BUILTIN_SD_HSI
1420 #undef BUILTIN_VDQHS
1421 #undef BUILTIN_VDQIF
1424 #undef BUILTIN_VDQ_BHSI
1425 #undef BUILTIN_VDQ_I
1427 #undef BUILTIN_VD_BHSI
1428 #undef BUILTIN_VD_HSI
1429 #undef BUILTIN_VD_RE
1433 #undef BUILTIN_VQ_HSI
1435 #undef BUILTIN_VSDQ_HSI
1436 #undef BUILTIN_VSDQ_I
1437 #undef BUILTIN_VSDQ_I_BHSI
1438 #undef BUILTIN_VSDQ_I_DI
1439 #undef BUILTIN_VSD_HSI
1440 #undef BUILTIN_VSQN_HSDI
1441 #undef BUILTIN_VSTRUCT