/* intrinsics.cc -- D language compiler intrinsics.
   Copyright (C) 2006-2024 Free Software Foundation, Inc.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"

#include "dmd/declaration.h"
#include "dmd/expression.h"
#include "dmd/identifier.h"
#include "dmd/mangle.h"
#include "dmd/module.h"
#include "dmd/template.h"

#include "tm.h"
#include "function.h"
#include "tree.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "fold-const.h"
#include "stringpool.h"
#include "builtins.h"
#include "vec-perm-indices.h"

#include "d-tree.h"

/* An internal struct used to hold information on D intrinsics.  */

struct intrinsic_decl
{
  /* The DECL_INTRINSIC_CODE of this decl.  */
  intrinsic_code code;

  /* The DECL_FUNCTION_CODE of this decl, if it directly maps to any.  */
  built_in_function built_in;

  /* The name of the intrinsic.  */
  const char *name;

  /* The module where the intrinsic is located.  */
  const char *module;

  /* The mangled signature decoration of the intrinsic.  */
  const char *deco;

  /* True if the intrinsic is only handled in CTFE.  */
  bool ctfeonly;

  /* True if the intrinsic has a library implementation.  */
  bool fallback;
};

static const intrinsic_decl intrinsic_decls[] =
{
#define DEF_D_INTRINSIC(CODE, BUILTIN, NAME, MODULE, DECO, CTFE, FALLBACK) \
  { CODE, BUILTIN, NAME, MODULE, DECO, CTFE, FALLBACK },

#include "intrinsics.def"

#undef DEF_D_INTRINSIC
};

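/* For illustration only: an entry in intrinsics.def expands through the
   macro above into one row of this table.  A hypothetical entry for bsf()
   might look like

     DEF_D_INTRINSIC (INTRINSIC_BSF, BUILT_IN_NONE, "bsf", "core.bitop",
		      "FNaNbNiNfkZi", false, true)

   where the deco string is a sketch of the D mangle for a declaration such
   as `int bsf(uint) pure nothrow @nogc @safe'; the exact strings live in
   intrinsics.def and may differ.  */
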
/* Checks if DECL is an intrinsic or run time library function that requires
   special processing.  Sets DECL_INTRINSIC_CODE so it can be identified
   later in maybe_expand_intrinsic.  */

void
maybe_set_intrinsic (FuncDeclaration *decl)
{
  if (!decl->ident || decl->builtin != BUILTIN::unknown)
    return;

  /* The builtin flag is updated only if we can evaluate the intrinsic
     at compile-time, such as the math or bitop intrinsics.  */
  decl->builtin = BUILTIN::unimp;

  /* Check if it's a compiler intrinsic.  We only require that any
     internally recognised intrinsics are declared in a module with
     an explicit module declaration.  */
  Module *m = decl->getModule ();

  if (!m || !m->md)
    return;

  TemplateInstance *ti = decl->isInstantiated ();
  TemplateDeclaration *td = ti ? ti->tempdecl->isTemplateDeclaration () : NULL;

  const char *tname = decl->ident->toChars ();
  const char *tmodule = m->md->toChars ();
  const char *tdeco = (td == NULL) ? decl->type->deco : NULL;

  /* Look through all D intrinsics.  */
  for (size_t i = 0; i < (int) INTRINSIC_LAST; i++)
    {
      if (!intrinsic_decls[i].name)
	continue;

      if (strcmp (intrinsic_decls[i].name, tname) != 0
	  || strcmp (intrinsic_decls[i].module, tmodule) != 0)
	continue;

      /* Instantiated functions would have the wrong type deco; get it from
	 the template member instead.  */
      if (tdeco == NULL)
	{
	  if (!td || !td->onemember)
	    return;

	  FuncDeclaration *fd = td->onemember->isFuncDeclaration ();
	  if (fd == NULL)
	    return;

	  OutBuffer buf;
	  dmd::mangleToBuffer (fd->type, buf);
	  tdeco = buf.extractChars ();
	}

      /* Matching the type deco may be a bit too strict, as it means that all
	 function attributes that end up in the signature must be kept aligned
	 between the compiler and library declaration.  */
      if (strcmp (intrinsic_decls[i].deco, tdeco) == 0)
	{
	  intrinsic_code code = intrinsic_decls[i].code;

	  if (decl->csym == NULL)
	    get_symbol_decl (decl);

	  /* If there is no function body, then the implementation is always
	     provided by the compiler.  */
	  if (!decl->fbody)
	    set_decl_built_in_function (decl->csym, BUILT_IN_FRONTEND, code);

	  /* Infer whether the intrinsic can be used for CTFE, letting the
	     front-end know that it can be evaluated at compile-time.  */
	  switch (code)
	    {
	    case INTRINSIC_VA_ARG:
	    case INTRINSIC_C_VA_ARG:
	    case INTRINSIC_VASTART:
	    case INTRINSIC_ADDS:
	    case INTRINSIC_ADDSL:
	    case INTRINSIC_ADDU:
	    case INTRINSIC_ADDUL:
	    case INTRINSIC_SUBS:
	    case INTRINSIC_SUBSL:
	    case INTRINSIC_SUBU:
	    case INTRINSIC_SUBUL:
	    case INTRINSIC_MULS:
	    case INTRINSIC_MULSL:
	    case INTRINSIC_MULU:
	    case INTRINSIC_MULUI:
	    case INTRINSIC_MULUL:
	    case INTRINSIC_NEGS:
	    case INTRINSIC_NEGSL:
	    case INTRINSIC_LOADUNALIGNED:
	    case INTRINSIC_STOREUNALIGNED:
	    case INTRINSIC_SHUFFLE:
	    case INTRINSIC_SHUFFLEVECTOR:
	    case INTRINSIC_CONVERTVECTOR:
	    case INTRINSIC_BLENDVECTOR:
	    case INTRINSIC_VLOAD8:
	    case INTRINSIC_VLOAD16:
	    case INTRINSIC_VLOAD32:
	    case INTRINSIC_VLOAD64:
	    case INTRINSIC_VSTORE8:
	    case INTRINSIC_VSTORE16:
	    case INTRINSIC_VSTORE32:
	    case INTRINSIC_VSTORE64:
	      /* Cannot interpret function during CTFE.  If the library
		 provides a definition, its body will be used instead.  */
	      break;
	    case INTRINSIC_POW:
	      {
		/* Check that this overload of pow() has an equivalent
		   built-in function.  It could be `int pow(int, int)'.  */
		tree rettype = TREE_TYPE (TREE_TYPE (decl->csym));
		if (mathfn_built_in (rettype, BUILT_IN_POW) != NULL_TREE)
		  decl->builtin = BUILTIN::gcc;
		break;
	      }
	    default:
	      decl->builtin = BUILTIN::gcc;
	      break;
	    }

	  /* The intrinsic was marked as CTFE-only.  */
	  if (intrinsic_decls[i].ctfeonly)
	    DECL_BUILT_IN_CTFE (decl->csym) = 1;

	  DECL_INTRINSIC_CODE (decl->csym) = code;
	  break;
	}
    }
}

/* Helper function for maybe_warn_intrinsic_mismatch.  Issue warning about
   mismatch in the EXPECTED return type in call to the intrinsic function in
   CALLEXP, and return TRUE.  */

static bool
warn_mismatched_return_type (tree callexp, const char *expected)
{
  warning_at (EXPR_LOCATION (callexp), OPT_Wbuiltin_declaration_mismatch,
	      "mismatch in return type of intrinsic function %qD "
	      "(%qT, should be %qs)", get_callee_fndecl (callexp),
	      TREE_TYPE (callexp), expected);
  return true;
}

/* Helper function for maybe_warn_intrinsic_mismatch.  Issue warning or error
   about mismatch in the EXPECTED argument type at ARGNO in call to the
   intrinsic function in CALLEXP, and return TRUE.  */

static bool
warn_mismatched_argument (tree callexp, unsigned argno, const char *expected)
{
  warning_at (EXPR_LOCATION (callexp), OPT_Wbuiltin_declaration_mismatch,
	      "mismatch in argument %u type of intrinsic function %qD "
	      "(%qT, should be %qs)", argno + 1, get_callee_fndecl (callexp),
	      TREE_TYPE (CALL_EXPR_ARG (callexp, argno)), expected);
  return true;
}

static bool
warn_mismatched_argument (tree callexp, unsigned argno, tree expected,
			  bool error_p = false)
{
  if (error_p)
    error_at (EXPR_LOCATION (callexp),
	      "mismatch in argument %u type of intrinsic function %qD "
	      "(%qT, should be %qT)", argno + 1, get_callee_fndecl (callexp),
	      TREE_TYPE (CALL_EXPR_ARG (callexp, argno)), expected);
  else
    warning_at (EXPR_LOCATION (callexp), OPT_Wbuiltin_declaration_mismatch,
		"mismatch in argument %u type of intrinsic function %qD "
		"(%qT, should be %qT)", argno + 1, get_callee_fndecl (callexp),
		TREE_TYPE (CALL_EXPR_ARG (callexp, argno)), expected);

  return true;
}

/* Helper function for maybe_warn_intrinsic_mismatch.  Builds a vector integer
   type suitable for the mask argument of INTRINSIC_SHUFFLE from the given
   input argument TYPE.  */

static tree
build_shuffle_mask_type (tree type)
{
  const unsigned bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (type)));
  const int unsignedp = TYPE_UNSIGNED (TREE_TYPE (type));
  tree inner = lang_hooks.types.type_for_size (bits, unsignedp);
  gcc_assert (inner && TREE_CODE (inner) == INTEGER_TYPE);

  /* %% Get the front-end type for the vector so the D type will be
     printed (this should really be handled by a D tree printer).  */
  Type *t = build_frontend_type (inner);
  gcc_assert (t != NULL);
  unsigned HOST_WIDE_INT nunits = TYPE_VECTOR_SUBPARTS (type).to_constant ();

  return build_ctype (TypeVector::create (dmd::sarrayOf (t, nunits)));
}

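/* As a sketch of the above: for TYPE `__vector(float[4])' the element type
   is 32 bits wide, so the mask type built here is `__vector(int[4])'; the
   element count and bit width of the input are preserved, only the element
   type becomes integral.  */
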
/* Checks if call to intrinsic FUNCTION in CALLEXP matches the internal
   type and value constraints that we expect from the library definitions.
   Returns TRUE and issues a warning if there is a mismatch.

   Note: The return type and parameters are encoded into the signature `deco'
   string that we match on in maybe_set_intrinsic(), so if the deco mangle
   string has `i' in the part that specifies the return type, then the matched
   intrinsic will always have the return type `int'.

   For templated intrinsics however, we rely on template constraints to ensure
   that the generic type matches what we expect it to be.  There is still an
   enforced relationship between a template argument and its instantiated type.
   For example: `T func(T)(T*)' would have the generic return type `@1T' and
   generic parameter type `@1PT', so it can be assumed that if the return type
   matches what we expect then all parameters are fine as well; if it does not
   match, some internal error has occurred.  Where a templated intrinsic has
   multiple template arguments, each generic type will need to be checked for
   its validity.  */

static bool
maybe_warn_intrinsic_mismatch (tree function, tree callexp)
{
  switch (DECL_INTRINSIC_CODE (function))
    {
    case INTRINSIC_NONE:
    default:
      return false;

    case INTRINSIC_LOADUNALIGNED:
      {
	/* Expects the signature:
	   vector(T) loadUnaligned (vector(T)*);  */
	gcc_assert (call_expr_nargs (callexp) == 1);

	tree ptr = TREE_TYPE (CALL_EXPR_ARG (callexp, 0));
	if (!VECTOR_TYPE_P (TREE_TYPE (callexp))
	    || !POINTER_TYPE_P (ptr) || !VECTOR_TYPE_P (TREE_TYPE (ptr)))
	  return warn_mismatched_return_type (callexp, "__vector(T)");

	return false;
      }

    case INTRINSIC_STOREUNALIGNED:
      {
	/* Expects the signature:
	   vector(T) storeUnaligned (vector(T)*, vector(T));  */
	gcc_assert (call_expr_nargs (callexp) == 2);

	tree ptr = TREE_TYPE (CALL_EXPR_ARG (callexp, 0));
	tree val = TREE_TYPE (CALL_EXPR_ARG (callexp, 1));
	if (!VECTOR_TYPE_P (TREE_TYPE (callexp))
	    || !POINTER_TYPE_P (ptr) || !VECTOR_TYPE_P (TREE_TYPE (ptr))
	    || !VECTOR_TYPE_P (val))
	  return warn_mismatched_return_type (callexp, "__vector(T)");

	return false;
      }

    case INTRINSIC_SHUFFLE:
    case INTRINSIC_BLENDVECTOR:
      {
	/* Expects the signature:
	   vector(T) shuffle (vector(T), vector(U), vector(V));
	   vector(T) blendvector (vector(T), vector(U), vector(V));  */
	gcc_assert (call_expr_nargs (callexp) == 3);

	tree vec0 = TREE_TYPE (CALL_EXPR_ARG (callexp, 0));
	if (!VECTOR_TYPE_P (TREE_TYPE (callexp))
	    || !VECTOR_TYPE_P (vec0))
	  return warn_mismatched_return_type (callexp, "__vector(T)");

	tree vec1 = TREE_TYPE (CALL_EXPR_ARG (callexp, 1));
	if (!VECTOR_TYPE_P (vec1))
	  return warn_mismatched_argument (callexp, 1, vec0);

	tree mask = TREE_TYPE (CALL_EXPR_ARG (callexp, 2));
	if (!VECTOR_TYPE_P (mask) || !VECTOR_INTEGER_TYPE_P (mask))
	  {
	    tree expected = build_shuffle_mask_type (vec0);
	    return warn_mismatched_argument (callexp, 2, expected,
					     VECTOR_TYPE_P (mask));
	  }

	/* Types have been validated, now issue errors about violations on the
	   constraints of the intrinsic.  */
	if (TYPE_MAIN_VARIANT (vec0) != TYPE_MAIN_VARIANT (vec1))
	  return warn_mismatched_argument (callexp, 1, vec0, true);

	/* Vector element sizes should be equal between arguments and mask.  */
	if (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (vec0)))
	    != GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (mask)))
	    || maybe_ne (TYPE_VECTOR_SUBPARTS (vec0),
			 TYPE_VECTOR_SUBPARTS (mask))
	    || maybe_ne (TYPE_VECTOR_SUBPARTS (vec1),
			 TYPE_VECTOR_SUBPARTS (mask)))
	  {
	    tree expected = build_shuffle_mask_type (vec0);
	    return warn_mismatched_argument (callexp, 2, expected, true);
	  }

	return false;
      }

    case INTRINSIC_SHUFFLEVECTOR:
      {
	/* Expects the signature:
	   vector(T[N]) shufflevector (vector(T), vector(U), N...);  */
	gcc_assert (call_expr_nargs (callexp) >= 3);
	gcc_assert (VECTOR_TYPE_P (TREE_TYPE (callexp)));

	tree vec0 = TREE_TYPE (CALL_EXPR_ARG (callexp, 0));
	if (!VECTOR_TYPE_P (vec0))
	  return warn_mismatched_argument (callexp, 0, "__vector(T)");

	tree vec1 = TREE_TYPE (CALL_EXPR_ARG (callexp, 1));
	if (!VECTOR_TYPE_P (vec1))
	  return warn_mismatched_argument (callexp, 1, vec0);

	for (int i = 2; i < call_expr_nargs (callexp); i++)
	  {
	    tree idx = TREE_TYPE (CALL_EXPR_ARG (callexp, i));
	    if (TREE_CODE (idx) != INTEGER_TYPE)
	      return warn_mismatched_argument (callexp, i, d_int_type);
	  }

	/* Types have been validated, now issue errors about violations on the
	   constraints of the intrinsic.  */
	if (TYPE_MAIN_VARIANT (TREE_TYPE (vec0))
	    != TYPE_MAIN_VARIANT (TREE_TYPE (vec1)))
	  {
	    /* %% Get the front-end type for the vector so the D type will be
	       printed (this should really be handled by a D tree printer).  */
	    unsigned HOST_WIDE_INT nunits;
	    if (!TYPE_VECTOR_SUBPARTS (vec1).is_constant (&nunits))
	      break;

	    Type *inner = build_frontend_type (TREE_TYPE (vec0));
	    Type *vector = TypeVector::create (dmd::sarrayOf (inner, nunits));
	    return warn_mismatched_argument (callexp, 1,
					     build_ctype (vector), true);
	  }

	/* Vector sizes should be known, and number of indices a power of 2.  */
	unsigned HOST_WIDE_INT vec0_length;
	unsigned HOST_WIDE_INT vec1_length;
	if (!TYPE_VECTOR_SUBPARTS (vec0).is_constant (&vec0_length)
	    || !TYPE_VECTOR_SUBPARTS (vec1).is_constant (&vec1_length)
	    || !pow2p_hwi (call_expr_nargs (callexp) - 2))
	  break;

	/* All index arguments must be valid constants as well.  */
	for (int i = 2; i < call_expr_nargs (callexp); i++)
	  {
	    tree idx = CALL_EXPR_ARG (callexp, i);
	    if (!tree_fits_shwi_p (idx))
	      {
		error_at (EXPR_LOCATION (callexp),
			  "argument %qE cannot be read at compile time", idx);
		return true;
	      }

	    HOST_WIDE_INT iidx = tree_to_shwi (idx);
	    if (iidx < 0
		|| (unsigned HOST_WIDE_INT) iidx >= vec0_length + vec1_length)
	      {
		error_at (EXPR_LOCATION (callexp),
			  "element index %qE is out of bounds %<[0 .. %E]%>",
			  idx, build_integer_cst (vec0_length + vec1_length));
		return true;
	      }
	  }

	return false;
      }

    case INTRINSIC_CONVERTVECTOR:
      {
	/* Expects the signature:
	   vector(T) convertvector (vector(U));  */
	gcc_assert (call_expr_nargs (callexp) == 1);

	tree ret = TREE_TYPE (callexp);
	if (!VECTOR_TYPE_P (ret)
	    || (!VECTOR_INTEGER_TYPE_P (ret) && !VECTOR_FLOAT_TYPE_P (ret)))
	  return warn_mismatched_return_type (callexp, "__vector(T)");

	tree arg = TREE_TYPE (CALL_EXPR_ARG (callexp, 0));
	if (!VECTOR_TYPE_P (arg)
	    || (!VECTOR_INTEGER_TYPE_P (arg) && !VECTOR_FLOAT_TYPE_P (arg)))
	  return warn_mismatched_argument (callexp, 0, "__vector(T)");

	/* Types have been validated, now issue errors about violations on the
	   constraints of the intrinsic.  */
	if (maybe_ne (TYPE_VECTOR_SUBPARTS (ret), TYPE_VECTOR_SUBPARTS (arg)))
	  {
	    /* %% Get the front-end type for the vector so the D type will be
	       printed (this should really be handled by a D tree printer).  */
	    unsigned HOST_WIDE_INT nunits;
	    if (!TYPE_VECTOR_SUBPARTS (ret).is_constant (&nunits))
	      break;

	    Type *inner = build_frontend_type (TREE_TYPE (arg));
	    Type *vector = TypeVector::create (dmd::sarrayOf (inner, nunits));
	    return warn_mismatched_argument (callexp, 0,
					     build_ctype (vector), true);
	  }

	return false;
      }
    }

  /* Generic mismatch warning if it hasn't already been handled.  */
  warning_at (EXPR_LOCATION (callexp), OPT_Wbuiltin_declaration_mismatch,
	      "mismatch in call of intrinsic function %qD", function);
  return true;
}

/* Construct a function call to the built-in function CODE, N is the number of
   arguments, and the `...' parameters are the argument expressions.
   The original call expression is held in CALLEXP.  */

static tree
call_builtin_fn (tree callexp, built_in_function code, int n, ...)
{
  tree *argarray = XALLOCAVEC (tree, n);
  va_list ap;

  va_start (ap, n);
  for (int i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);

  tree exp = build_call_expr_loc_array (EXPR_LOCATION (callexp),
					builtin_decl_explicit (code),
					n, argarray);
  return convert (TREE_TYPE (callexp), fold (exp));
}

/* Expand a front-end intrinsic call to bsf().  This takes one argument,
   the signature to which can be either:

	int bsf (uint arg);
	int bsf (ulong arg);

   This scans all bits in the given argument starting with the first,
   returning the bit number of the first bit set.  The original call
   expression is held in CALLEXP.  */

static tree
expand_intrinsic_bsf (tree callexp)
{
  /* The bsf() intrinsic gets turned into __builtin_ctz(arg).
     The return value is supposed to be undefined if arg is zero.  */
  tree arg = CALL_EXPR_ARG (callexp, 0);
  int argsize = TYPE_PRECISION (TREE_TYPE (arg));

  /* Which variant of __builtin_ctz* should we call?  */
  built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_CTZ
    : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_CTZL
    : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_CTZLL
    : END_BUILTINS;

  gcc_assert (code != END_BUILTINS);

  return call_builtin_fn (callexp, code, 1, arg);
}

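/* For example: `bsf (0x18)' lowers to `__builtin_ctz (0x18)', which
   evaluates to 3, the index of the least significant set bit.  As noted
   above, a zero argument yields an undefined result, inherited from the
   underlying built-in.  */
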
/* Expand a front-end intrinsic call to bsr().  This takes one argument,
   the signature to which can be either:

	int bsr (uint arg);
	int bsr (ulong arg);

   This scans all bits in the given argument from the most significant bit
   to the least significant, returning the bit number of the first bit set.
   The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_bsr (tree callexp)
{
  /* The bsr() intrinsic gets turned into __builtin_clz(arg) ^ (size - 1).
     The return value is supposed to be undefined if arg is zero.  */
  tree arg = CALL_EXPR_ARG (callexp, 0);
  tree type = TREE_TYPE (callexp);
  int argsize = TYPE_PRECISION (TREE_TYPE (arg));

  /* Which variant of __builtin_clz* should we call?  */
  built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_CLZ
    : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_CLZL
    : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_CLZLL
    : END_BUILTINS;

  gcc_assert (code != END_BUILTINS);

  tree result = call_builtin_fn (callexp, code, 1, arg);

  return fold_build2 (BIT_XOR_EXPR, type, result,
		      build_integer_cst (argsize - 1, type));
}

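/* A worked example of the XOR identity used above, assuming a 32-bit
   argument: for arg = 0x10, __builtin_clz returns 27 and 27 ^ 31 == 4, the
   index of the most significant set bit.  The XOR form is equivalent to
   `31 - clz(arg)' because the clz result always lies in [0, 31] here.  */
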
/* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
   bt(), btc(), btr(), or bts().  These intrinsics expect to take two
   arguments, the signature to which is:

	int bt (size_t* ptr, size_t bitnum);

   All intrinsics test if a bit is set and return the result of that
   condition.  Variants of `bt' will then update that bit.  `btc' complements
   the bit, `bts' sets the bit, and `btr' resets the bit.  The original call
   expression is held in CALLEXP.  */

static tree
expand_intrinsic_bt (intrinsic_code intrinsic, tree callexp)
{
  tree ptr = CALL_EXPR_ARG (callexp, 0);
  tree bitnum = CALL_EXPR_ARG (callexp, 1);
  tree type = TREE_TYPE (TREE_TYPE (ptr));

  /* size_t bitsize = sizeof(*ptr) * BITS_PER_UNIT;  */
  tree bitsize = fold_convert (type, TYPE_SIZE (TREE_TYPE (ptr)));

  /* ptr[bitnum / bitsize]  */
  ptr = build_pointer_index (ptr, fold_build2 (TRUNC_DIV_EXPR, type,
					       bitnum, bitsize));
  ptr = indirect_ref (type, ptr);

  /* mask = 1 << (bitnum % bitsize);  */
  bitnum = fold_build2 (TRUNC_MOD_EXPR, type, bitnum, bitsize);
  bitnum = fold_build2 (LSHIFT_EXPR, type, build_one_cst (type), bitnum);

  /* cond = ptr[bitnum / size] & mask;  */
  tree cond = fold_build2 (BIT_AND_EXPR, type, ptr, bitnum);

  /* cond ? -1 : 0;  */
  cond = build_condition (TREE_TYPE (callexp), d_truthvalue_conversion (cond),
			  build_minus_one_cst (TREE_TYPE (callexp)),
			  build_zero_cst (TREE_TYPE (callexp)));

  /* Update the bit as needed, only testing the bit for bt().  */
  tree_code code;

  switch (intrinsic)
    {
    case INTRINSIC_BT:
    case INTRINSIC_BT64:
      return cond;

    case INTRINSIC_BTC:
    case INTRINSIC_BTC64:
      code = BIT_XOR_EXPR;
      break;

    case INTRINSIC_BTR:
    case INTRINSIC_BTR64:
      bitnum = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (bitnum), bitnum);
      code = BIT_AND_EXPR;
      break;

    case INTRINSIC_BTS:
    case INTRINSIC_BTS64:
      code = BIT_IOR_EXPR;
      break;

    default:
      gcc_unreachable ();
    }

  /* ptr[bitnum / size] op= mask;  */
  ptr = modify_expr (ptr, fold_build2 (code, TREE_TYPE (ptr), ptr, bitnum));

  /* Store the condition result in a temporary, and return expressions in
     correct order of evaluation.  */
  tree tmp = build_local_temp (TREE_TYPE (callexp));
  cond = modify_expr (tmp, cond);

  return compound_expr (cond, compound_expr (ptr, tmp));
}

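/* An illustrative sketch of the expansion, assuming a 64-bit size_t:
   `bts (p, 67)' becomes roughly

     mask = (size_t) 1 << (67 % 64);		// 1UL << 3
     cond = (p[67 / 64] & mask) ? -1 : 0;	// test bit 3 of p[1]
     p[1] |= mask;				// then set it

   with `cond' saved to a temporary so the test is sequenced before the
   update.  */
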
/* Expand a front-end intrinsic call to popcnt().  This takes one argument,
   the signature to which can be either:

	int popcnt (uint arg);
	int popcnt (ulong arg);

   Calculates the number of set bits in an integer.  The original call
   expression is held in CALLEXP.  */

static tree
expand_intrinsic_popcnt (tree callexp)
{
  tree arg = CALL_EXPR_ARG (callexp, 0);
  int argsize = TYPE_PRECISION (TREE_TYPE (arg));

  /* Which variant of __builtin_popcount* should we call?  */
  built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_POPCOUNT
    : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_POPCOUNTL
    : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_POPCOUNTLL
    : END_BUILTINS;

  gcc_assert (code != END_BUILTINS);

  return call_builtin_fn (callexp, code, 1, arg);
}

/* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
   rol() or ror().  These intrinsics expect to take one or two arguments,
   the signature to which can be either:

	T rol(T) (const T value, const uint count);
	T rol(uint count, T) (const T value);
	T ror(T) (const T value, const uint count);
	T ror(uint count, T) (const T value);

   This bitwise rotates VALUE left or right by COUNT bit positions.  */

static tree
expand_intrinsic_rotate (intrinsic_code intrinsic, tree callexp)
{
  tree type = TREE_TYPE (callexp);
  tree value = CALL_EXPR_ARG (callexp, 0);
  tree count;
  tree_code code;

  /* Get the equivalent tree code for the intrinsic.  */
  if (intrinsic == INTRINSIC_ROL || intrinsic == INTRINSIC_ROL_TIARG)
    code = LROTATE_EXPR;
  else if (intrinsic == INTRINSIC_ROR || intrinsic == INTRINSIC_ROR_TIARG)
    code = RROTATE_EXPR;
  else
    gcc_unreachable ();

  /* Get the COUNT parameter.  Either from the call expression arguments or the
     template instantiation arguments.  */
  if (intrinsic == INTRINSIC_ROL || intrinsic == INTRINSIC_ROR)
    count = CALL_EXPR_ARG (callexp, 1);
  else
    {
      /* Retrieve from the encoded template instantiation.  */
      tree callee = get_callee_fndecl (callexp);
      TemplateInstance *ti = DECL_LANG_FRONTEND (callee)->isInstantiated ();
      gcc_assert (ti && ti->tiargs && ti->tiargs->length == 2);

      Expression *e = dmd::isExpression ((*ti->tiargs)[0]);
      gcc_assert (e && e->op == EXP::int64);
      count = build_expr (e, true);
    }

  return fold_build2 (code, type, value, count);
}

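/* For illustration: with the template-argument form documented above, a
   hypothetical instantiation such as `rol!(3, ubyte)(x)' carries the count
   in the template instance, so COUNT is rebuilt from tiargs[0] and the
   whole call folds to `LROTATE_EXPR <x, 3>'.  */
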
/* Expand a front-end intrinsic call to copysign().  This takes two arguments,
   the signature to which can be either:

	float copysign (T to, float from);
	double copysign (T to, double from);
	real copysign (T to, real from);

   This computes a value composed of TO with the sign bit of FROM.  The
   original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_copysign (tree callexp)
{
  tree to = CALL_EXPR_ARG (callexp, 0);
  tree from = CALL_EXPR_ARG (callexp, 1);
  tree type = TREE_TYPE (to);

  /* Convert parameters to the same type.  Prefer the first parameter unless
     it is an integral type.  */
  if (INTEGRAL_TYPE_P (type))
    {
      to = fold_convert (TREE_TYPE (from), to);
      type = TREE_TYPE (to);
    }
  else
    from = fold_convert (type, from);

  /* Which variant of __builtin_copysign* should we call?  */
  built_in_function code = (type == float_type_node) ? BUILT_IN_COPYSIGNF
    : (type == double_type_node) ? BUILT_IN_COPYSIGN
    : (type == long_double_type_node) ? BUILT_IN_COPYSIGNL
    : END_BUILTINS;

  gcc_assert (code != END_BUILTINS);

  return call_builtin_fn (callexp, code, 2, to, from);
}

/* Expand a front-end intrinsic call to pow().  This takes two arguments, the
   signature to which can be either:

	float pow (float base, T exponent);
	double pow (double base, T exponent);
	real pow (real base, T exponent);

   This computes the value of BASE raised to the power of EXPONENT.
   The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_pow (tree callexp)
{
  tree base = CALL_EXPR_ARG (callexp, 0);
  tree exponent = CALL_EXPR_ARG (callexp, 1);
  tree exptype = TREE_TYPE (exponent);

  /* Which variant of __builtin_pow* should we call?  */
  built_in_function code = SCALAR_FLOAT_TYPE_P (exptype) ? BUILT_IN_POW
    : INTEGRAL_TYPE_P (exptype) ? BUILT_IN_POWI
    : END_BUILTINS;
  gcc_assert (code != END_BUILTINS);

  tree builtin = mathfn_built_in (TREE_TYPE (base), code);
  gcc_assert (builtin != NULL_TREE);

  return call_builtin_fn (callexp, DECL_FUNCTION_CODE (builtin), 2,
			  base, exponent);
}

/* Expand a front-end intrinsic call to toPrec().  This takes one argument,
   the signature to which can be either:

	T toPrec(T)(float f);
	T toPrec(T)(double f);
	T toPrec(T)(real f);

   This rounds the argument F to the precision of the specified floating
   point type T.  The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_toprec (tree callexp)
{
  tree f = CALL_EXPR_ARG (callexp, 0);
  tree type = TREE_TYPE (callexp);

  return convert (type, f);
}

/* Expand a front-end intrinsic call to va_arg().  This takes either one or
   two arguments, the signature to which can be either:

	T va_arg(T) (ref va_list ap);
	void va_arg(T) (va_list ap, ref T parmn);

   This retrieves the next variadic parameter that is type T from the given
   va_list.  If also given, store the value into parmn, otherwise return it.
   The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vaarg (tree callexp)
{
  tree ap = CALL_EXPR_ARG (callexp, 0);
  tree parmn = NULL_TREE;
  tree type;

  STRIP_NOPS (ap);

  if (call_expr_nargs (callexp) == 1)
    type = TREE_TYPE (callexp);
  else
    {
      parmn = CALL_EXPR_ARG (callexp, 1);
      STRIP_NOPS (parmn);

      /* The `ref' argument to va_arg is either an address or reference,
	 get the value of it.  */
      if (TREE_CODE (parmn) == PARM_DECL && POINTER_TYPE_P (TREE_TYPE (parmn)))
	parmn = build_deref (parmn);
      else
	{
	  gcc_assert (TREE_CODE (parmn) == ADDR_EXPR);
	  parmn = TREE_OPERAND (parmn, 0);
	}

      type = TREE_TYPE (parmn);
    }

  /* (T) VA_ARG_EXP<ap>;  */
  tree exp = build1_loc (EXPR_LOCATION (callexp), VA_ARG_EXPR, type, ap);

  /* parmn = (T) VA_ARG_EXP<ap>;  */
  if (parmn != NULL_TREE)
    exp = modify_expr (parmn, exp);

  return exp;
}

/* Expand a front-end intrinsic call to va_start(), which takes two arguments,
   the signature to which is:

	void va_start(T) (out va_list ap, ref T parmn);

   This initializes the va_list type, where parmn should be the last named
   parameter.  The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vastart (tree callexp)
{
  tree ap = CALL_EXPR_ARG (callexp, 0);
  tree parmn = CALL_EXPR_ARG (callexp, 1);

  STRIP_NOPS (ap);
  STRIP_NOPS (parmn);

  /* The va_list argument should already have its address taken.  The second
     argument, however, is inout and that needs to be fixed to prevent a
     warning.  Could be casting, so need to check type too?  */
  gcc_assert (TREE_CODE (ap) == ADDR_EXPR
	      || (TREE_CODE (ap) == PARM_DECL
		  && POINTER_TYPE_P (TREE_TYPE (ap))));

  /* Assuming nobody tries to change the return type.  */
  if (TREE_CODE (parmn) != PARM_DECL)
    {
      gcc_assert (TREE_CODE (parmn) == ADDR_EXPR);
      parmn = TREE_OPERAND (parmn, 0);
    }

  return call_builtin_fn (callexp, BUILT_IN_VA_START, 2, ap, parmn);
}

/* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
   adds(), addu(), subs(), subu(), negs(), muls(), or mulu().  These
   intrinsics expect to take two or three arguments, the signature to which
   can be either:

	int adds (int x, int y, ref bool overflow);
	long adds (long x, long y, ref bool overflow);
	int negs (int x, ref bool overflow);
	long negs (long x, ref bool overflow);

   This performs an operation on two signed or unsigned integers, checking for
   overflow.  The overflow is sticky, meaning that a sequence of operations
   can be done and overflow need only be checked at the end.  The original
   call expression is held in CALLEXP.  */

static tree
expand_intrinsic_checkedint (intrinsic_code intrinsic, tree callexp)
{
  tree type = TREE_TYPE (callexp);
  tree x;
  tree y;
  tree overflow;
  internal_fn icode;

  /* Which variant of *_OVERFLOW should we generate?  */
  switch (intrinsic)
    {
    case INTRINSIC_ADDS:
    case INTRINSIC_ADDSL:
    case INTRINSIC_ADDU:
    case INTRINSIC_ADDUL:
      x = CALL_EXPR_ARG (callexp, 0);
      y = CALL_EXPR_ARG (callexp, 1);
      overflow = CALL_EXPR_ARG (callexp, 2);
      icode = IFN_ADD_OVERFLOW;
      break;

    case INTRINSIC_SUBS:
    case INTRINSIC_SUBSL:
    case INTRINSIC_SUBU:
    case INTRINSIC_SUBUL:
      x = CALL_EXPR_ARG (callexp, 0);
      y = CALL_EXPR_ARG (callexp, 1);
      overflow = CALL_EXPR_ARG (callexp, 2);
      icode = IFN_SUB_OVERFLOW;
      break;

    case INTRINSIC_MULS:
    case INTRINSIC_MULSL:
    case INTRINSIC_MULU:
    case INTRINSIC_MULUI:
    case INTRINSIC_MULUL:
      x = CALL_EXPR_ARG (callexp, 0);
      y = CALL_EXPR_ARG (callexp, 1);
      overflow = CALL_EXPR_ARG (callexp, 2);
      icode = IFN_MUL_OVERFLOW;
      break;

    case INTRINSIC_NEGS:
    case INTRINSIC_NEGSL:
      /* The negs() intrinsic gets turned into SUB_OVERFLOW (0, y).  */
      x = fold_convert (type, integer_zero_node);
      y = CALL_EXPR_ARG (callexp, 0);
      overflow = CALL_EXPR_ARG (callexp, 1);
      icode = IFN_SUB_OVERFLOW;
      break;

    default:
      gcc_unreachable ();
    }

  tree result
    = build_call_expr_internal_loc (EXPR_LOCATION (callexp), icode,
				    build_complex_type (type), 2, x, y);

  STRIP_NOPS (overflow);
  overflow = build_deref (overflow);

  /* Assign returned result to overflow parameter, however if overflow is
     already true, maintain its value.  */
  type = TREE_TYPE (overflow);
  result = save_expr (result);

  tree exp = fold_build2 (BIT_IOR_EXPR, type, overflow,
			  fold_convert (type, imaginary_part (result)));
  exp = modify_expr (overflow, exp);

  /* Return the value of result.  */
  return compound_expr (exp, real_part (result));
}

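/* A sketch of the sticky-overflow behaviour described above, at the D level:

     bool overflow = false;
     int r = adds (adds (x, y, overflow), z, overflow);

   Each call lowers to an IFN_*_OVERFLOW result whose imaginary part is
   IOR-ed into `overflow', so `overflow' is still true at the end if any
   intermediate step overflowed.  */
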
/* Expand a front-end intrinsic call to volatileLoad().  This takes one
   argument, the signature to which can be either:

	ubyte volatileLoad (ubyte* ptr);
	ushort volatileLoad (ushort* ptr);
	uint volatileLoad (uint* ptr);
	ulong volatileLoad (ulong* ptr);

   This reads a value from the memory location indicated by ptr.  Calls to
   it are guaranteed not to be removed (such as during DCE) or reordered
   within the same thread.  The original call expression is held in
   CALLEXP.  */

static tree
expand_volatile_load (tree callexp)
{
  tree ptr = CALL_EXPR_ARG (callexp, 0);
  tree ptrtype = TREE_TYPE (ptr);
  gcc_assert (POINTER_TYPE_P (ptrtype));

  /* (T) *(volatile T *) ptr;  */
  tree type = build_qualified_type (TREE_TYPE (ptrtype), TYPE_QUAL_VOLATILE);
  tree result = indirect_ref (type, ptr);
  TREE_THIS_VOLATILE (result) = 1;
  TREE_SIDE_EFFECTS (result) = 1;

  return result;
}

/* Expand a front-end intrinsic call to volatileStore().  This takes two
   arguments, the signature to which can be either:

	void volatileStore (ubyte* ptr, ubyte value);
	void volatileStore (ushort* ptr, ushort value);
	void volatileStore (uint* ptr, uint value);
	void volatileStore (ulong* ptr, ulong value);

   This writes a value to the memory location indicated by ptr.  Calls to
   it are guaranteed not to be removed (such as during DCE) or reordered
   within the same thread.  The original call expression is held in
   CALLEXP.  */

static tree
expand_volatile_store (tree callexp)
{
  tree ptr = CALL_EXPR_ARG (callexp, 0);
  tree ptrtype = TREE_TYPE (ptr);
  gcc_assert (POINTER_TYPE_P (ptrtype));

  /* (T) *(volatile T *) ptr;  */
  tree type = build_qualified_type (TREE_TYPE (ptrtype), TYPE_QUAL_VOLATILE);
  tree result = indirect_ref (type, ptr);
  TREE_THIS_VOLATILE (result) = 1;
  TREE_SIDE_EFFECTS (result) = 1;

  /* (*(volatile T *) ptr) = value;  */
  tree value = CALL_EXPR_ARG (callexp, 1);
  return modify_expr (result, value);
}

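/* For illustration (with a hypothetical device-register pointer `regp'):
   `volatileStore (regp, 1)' becomes `*(volatile uint *) regp = 1', an
   assignment that later passes must neither delete nor reorder relative to
   other volatile accesses in the same thread.  */
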
/* Expand a front-end intrinsic call to convertvector().  This takes one
   argument, the signature to which is:

	vector(T) convertvector (vector(F) vec);

   This converts a vector VEC to TYPE by casting every element in VEC to the
   element type of TYPE.  The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vec_convert (tree callexp)
{
  tree vec = CALL_EXPR_ARG (callexp, 0);
  tree type = TREE_TYPE (callexp);

  /* Use VIEW_CONVERT for simple vector conversions.  */
  if ((TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (vec)))
       == TYPE_MAIN_VARIANT (TREE_TYPE (type)))
      || (VECTOR_INTEGER_TYPE_P (TREE_TYPE (vec))
	  && VECTOR_INTEGER_TYPE_P (type)
	  && (TYPE_PRECISION (TREE_TYPE (TREE_TYPE (vec)))
	      == TYPE_PRECISION (TREE_TYPE (type)))))
    return build1_loc (EXPR_LOCATION (callexp), VIEW_CONVERT_EXPR, type, vec);

  return build_call_expr_internal_loc (EXPR_LOCATION (callexp), IFN_VEC_CONVERT,
				       type, 1, vec);
}

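/* Illustrative examples of the distinction above: converting
   `__vector(uint[4])' to `__vector(int[4])' only reinterprets bits of the
   same width, so it becomes a VIEW_CONVERT_EXPR; converting
   `__vector(int[4])' to `__vector(float[4])' changes each element's value
   and is left to IFN_VEC_CONVERT.  */
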
/* Expand a front-end intrinsic call to blendvector().  This expects to take
   three arguments, the signature to which is:

	vector(T) blendvector (vector(T) vec0, vector(U) vec1, vector(M) mask);

   This builds a VEC_COND_EXPR if VEC0, VEC1, and MASK are vector types, VEC0
   has the same type as VEC1, and the number of elements of VEC0, VEC1, and
   MASK is the same.  The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vec_blend (tree callexp)
{
  tree vec0 = CALL_EXPR_ARG (callexp, 0);
  tree vec1 = CALL_EXPR_ARG (callexp, 1);
  tree mask = CALL_EXPR_ARG (callexp, 2);

  tree cmp = fold_build2_loc (EXPR_LOCATION (callexp), NE_EXPR,
			      truth_type_for (TREE_TYPE (mask)),
			      mask, build_zero_cst (TREE_TYPE (mask)));

  tree ret = fold_build3_loc (EXPR_LOCATION (callexp), VEC_COND_EXPR,
			      TREE_TYPE (callexp), cmp, vec0, vec1);

  if (!CONSTANT_CLASS_P (vec0) || !CONSTANT_CLASS_P (vec1))
    ret = force_target_expr (ret);

  return ret;
}

/* Expand a front-end intrinsic call to shuffle().  This expects to take three
   arguments, the signature to which is:

	vector(T) shuffle (vector(T) vec0, vector(T) vec1, vector(M) mask);

   This builds a VEC_PERM_EXPR if VEC0, VEC1, and MASK are vector types, VEC0
   has the same type as VEC1, and the number of elements of VEC0, VEC1, and
   MASK is the same.  The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vec_shuffle (tree callexp)
{
  tree vec0 = CALL_EXPR_ARG (callexp, 0);
  tree vec1 = CALL_EXPR_ARG (callexp, 1);
  tree mask = CALL_EXPR_ARG (callexp, 2);

  return build3_loc (EXPR_LOCATION (callexp), VEC_PERM_EXPR,
		     TREE_TYPE (callexp), vec0, vec1, mask);
}

/* Expand a front-end intrinsic call to shufflevector().  This takes two
   positional arguments and a variadic list, the signature to which is:

	vector(TM) shufflevector (vector(T) vec1, vector(T) vec2, index...);

   This builds a VEC_PERM_EXPR if VEC0 and VEC1 are vector types, VEC0 has the
   same element type as VEC1, and the number of elements in INDEX is a valid
   power of two.  The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vec_shufflevector (tree callexp)
{
  tree vec0 = CALL_EXPR_ARG (callexp, 0);
  tree vec1 = CALL_EXPR_ARG (callexp, 1);

  unsigned HOST_WIDE_INT v0elems =
    TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec0)).to_constant ();
  unsigned HOST_WIDE_INT v1elems =
    TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec1)).to_constant ();

  unsigned HOST_WIDE_INT num_indices = call_expr_nargs (callexp) - 2;
  unsigned HOST_WIDE_INT masklen = MAX (num_indices, MAX (v0elems, v1elems));
  unsigned HOST_WIDE_INT pad_size = (v0elems < masklen ? masklen - v0elems : 0);
  vec_perm_builder sel (masklen, masklen, 1);

  unsigned n = 0;
  for (; n < num_indices; ++n)
    {
      tree idx = CALL_EXPR_ARG (callexp, n + 2);
      HOST_WIDE_INT iidx = tree_to_shwi (idx);
      /* VEC_PERM_EXPR does not allow different sized inputs.  */
      if ((unsigned HOST_WIDE_INT) iidx >= v0elems)
	iidx += pad_size;

      sel.quick_push (iidx);
    }

  /* VEC_PERM_EXPR does not support a result that is smaller than the
     inputs.  */
  for (; n < masklen; ++n)
    sel.quick_push (n);

  vec_perm_indices indices (sel, 2, masklen);

  /* Pad out arguments to the common vector size.  */
  tree ret_type = build_vector_type (TREE_TYPE (TREE_TYPE (vec0)), masklen);
  if (v0elems < masklen)
    {
      constructor_elt elt = { NULL_TREE, build_zero_cst (TREE_TYPE (vec0)) };
      vec0 = build_constructor_single (ret_type, NULL_TREE, vec0);
      for (unsigned i = 1; i < masklen / v0elems; ++i)
	vec_safe_push (CONSTRUCTOR_ELTS (vec0), elt);
    }

  if (v1elems < masklen)
    {
      constructor_elt elt = { NULL_TREE, build_zero_cst (TREE_TYPE (vec1)) };
      vec1 = build_constructor_single (ret_type, NULL_TREE, vec1);
      for (unsigned i = 1; i < masklen / v1elems; ++i)
	vec_safe_push (CONSTRUCTOR_ELTS (vec1), elt);
    }

  tree mask_type = build_vector_type (build_nonstandard_integer_type
    (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (ret_type))), 1),
    masklen);
  tree ret = build3_loc (EXPR_LOCATION (callexp), VEC_PERM_EXPR, ret_type, vec0,
			 vec1, vec_perm_indices_to_tree (mask_type, indices));

  /* Get the low part we are interested in.  */
  if (num_indices < masklen)
    {
      ret = build3_loc (EXPR_LOCATION (callexp), BIT_FIELD_REF,
			TREE_TYPE (callexp), ret,
			TYPE_SIZE (TREE_TYPE (callexp)), bitsize_zero_node);
      /* Wrap the low part operation in a TARGET_EXPR so it gets a separate
	 temporary during gimplification.  */
      ret = force_target_expr (ret);
    }

  return ret;
}

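/* A small worked example of the above: for a call

     shufflevector (a, b, 0, 4, 1, 5)

   where `a' and `b' are both `__vector(float[4])', num_indices and both
   input lengths are 4, so masklen is 4, no padding is required, and the
   expansion is a single `VEC_PERM_EXPR <a, b, {0, 4, 1, 5}>' interleaving
   the low halves of `a' and `b'.  */
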
/* Expand a front-end intrinsic call to loadUnaligned().  This takes one
   argument, the signature to which is:

	vector(T) loadUnaligned (vector(T)* ptr)

   This generates a load of a vector from an unaligned address PTR.
   The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vec_load_unaligned (tree callexp)
{
  tree ptr = CALL_EXPR_ARG (callexp, 0);

  tree unaligned_type = build_variant_type_copy (TREE_TYPE (TREE_TYPE (ptr)));
  SET_TYPE_ALIGN (unaligned_type, 1 * BITS_PER_UNIT);
  TYPE_USER_ALIGN (unaligned_type) = 1;

  tree load = indirect_ref (unaligned_type, ptr);
  return convert (TREE_TYPE (callexp), load);
}

/* Expand a front-end intrinsic call to storeUnaligned().  This takes two
   arguments, the signature to which is:

	vector(T) storeUnaligned (vector(T)* ptr, vector(T) value)

   This generates an assignment of a vector VALUE to an unaligned address PTR.
   The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vec_store_unaligned (tree callexp)
{
  tree ptr = CALL_EXPR_ARG (callexp, 0);
  tree vec = CALL_EXPR_ARG (callexp, 1);

  tree unaligned_type = build_variant_type_copy (TREE_TYPE (TREE_TYPE (ptr)));
  SET_TYPE_ALIGN (unaligned_type, 1 * BITS_PER_UNIT);
  TYPE_USER_ALIGN (unaligned_type) = 1;

  tree load = indirect_ref (unaligned_type, ptr);
  return build_assign (MODIFY_EXPR, load, vec);
}

/* If CALLEXP is for an intrinsic, expand and return inlined compiler
   generated instructions.  Most map directly to GCC builtins, others
   require a little extra work around them.  */

tree
maybe_expand_intrinsic (tree callexp)
{
  tree callee = get_callee_fndecl (callexp);

  if (callee == NULL_TREE || TREE_CODE (callee) != FUNCTION_DECL)
    return callexp;

  /* Don't expand CTFE-only intrinsics outside of semantic processing.  */
  if (DECL_BUILT_IN_CTFE (callee) && !doing_semantic_analysis_p)
    return callexp;

  /* Gate the expansion of the intrinsic with constraint checks; if any fail,
     bail out without any lowering.  */
  if (maybe_warn_intrinsic_mismatch (callee, callexp))
    {
      /* Reset the built-in flag so that we don't trip fold_builtin.  */
      set_decl_built_in_function (callee, NOT_BUILT_IN, 0);
      return callexp;
    }

  intrinsic_code intrinsic = DECL_INTRINSIC_CODE (callee);
  built_in_function code;

  switch (intrinsic)
    {
    case INTRINSIC_NONE:
      return callexp;

    case INTRINSIC_BSF:
    case INTRINSIC_BSF64:
      return expand_intrinsic_bsf (callexp);

    case INTRINSIC_BSR:
    case INTRINSIC_BSR64:
      return expand_intrinsic_bsr (callexp);

    case INTRINSIC_BT:
    case INTRINSIC_BT64:
    case INTRINSIC_BTC:
    case INTRINSIC_BTC64:
    case INTRINSIC_BTR:
    case INTRINSIC_BTR64:
    case INTRINSIC_BTS:
    case INTRINSIC_BTS64:
      return expand_intrinsic_bt (intrinsic, callexp);

    case INTRINSIC_POPCNT32:
    case INTRINSIC_POPCNT64:
      return expand_intrinsic_popcnt (callexp);

    case INTRINSIC_ROL:
    case INTRINSIC_ROL_TIARG:
    case INTRINSIC_ROR:
    case INTRINSIC_ROR_TIARG:
      return expand_intrinsic_rotate (intrinsic, callexp);

    case INTRINSIC_BSWAP16:
    case INTRINSIC_BSWAP32:
    case INTRINSIC_BSWAP64:
    case INTRINSIC_CEIL:
    case INTRINSIC_CEILF:
    case INTRINSIC_CEILL:
    case INTRINSIC_COS:
    case INTRINSIC_COSF:
    case INTRINSIC_COSL:
    case INTRINSIC_EXP:
    case INTRINSIC_EXP2:
    case INTRINSIC_EXPM1:
    case INTRINSIC_FABS:
    case INTRINSIC_FABSF:
    case INTRINSIC_FABSL:
    case INTRINSIC_FLOOR:
    case INTRINSIC_FLOORF:
    case INTRINSIC_FLOORL:
    case INTRINSIC_ISFINITE:
    case INTRINSIC_ISINFINITY:
    case INTRINSIC_ISNAN:
    case INTRINSIC_LOG:
    case INTRINSIC_LOG10:
    case INTRINSIC_LOG2:
    case INTRINSIC_RINT:
    case INTRINSIC_RINTF:
    case INTRINSIC_RINTL:
    case INTRINSIC_RNDTOL:
    case INTRINSIC_RNDTOLF:
    case INTRINSIC_RNDTOLL:
    case INTRINSIC_ROUND:
    case INTRINSIC_SIN:
    case INTRINSIC_SINF:
    case INTRINSIC_SINL:
    case INTRINSIC_SQRT:
    case INTRINSIC_SQRTF:
    case INTRINSIC_SQRTL:
    case INTRINSIC_TAN:
    case INTRINSIC_TRUNC:
      code = intrinsic_decls[intrinsic].built_in;
      gcc_assert (code != BUILT_IN_NONE);
      return call_builtin_fn (callexp, code, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_FMAX:
    case INTRINSIC_FMIN:
    case INTRINSIC_LDEXP:
    case INTRINSIC_LDEXPF:
    case INTRINSIC_LDEXPL:
      code = intrinsic_decls[intrinsic].built_in;
      gcc_assert (code != BUILT_IN_NONE);
      return call_builtin_fn (callexp, code, 2,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1));

    case INTRINSIC_FMA:
      code = intrinsic_decls[intrinsic].built_in;
      gcc_assert (code != BUILT_IN_NONE);
      return call_builtin_fn (callexp, code, 3,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1),
			      CALL_EXPR_ARG (callexp, 2));

    case INTRINSIC_COPYSIGN:
    case INTRINSIC_COPYSIGNI:
      return expand_intrinsic_copysign (callexp);

    case INTRINSIC_POW:
      return expand_intrinsic_pow (callexp);

    case INTRINSIC_TOPREC:
    case INTRINSIC_TOPRECF:
    case INTRINSIC_TOPRECL:
      return expand_intrinsic_toprec (callexp);

    case INTRINSIC_VA_ARG:
    case INTRINSIC_C_VA_ARG:
      return expand_intrinsic_vaarg (callexp);

    case INTRINSIC_VASTART:
      return expand_intrinsic_vastart (callexp);

    case INTRINSIC_ADDS:
    case INTRINSIC_ADDSL:
    case INTRINSIC_ADDU:
    case INTRINSIC_ADDUL:
    case INTRINSIC_SUBS:
    case INTRINSIC_SUBSL:
    case INTRINSIC_SUBU:
    case INTRINSIC_SUBUL:
    case INTRINSIC_MULS:
    case INTRINSIC_MULSL:
    case INTRINSIC_MULU:
    case INTRINSIC_MULUI:
    case INTRINSIC_MULUL:
    case INTRINSIC_NEGS:
    case INTRINSIC_NEGSL:
      return expand_intrinsic_checkedint (intrinsic, callexp);

    case INTRINSIC_VLOAD8:
    case INTRINSIC_VLOAD16:
    case INTRINSIC_VLOAD32:
    case INTRINSIC_VLOAD64:
      return expand_volatile_load (callexp);

    case INTRINSIC_VSTORE8:
    case INTRINSIC_VSTORE16:
    case INTRINSIC_VSTORE32:
    case INTRINSIC_VSTORE64:
      return expand_volatile_store (callexp);

    case INTRINSIC_LOADUNALIGNED:
      return expand_intrinsic_vec_load_unaligned (callexp);

    case INTRINSIC_STOREUNALIGNED:
      return expand_intrinsic_vec_store_unaligned (callexp);

    case INTRINSIC_SHUFFLE:
      return expand_intrinsic_vec_shuffle (callexp);

    case INTRINSIC_SHUFFLEVECTOR:
      return expand_intrinsic_vec_shufflevector (callexp);

    case INTRINSIC_CONVERTVECTOR:
      return expand_intrinsic_vec_convert (callexp);

    case INTRINSIC_BLENDVECTOR:
      return expand_intrinsic_vec_blend (callexp);

    default:
      gcc_unreachable ();
    }
}

/* If FNDECL is an intrinsic, return the FUNCTION_DECL that has a library
   fallback implementation of it, otherwise raise an error.  */

tree
maybe_reject_intrinsic (tree fndecl)
{
  gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);

  intrinsic_code intrinsic = DECL_INTRINSIC_CODE (fndecl);

  if (intrinsic == INTRINSIC_NONE)
    {
      /* Not an intrinsic, but it still might be a declaration from the
	 `gcc.builtins' module.  */
      if (fndecl_built_in_p (fndecl) && DECL_IS_UNDECLARED_BUILTIN (fndecl)
	  && !DECL_ASSEMBLER_NAME_SET_P (fndecl))
	error ("built-in function %qE must be directly called", fndecl);

      return fndecl;
    }

  /* Nothing to do if the intrinsic has a D library implementation.  */
  if (intrinsic_decls[intrinsic].fallback)
    return fndecl;

  /* Check the GCC built-in decl if the intrinsic maps to one.  */
  built_in_function code = intrinsic_decls[intrinsic].built_in;
  if (code != BUILT_IN_NONE)
    {
      tree builtin = builtin_decl_explicit (code);
      if (!DECL_IS_UNDECLARED_BUILTIN (builtin)
	  || DECL_ASSEMBLER_NAME_SET_P (builtin))
	return builtin;
    }

  /* It's a D language intrinsic with no library implementation.  */
  error ("intrinsic function %qE must be directly called", fndecl);
  return fndecl;
}