/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "hash-set.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "predict.h"
#include "hashtab.h"
#include "hard-reg-set.h"
#include "function.h"
#include "cfgrtl.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "ipa-ref.h"
#include "lto-streamer.h"
#include "cgraph.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "gomp-constants.h"

static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of _DECL trees, making sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with "__builtin_", "__sync_" or "__atomic_",
   or names one of the Cilk Plus runtime entry points when Cilk Plus is
   enabled.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}

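/* For example, if get_object_alignment_1 reports align == 64 and
   bitpos == 32, the address is congruent to 32 modulo 64 bits; the
   largest power of two guaranteed to divide it is then
   bitpos & -bitpos == 32 bits, i.e. 4-byte alignment.  */
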
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}

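/* For example, for the string constant "foo\0bar" with a known offset of 4
   the code above returns ssize_int (3), the length of "bar"; with a
   non-constant offset it returns NULL_TREE, because the embedded zero byte
   makes the length depend on where the search starts.  */
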
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}

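/* For example, on a little-endian target c_readstr ("abcd", SImode)
   produces the constant 0x64636261 ('a' == 0x61 lands in the least
   significant byte); on a big-endian target the same call produces
   0x61626364.  Once a zero byte is read, CH stays zero, so any
   remaining bytes read as zero.  */
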
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

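/* The buffer written above is thus laid out as follows: word 0 holds the
   (target-defined) frame pointer value, word 1 the address of the receiver
   label, and everything from offset 2 * GET_MODE_SIZE (Pmode) onward the
   machine-dependent stack save area.  expand_builtin_longjmp below reads
   the fields back at the same offsets.  */
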
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}

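/* For example,
     validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
   accepts exactly one pointer argument followed by one integer argument,
   while
     validate_arglist (exp, POINTER_TYPE, 0)
   accepts a pointer followed by any number of further arguments.  */
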
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
	return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}

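/* The block this sizes is therefore laid out as: the incoming arg
   pointer, then the structure value address if the target passes one
   explicitly, then each argument register padded to its natural
   alignment.  apply_args_mode[] remembers the mode chosen for every
   register so that expand_builtin_apply_args_1 and expand_builtin_apply
   walk the block with identical offsets.  */
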
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

1588 /* Perform an untyped call and save the state required to perform an
1589 untyped return of whatever value was returned by the given function. */
1591 static rtx
1592 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1594 int size, align, regno;
1595 machine_mode mode;
1596 rtx incoming_args, result, reg, dest, src;
1597 rtx_call_insn *call_insn;
1598 rtx old_stack_level = 0;
1599 rtx call_fusage = 0;
1600 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1602 arguments = convert_memory_address (Pmode, arguments);
1604 /* Create a block where the return registers can be saved. */
1605 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1607 /* Fetch the arg pointer from the ARGUMENTS block. */
1608 incoming_args = gen_reg_rtx (Pmode);
1609 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1610 #ifndef STACK_GROWS_DOWNWARD
1611 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1612 incoming_args, 0, OPTAB_LIB_WIDEN);
1613 #endif
1615 /* Push a new argument block and copy the arguments. Do not allow
1616 the (potential) memcpy call below to interfere with our stack
1617 manipulations. */
1618 do_pending_stack_adjust ();
1619 NO_DEFER_POP;
1621 /* Save the stack with nonlocal if available. */
1622 #ifdef HAVE_save_stack_nonlocal
1623 if (HAVE_save_stack_nonlocal)
1624 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1625 else
1626 #endif
1627 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1629 /* Allocate a block of memory onto the stack and copy the memory
1630 arguments to the outgoing arguments address. We can pass TRUE
1631 as the 4th argument because we just saved the stack pointer
1632 and will restore it right after the call. */
1633 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1635 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1636 may have already set current_function_calls_alloca to true.
1637 current_function_calls_alloca won't be set if argsize is zero,
1638 so we have to guarantee need_drap is true here. */
1639 if (SUPPORTS_STACK_ALIGNMENT)
1640 crtl->need_drap = true;
1642 dest = virtual_outgoing_args_rtx;
1643 #ifndef STACK_GROWS_DOWNWARD
1644 if (CONST_INT_P (argsize))
1645 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1646 else
1647 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1648 #endif
1649 dest = gen_rtx_MEM (BLKmode, dest);
1650 set_mem_align (dest, PARM_BOUNDARY);
1651 src = gen_rtx_MEM (BLKmode, incoming_args);
1652 set_mem_align (src, PARM_BOUNDARY);
1653 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1655 /* Refer to the argument block. */
1656 apply_args_size ();
1657 arguments = gen_rtx_MEM (BLKmode, arguments);
1658 set_mem_align (arguments, PARM_BOUNDARY);
1660 /* Walk past the arg-pointer and structure value address. */
1661 size = GET_MODE_SIZE (Pmode);
1662 if (struct_value)
1663 size += GET_MODE_SIZE (Pmode);
1665 /* Restore each of the registers previously saved. Make USE insns
1666 for each of these registers for use in making the call. */
1667 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1668 if ((mode = apply_args_mode[regno]) != VOIDmode)
1670 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1671 if (size % align != 0)
1672 size = CEIL (size, align) * align;
1673 reg = gen_rtx_REG (mode, regno);
1674 emit_move_insn (reg, adjust_address (arguments, mode, size));
1675 use_reg (&call_fusage, reg);
1676 size += GET_MODE_SIZE (mode);
1679 /* Restore the structure value address unless this is passed as an
1680 "invisible" first argument. */
1681 size = GET_MODE_SIZE (Pmode);
1682 if (struct_value)
1684 rtx value = gen_reg_rtx (Pmode);
1685 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1686 emit_move_insn (struct_value, value);
1687 if (REG_P (struct_value))
1688 use_reg (&call_fusage, struct_value);
1689 size += GET_MODE_SIZE (Pmode);
1692 /* All arguments and registers used for the call are set up by now! */
1693 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1695 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1696 and we don't want to load it into a register as an optimization,
1697 because prepare_call_address already did it if it should be done. */
1698 if (GET_CODE (function) != SYMBOL_REF)
1699 function = memory_address (FUNCTION_MODE, function);
1701 /* Generate the actual call instruction and save the return value. */
1702 #ifdef HAVE_untyped_call
1703 if (HAVE_untyped_call)
1704 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1705 result, result_vector (1, result)));
1706 else
1707 #endif
1708 #ifdef HAVE_call_value
1709 if (HAVE_call_value)
1711 rtx valreg = 0;
1713 /* Locate the unique return register. It is not possible to
1714 express a call that sets more than one return register using
1715 call_value; use untyped_call for that. In fact, untyped_call
1716 only needs to save the return registers in the given block. */
1717 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1718 if ((mode = apply_result_mode[regno]) != VOIDmode)
1720 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1722 valreg = gen_rtx_REG (mode, regno);
1725 emit_call_insn (GEN_CALL_VALUE (valreg,
1726 gen_rtx_MEM (FUNCTION_MODE, function),
1727 const0_rtx, NULL_RTX, const0_rtx));
1729 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1731 else
1732 #endif
1733 gcc_unreachable ();
1735 /* Find the CALL insn we just emitted, and attach the register usage
1736 information. */
1737 call_insn = last_call_insn ();
1738 add_function_usage_to (call_insn, call_fusage);
1740 /* Restore the stack. */
1741 #ifdef HAVE_save_stack_nonlocal
1742 if (HAVE_save_stack_nonlocal)
1743 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1744 else
1745 #endif
1746 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1747 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1749 OK_DEFER_POP;
1751 /* Return the address of the result block. */
1752 result = copy_addr_to_reg (XEXP (result, 0));
1753 return convert_memory_address (ptr_mode, result);
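/* For illustration, a sketch of the user-level view of this machinery
   (FN and the 64-byte argument-block size are illustrative placeholders):

     void *forwarder (int arg, ...)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) fn, args, 64);
       __builtin_return (ret);
     }

   expand_builtin_apply above implements the __builtin_apply step: it
   rebuilds the saved argument block on the stack, reloads the argument
   registers, makes the call, and saves every possible return register
   into the result block whose address it returns.  */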
1756 /* Perform an untyped return. */
1758 static void
1759 expand_builtin_return (rtx result)
1761 int size, align, regno;
1762 machine_mode mode;
1763 rtx reg;
1764 rtx_insn *call_fusage = 0;
1766 result = convert_memory_address (Pmode, result);
1768 apply_result_size ();
1769 result = gen_rtx_MEM (BLKmode, result);
1771 #ifdef HAVE_untyped_return
1772 if (HAVE_untyped_return)
1774 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1775 emit_barrier ();
1776 return;
1778 #endif
1780 /* Restore the return value and note that each value is used. */
1781 size = 0;
1782 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1783 if ((mode = apply_result_mode[regno]) != VOIDmode)
1785 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1786 if (size % align != 0)
1787 size = CEIL (size, align) * align;
1788 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1789 emit_move_insn (reg, adjust_address (result, mode, size));
1791 push_to_sequence (call_fusage);
1792 emit_use (reg);
1793 call_fusage = get_insns ();
1794 end_sequence ();
1795 size += GET_MODE_SIZE (mode);
1798 /* Put the USE insns before the return. */
1799 emit_insn (call_fusage);
1801 /* Return whatever values were restored by jumping directly to the end
1802 of the function. */
1803 expand_naked_return ();
1806 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1808 static enum type_class
1809 type_to_class (tree type)
1811 switch (TREE_CODE (type))
1813 case VOID_TYPE: return void_type_class;
1814 case INTEGER_TYPE: return integer_type_class;
1815 case ENUMERAL_TYPE: return enumeral_type_class;
1816 case BOOLEAN_TYPE: return boolean_type_class;
1817 case POINTER_TYPE: return pointer_type_class;
1818 case REFERENCE_TYPE: return reference_type_class;
1819 case OFFSET_TYPE: return offset_type_class;
1820 case REAL_TYPE: return real_type_class;
1821 case COMPLEX_TYPE: return complex_type_class;
1822 case FUNCTION_TYPE: return function_type_class;
1823 case METHOD_TYPE: return method_type_class;
1824 case RECORD_TYPE: return record_type_class;
1825 case UNION_TYPE:
1826 case QUAL_UNION_TYPE: return union_type_class;
1827 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1828 ? string_type_class : array_type_class);
1829 case LANG_TYPE: return lang_type_class;
1830 default: return no_type_class;
1834 /* Expand a call EXP to __builtin_classify_type. */
1836 static rtx
1837 expand_builtin_classify_type (tree exp)
1839 if (call_expr_nargs (exp))
1840 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1841 return GEN_INT (no_type_class);
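/* For illustration, at the source level:

     __builtin_classify_type (1)     evaluates to integer_type_class
     __builtin_classify_type (1.0)   evaluates to real_type_class
     __builtin_classify_type ("x")   evaluates to pointer_type_class
                                     (the array argument decays)

   and a call with no argument yields no_type_class.  */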
1844 /* This helper macro, meant to be used in mathfn_built_in below,
1845 determines which among a set of three builtin math functions is
1846 appropriate for a given type mode. The `F' and `L' cases are
1847 automatically generated from the `double' case. */
1848 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1849 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1850 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1851 fcodel = BUILT_IN_MATHFN##L ; break;
1852 /* Similar to above, but appends _R after any F/L suffix. */
1853 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1854 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1855 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1856 fcodel = BUILT_IN_MATHFN##L_R ; break;
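/* For illustration, CASE_MATHFN (BUILT_IN_SQRT) expands to

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   so one switch entry covers the double, float and long double
   variants at once.  */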
1858 /* Return the mathematical function equivalent to FN, operating directly on TYPE,
1859 if available. If IMPLICIT is true use the implicit builtin declaration,
1860 otherwise use the explicit declaration. If we can't do the conversion,
1861 return zero. */
1863 static tree
1864 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1866 enum built_in_function fcode, fcodef, fcodel, fcode2;
1868 switch (fn)
1870 CASE_MATHFN (BUILT_IN_ACOS)
1871 CASE_MATHFN (BUILT_IN_ACOSH)
1872 CASE_MATHFN (BUILT_IN_ASIN)
1873 CASE_MATHFN (BUILT_IN_ASINH)
1874 CASE_MATHFN (BUILT_IN_ATAN)
1875 CASE_MATHFN (BUILT_IN_ATAN2)
1876 CASE_MATHFN (BUILT_IN_ATANH)
1877 CASE_MATHFN (BUILT_IN_CBRT)
1878 CASE_MATHFN (BUILT_IN_CEIL)
1879 CASE_MATHFN (BUILT_IN_CEXPI)
1880 CASE_MATHFN (BUILT_IN_COPYSIGN)
1881 CASE_MATHFN (BUILT_IN_COS)
1882 CASE_MATHFN (BUILT_IN_COSH)
1883 CASE_MATHFN (BUILT_IN_DREM)
1884 CASE_MATHFN (BUILT_IN_ERF)
1885 CASE_MATHFN (BUILT_IN_ERFC)
1886 CASE_MATHFN (BUILT_IN_EXP)
1887 CASE_MATHFN (BUILT_IN_EXP10)
1888 CASE_MATHFN (BUILT_IN_EXP2)
1889 CASE_MATHFN (BUILT_IN_EXPM1)
1890 CASE_MATHFN (BUILT_IN_FABS)
1891 CASE_MATHFN (BUILT_IN_FDIM)
1892 CASE_MATHFN (BUILT_IN_FLOOR)
1893 CASE_MATHFN (BUILT_IN_FMA)
1894 CASE_MATHFN (BUILT_IN_FMAX)
1895 CASE_MATHFN (BUILT_IN_FMIN)
1896 CASE_MATHFN (BUILT_IN_FMOD)
1897 CASE_MATHFN (BUILT_IN_FREXP)
1898 CASE_MATHFN (BUILT_IN_GAMMA)
1899 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1900 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1901 CASE_MATHFN (BUILT_IN_HYPOT)
1902 CASE_MATHFN (BUILT_IN_ILOGB)
1903 CASE_MATHFN (BUILT_IN_ICEIL)
1904 CASE_MATHFN (BUILT_IN_IFLOOR)
1905 CASE_MATHFN (BUILT_IN_INF)
1906 CASE_MATHFN (BUILT_IN_IRINT)
1907 CASE_MATHFN (BUILT_IN_IROUND)
1908 CASE_MATHFN (BUILT_IN_ISINF)
1909 CASE_MATHFN (BUILT_IN_J0)
1910 CASE_MATHFN (BUILT_IN_J1)
1911 CASE_MATHFN (BUILT_IN_JN)
1912 CASE_MATHFN (BUILT_IN_LCEIL)
1913 CASE_MATHFN (BUILT_IN_LDEXP)
1914 CASE_MATHFN (BUILT_IN_LFLOOR)
1915 CASE_MATHFN (BUILT_IN_LGAMMA)
1916 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1917 CASE_MATHFN (BUILT_IN_LLCEIL)
1918 CASE_MATHFN (BUILT_IN_LLFLOOR)
1919 CASE_MATHFN (BUILT_IN_LLRINT)
1920 CASE_MATHFN (BUILT_IN_LLROUND)
1921 CASE_MATHFN (BUILT_IN_LOG)
1922 CASE_MATHFN (BUILT_IN_LOG10)
1923 CASE_MATHFN (BUILT_IN_LOG1P)
1924 CASE_MATHFN (BUILT_IN_LOG2)
1925 CASE_MATHFN (BUILT_IN_LOGB)
1926 CASE_MATHFN (BUILT_IN_LRINT)
1927 CASE_MATHFN (BUILT_IN_LROUND)
1928 CASE_MATHFN (BUILT_IN_MODF)
1929 CASE_MATHFN (BUILT_IN_NAN)
1930 CASE_MATHFN (BUILT_IN_NANS)
1931 CASE_MATHFN (BUILT_IN_NEARBYINT)
1932 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1933 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1934 CASE_MATHFN (BUILT_IN_POW)
1935 CASE_MATHFN (BUILT_IN_POWI)
1936 CASE_MATHFN (BUILT_IN_POW10)
1937 CASE_MATHFN (BUILT_IN_REMAINDER)
1938 CASE_MATHFN (BUILT_IN_REMQUO)
1939 CASE_MATHFN (BUILT_IN_RINT)
1940 CASE_MATHFN (BUILT_IN_ROUND)
1941 CASE_MATHFN (BUILT_IN_SCALB)
1942 CASE_MATHFN (BUILT_IN_SCALBLN)
1943 CASE_MATHFN (BUILT_IN_SCALBN)
1944 CASE_MATHFN (BUILT_IN_SIGNBIT)
1945 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1946 CASE_MATHFN (BUILT_IN_SIN)
1947 CASE_MATHFN (BUILT_IN_SINCOS)
1948 CASE_MATHFN (BUILT_IN_SINH)
1949 CASE_MATHFN (BUILT_IN_SQRT)
1950 CASE_MATHFN (BUILT_IN_TAN)
1951 CASE_MATHFN (BUILT_IN_TANH)
1952 CASE_MATHFN (BUILT_IN_TGAMMA)
1953 CASE_MATHFN (BUILT_IN_TRUNC)
1954 CASE_MATHFN (BUILT_IN_Y0)
1955 CASE_MATHFN (BUILT_IN_Y1)
1956 CASE_MATHFN (BUILT_IN_YN)
1958 default:
1959 return NULL_TREE;
1962 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1963 fcode2 = fcode;
1964 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1965 fcode2 = fcodef;
1966 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1967 fcode2 = fcodel;
1968 else
1969 return NULL_TREE;
1971 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1972 return NULL_TREE;
1974 return builtin_decl_explicit (fcode2);
1977 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1979 tree
1980 mathfn_built_in (tree type, enum built_in_function fn)
1982 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
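/* For illustration, mathfn_built_in (float_type_node, BUILT_IN_SIN)
   returns the decl for sinf, or NULL_TREE when that variant is not
   available as an implicit builtin; callers use this to retarget a
   math call after changing the type it operates on.  */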
1985 /* If errno must be maintained, expand the RTL to check if the result,
1986 TARGET, of a built-in function call, EXP, is NaN, and if so set
1987 errno to EDOM. */
1989 static void
1990 expand_errno_check (tree exp, rtx target)
1992 rtx_code_label *lab = gen_label_rtx ();
1994 /* Test the result; if it is NaN, set errno=EDOM because
1995 the argument was not in the domain. */
1996 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1997 NULL_RTX, NULL_RTX, lab,
1998 /* The jump is very likely. */
1999 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
2001 #ifdef TARGET_EDOM
2002 /* If this built-in doesn't throw an exception, set errno directly. */
2003 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
2005 #ifdef GEN_ERRNO_RTX
2006 rtx errno_rtx = GEN_ERRNO_RTX;
2007 #else
2008 rtx errno_rtx
2009 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2010 #endif
2011 emit_move_insn (errno_rtx,
2012 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2013 emit_label (lab);
2014 return;
2016 #endif
2018 /* Make sure the library call isn't expanded as a tail call. */
2019 CALL_EXPR_TAILCALL (exp) = 0;
2021 /* We can't set errno=EDOM directly; let the library call do it.
2022 Pop the arguments right away in case the call gets deleted. */
2023 NO_DEFER_POP;
2024 expand_call (exp, target, 0);
2025 OK_DEFER_POP;
2026 emit_label (lab);
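/* For illustration, the expansion above amounts roughly to this
   source-level idiom (NaN is the only value that compares unequal
   to itself):

     if (result != result)
       errno = EDOM;

   with the non-NaN path predicted as very likely.  */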
2029 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2030 Return NULL_RTX if a normal call should be emitted rather than expanding
2031 the function in-line. EXP is the expression that is a call to the builtin
2032 function; if convenient, the result should be placed in TARGET.
2033 SUBTARGET may be used as the target for computing one of EXP's operands. */
2035 static rtx
2036 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2038 optab builtin_optab;
2039 rtx op0;
2040 rtx_insn *insns;
2041 tree fndecl = get_callee_fndecl (exp);
2042 machine_mode mode;
2043 bool errno_set = false;
2044 bool try_widening = false;
2045 tree arg;
2047 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2048 return NULL_RTX;
2050 arg = CALL_EXPR_ARG (exp, 0);
2052 switch (DECL_FUNCTION_CODE (fndecl))
2054 CASE_FLT_FN (BUILT_IN_SQRT):
2055 errno_set = ! tree_expr_nonnegative_p (arg);
2056 try_widening = true;
2057 builtin_optab = sqrt_optab;
2058 break;
2059 CASE_FLT_FN (BUILT_IN_EXP):
2060 errno_set = true; builtin_optab = exp_optab; break;
2061 CASE_FLT_FN (BUILT_IN_EXP10):
2062 CASE_FLT_FN (BUILT_IN_POW10):
2063 errno_set = true; builtin_optab = exp10_optab; break;
2064 CASE_FLT_FN (BUILT_IN_EXP2):
2065 errno_set = true; builtin_optab = exp2_optab; break;
2066 CASE_FLT_FN (BUILT_IN_EXPM1):
2067 errno_set = true; builtin_optab = expm1_optab; break;
2068 CASE_FLT_FN (BUILT_IN_LOGB):
2069 errno_set = true; builtin_optab = logb_optab; break;
2070 CASE_FLT_FN (BUILT_IN_LOG):
2071 errno_set = true; builtin_optab = log_optab; break;
2072 CASE_FLT_FN (BUILT_IN_LOG10):
2073 errno_set = true; builtin_optab = log10_optab; break;
2074 CASE_FLT_FN (BUILT_IN_LOG2):
2075 errno_set = true; builtin_optab = log2_optab; break;
2076 CASE_FLT_FN (BUILT_IN_LOG1P):
2077 errno_set = true; builtin_optab = log1p_optab; break;
2078 CASE_FLT_FN (BUILT_IN_ASIN):
2079 builtin_optab = asin_optab; break;
2080 CASE_FLT_FN (BUILT_IN_ACOS):
2081 builtin_optab = acos_optab; break;
2082 CASE_FLT_FN (BUILT_IN_TAN):
2083 builtin_optab = tan_optab; break;
2084 CASE_FLT_FN (BUILT_IN_ATAN):
2085 builtin_optab = atan_optab; break;
2086 CASE_FLT_FN (BUILT_IN_FLOOR):
2087 builtin_optab = floor_optab; break;
2088 CASE_FLT_FN (BUILT_IN_CEIL):
2089 builtin_optab = ceil_optab; break;
2090 CASE_FLT_FN (BUILT_IN_TRUNC):
2091 builtin_optab = btrunc_optab; break;
2092 CASE_FLT_FN (BUILT_IN_ROUND):
2093 builtin_optab = round_optab; break;
2094 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2095 builtin_optab = nearbyint_optab;
2096 if (flag_trapping_math)
2097 break;
2098 /* Else fallthrough and expand as rint. */
2099 CASE_FLT_FN (BUILT_IN_RINT):
2100 builtin_optab = rint_optab; break;
2101 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2102 builtin_optab = significand_optab; break;
2103 default:
2104 gcc_unreachable ();
2107 /* Make a suitable register to place result in. */
2108 mode = TYPE_MODE (TREE_TYPE (exp));
2110 if (! flag_errno_math || ! HONOR_NANS (mode))
2111 errno_set = false;
2113 /* Before working hard, check whether the instruction is available, but try
2114 to widen the mode for specific operations. */
2115 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2116 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2117 && (!errno_set || !optimize_insn_for_size_p ()))
2119 rtx result = gen_reg_rtx (mode);
2121 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2122 need to expand the argument again. This way, we will not perform
2123 side-effects more than once. */
2124 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2126 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2128 start_sequence ();
2130 /* Compute into RESULT.
2131 Set RESULT to wherever the result comes back. */
2132 result = expand_unop (mode, builtin_optab, op0, result, 0);
2134 if (result != 0)
2136 if (errno_set)
2137 expand_errno_check (exp, result);
2139 /* Output the entire sequence. */
2140 insns = get_insns ();
2141 end_sequence ();
2142 emit_insn (insns);
2143 return result;
2146 /* If we were unable to expand via the builtin, stop the sequence
2147 (without outputting the insns) and call the library function
2148 with the stabilized argument list. */
2149 end_sequence ();
2152 return expand_call (exp, target, target == const0_rtx);
2155 /* Expand a call to the builtin binary math functions (pow and atan2).
2156 Return NULL_RTX if a normal call should be emitted rather than expanding the
2157 function in-line. EXP is the expression that is a call to the builtin
2158 function; if convenient, the result should be placed in TARGET.
2159 SUBTARGET may be used as the target for computing one of EXP's
2160 operands. */
2162 static rtx
2163 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2165 optab builtin_optab;
2166 rtx op0, op1, result;
2167 rtx_insn *insns;
2168 int op1_type = REAL_TYPE;
2169 tree fndecl = get_callee_fndecl (exp);
2170 tree arg0, arg1;
2171 machine_mode mode;
2172 bool errno_set = true;
2174 switch (DECL_FUNCTION_CODE (fndecl))
2176 CASE_FLT_FN (BUILT_IN_SCALBN):
2177 CASE_FLT_FN (BUILT_IN_SCALBLN):
2178 CASE_FLT_FN (BUILT_IN_LDEXP):
2179 op1_type = INTEGER_TYPE;
2180 default:
2181 break;
2184 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2185 return NULL_RTX;
2187 arg0 = CALL_EXPR_ARG (exp, 0);
2188 arg1 = CALL_EXPR_ARG (exp, 1);
2190 switch (DECL_FUNCTION_CODE (fndecl))
2192 CASE_FLT_FN (BUILT_IN_POW):
2193 builtin_optab = pow_optab; break;
2194 CASE_FLT_FN (BUILT_IN_ATAN2):
2195 builtin_optab = atan2_optab; break;
2196 CASE_FLT_FN (BUILT_IN_SCALB):
2197 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2198 return 0;
2199 builtin_optab = scalb_optab; break;
2200 CASE_FLT_FN (BUILT_IN_SCALBN):
2201 CASE_FLT_FN (BUILT_IN_SCALBLN):
2202 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2203 return 0;
2204 /* Fall through... */
2205 CASE_FLT_FN (BUILT_IN_LDEXP):
2206 builtin_optab = ldexp_optab; break;
2207 CASE_FLT_FN (BUILT_IN_FMOD):
2208 builtin_optab = fmod_optab; break;
2209 CASE_FLT_FN (BUILT_IN_REMAINDER):
2210 CASE_FLT_FN (BUILT_IN_DREM):
2211 builtin_optab = remainder_optab; break;
2212 default:
2213 gcc_unreachable ();
2216 /* Make a suitable register to place result in. */
2217 mode = TYPE_MODE (TREE_TYPE (exp));
2219 /* Before working hard, check whether the instruction is available. */
2220 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2221 return NULL_RTX;
2223 result = gen_reg_rtx (mode);
2225 if (! flag_errno_math || ! HONOR_NANS (mode))
2226 errno_set = false;
2228 if (errno_set && optimize_insn_for_size_p ())
2229 return 0;
2231 /* Always stabilize the argument list. */
2232 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2233 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2235 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2236 op1 = expand_normal (arg1);
2238 start_sequence ();
2240 /* Compute into RESULT.
2241 Set RESULT to wherever the result comes back. */
2242 result = expand_binop (mode, builtin_optab, op0, op1,
2243 result, 0, OPTAB_DIRECT);
2245 /* If we were unable to expand via the builtin, stop the sequence
2246 (without outputting the insns) and call the library function
2247 with the stabilized argument list. */
2248 if (result == 0)
2250 end_sequence ();
2251 return expand_call (exp, target, target == const0_rtx);
2254 if (errno_set)
2255 expand_errno_check (exp, result);
2257 /* Output the entire sequence. */
2258 insns = get_insns ();
2259 end_sequence ();
2260 emit_insn (insns);
2262 return result;
2265 /* Expand a call to the builtin trinary math functions (fma).
2266 Return NULL_RTX if a normal call should be emitted rather than expanding the
2267 function in-line. EXP is the expression that is a call to the builtin
2268 function; if convenient, the result should be placed in TARGET.
2269 SUBTARGET may be used as the target for computing one of EXP's
2270 operands. */
2272 static rtx
2273 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2275 optab builtin_optab;
2276 rtx op0, op1, op2, result;
2277 rtx_insn *insns;
2278 tree fndecl = get_callee_fndecl (exp);
2279 tree arg0, arg1, arg2;
2280 machine_mode mode;
2282 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2283 return NULL_RTX;
2285 arg0 = CALL_EXPR_ARG (exp, 0);
2286 arg1 = CALL_EXPR_ARG (exp, 1);
2287 arg2 = CALL_EXPR_ARG (exp, 2);
2289 switch (DECL_FUNCTION_CODE (fndecl))
2291 CASE_FLT_FN (BUILT_IN_FMA):
2292 builtin_optab = fma_optab; break;
2293 default:
2294 gcc_unreachable ();
2297 /* Make a suitable register to place result in. */
2298 mode = TYPE_MODE (TREE_TYPE (exp));
2300 /* Before working hard, check whether the instruction is available. */
2301 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2302 return NULL_RTX;
2304 result = gen_reg_rtx (mode);
2306 /* Always stabilize the argument list. */
2307 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2308 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2309 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2311 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2312 op1 = expand_normal (arg1);
2313 op2 = expand_normal (arg2);
2315 start_sequence ();
2317 /* Compute into RESULT.
2318 Set RESULT to wherever the result comes back. */
2319 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2320 result, 0);
2322 /* If we were unable to expand via the builtin, stop the sequence
2323 (without outputting the insns) and call the library function
2324 with the stabilized argument list. */
2325 if (result == 0)
2327 end_sequence ();
2328 return expand_call (exp, target, target == const0_rtx);
2331 /* Output the entire sequence. */
2332 insns = get_insns ();
2333 end_sequence ();
2334 emit_insn (insns);
2336 return result;
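/* For illustration, the contract being expanded:

     double d = __builtin_fma (a, b, c);

   computes a * b + c with a single rounding.  When fma_optab has no
   handler for the mode, or the ternary expansion fails, the code above
   gives up and a libcall to fma is emitted instead, never a separate
   multiply and add.  */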
2339 /* Expand a call to the builtin sin and cos math functions.
2340 Return NULL_RTX if a normal call should be emitted rather than expanding the
2341 function in-line. EXP is the expression that is a call to the builtin
2342 function; if convenient, the result should be placed in TARGET.
2343 SUBTARGET may be used as the target for computing one of EXP's
2344 operands. */
2346 static rtx
2347 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2349 optab builtin_optab;
2350 rtx op0;
2351 rtx_insn *insns;
2352 tree fndecl = get_callee_fndecl (exp);
2353 machine_mode mode;
2354 tree arg;
2356 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2357 return NULL_RTX;
2359 arg = CALL_EXPR_ARG (exp, 0);
2361 switch (DECL_FUNCTION_CODE (fndecl))
2363 CASE_FLT_FN (BUILT_IN_SIN):
2364 CASE_FLT_FN (BUILT_IN_COS):
2365 builtin_optab = sincos_optab; break;
2366 default:
2367 gcc_unreachable ();
2370 /* Make a suitable register to place result in. */
2371 mode = TYPE_MODE (TREE_TYPE (exp));
2373 /* Check if the sincos insn is available; otherwise fall back
2374 to the sin or cos insn. */
2375 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2376 switch (DECL_FUNCTION_CODE (fndecl))
2378 CASE_FLT_FN (BUILT_IN_SIN):
2379 builtin_optab = sin_optab; break;
2380 CASE_FLT_FN (BUILT_IN_COS):
2381 builtin_optab = cos_optab; break;
2382 default:
2383 gcc_unreachable ();
2386 /* Before working hard, check whether the instruction is available. */
2387 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2389 rtx result = gen_reg_rtx (mode);
2391 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2392 need to expand the argument again. This way, we will not perform
2393 side-effects more than once. */
2394 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2396 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2398 start_sequence ();
2400 /* Compute into RESULT.
2401 Set RESULT to wherever the result comes back. */
2402 if (builtin_optab == sincos_optab)
2404 int ok;
2406 switch (DECL_FUNCTION_CODE (fndecl))
2408 CASE_FLT_FN (BUILT_IN_SIN):
2409 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2410 break;
2411 CASE_FLT_FN (BUILT_IN_COS):
2412 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2413 break;
2414 default:
2415 gcc_unreachable ();
2417 gcc_assert (ok);
2419 else
2420 result = expand_unop (mode, builtin_optab, op0, result, 0);
2422 if (result != 0)
2424 /* Output the entire sequence. */
2425 insns = get_insns ();
2426 end_sequence ();
2427 emit_insn (insns);
2428 return result;
2431 /* If we were unable to expand via the builtin, stop the sequence
2432 (without outputting the insns) and call the library function
2433 with the stabilized argument list. */
2434 end_sequence ();
2437 return expand_call (exp, target, target == const0_rtx);
2440 /* Given an interclass math builtin decl FNDECL and its argument ARG
2441 return an RTL instruction code that implements the functionality.
2442 If that isn't possible or available return CODE_FOR_nothing. */
2444 static enum insn_code
2445 interclass_mathfn_icode (tree arg, tree fndecl)
2447 bool errno_set = false;
2448 optab builtin_optab = unknown_optab;
2449 machine_mode mode;
2451 switch (DECL_FUNCTION_CODE (fndecl))
2453 CASE_FLT_FN (BUILT_IN_ILOGB):
2454 errno_set = true; builtin_optab = ilogb_optab; break;
2455 CASE_FLT_FN (BUILT_IN_ISINF):
2456 builtin_optab = isinf_optab; break;
2457 case BUILT_IN_ISNORMAL:
2458 case BUILT_IN_ISFINITE:
2459 CASE_FLT_FN (BUILT_IN_FINITE):
2460 case BUILT_IN_FINITED32:
2461 case BUILT_IN_FINITED64:
2462 case BUILT_IN_FINITED128:
2463 case BUILT_IN_ISINFD32:
2464 case BUILT_IN_ISINFD64:
2465 case BUILT_IN_ISINFD128:
2466 /* These builtins have no optabs (yet). */
2467 break;
2468 default:
2469 gcc_unreachable ();
2472 /* There's no easy way to detect the case we need to set EDOM. */
2473 if (flag_errno_math && errno_set)
2474 return CODE_FOR_nothing;
2476 /* Optab mode depends on the mode of the input argument. */
2477 mode = TYPE_MODE (TREE_TYPE (arg));
2479 if (builtin_optab)
2480 return optab_handler (builtin_optab, mode);
2481 return CODE_FOR_nothing;
2484 /* Expand a call to one of the builtin math functions that operate on
2485 a floating point argument and produce an integer result (ilogb, isinf,
2486 isnan, etc).
2487 Return 0 if a normal call should be emitted rather than expanding the
2488 function in-line. EXP is the expression that is a call to the builtin
2489 function; if convenient, the result should be placed in TARGET. */
2491 static rtx
2492 expand_builtin_interclass_mathfn (tree exp, rtx target)
2494 enum insn_code icode = CODE_FOR_nothing;
2495 rtx op0;
2496 tree fndecl = get_callee_fndecl (exp);
2497 machine_mode mode;
2498 tree arg;
2500 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2501 return NULL_RTX;
2503 arg = CALL_EXPR_ARG (exp, 0);
2504 icode = interclass_mathfn_icode (arg, fndecl);
2505 mode = TYPE_MODE (TREE_TYPE (arg));
2507 if (icode != CODE_FOR_nothing)
2509 struct expand_operand ops[1];
2510 rtx_insn *last = get_last_insn ();
2511 tree orig_arg = arg;
2513 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2514 need to expand the argument again. This way, we will not perform
2515 side-effects more than once. */
2516 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2518 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2520 if (mode != GET_MODE (op0))
2521 op0 = convert_to_mode (mode, op0, 0);
2523 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2524 if (maybe_legitimize_operands (icode, 0, 1, ops)
2525 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2526 return ops[0].value;
2528 delete_insns_since (last);
2529 CALL_EXPR_ARG (exp, 0) = orig_arg;
2532 return NULL_RTX;
2535 /* Expand a call to the builtin sincos math function.
2536 Return NULL_RTX if a normal call should be emitted rather than expanding the
2537 function in-line. EXP is the expression that is a call to the builtin
2538 function. */
2540 static rtx
2541 expand_builtin_sincos (tree exp)
2543 rtx op0, op1, op2, target1, target2;
2544 machine_mode mode;
2545 tree arg, sinp, cosp;
2546 int result;
2547 location_t loc = EXPR_LOCATION (exp);
2548 tree alias_type, alias_off;
2550 if (!validate_arglist (exp, REAL_TYPE,
2551 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2552 return NULL_RTX;
2554 arg = CALL_EXPR_ARG (exp, 0);
2555 sinp = CALL_EXPR_ARG (exp, 1);
2556 cosp = CALL_EXPR_ARG (exp, 2);
2558 /* Make a suitable register to place result in. */
2559 mode = TYPE_MODE (TREE_TYPE (arg));
2561 /* Check if sincos insn is available, otherwise emit the call. */
2562 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2563 return NULL_RTX;
2565 target1 = gen_reg_rtx (mode);
2566 target2 = gen_reg_rtx (mode);
2568 op0 = expand_normal (arg);
2569 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2570 alias_off = build_int_cst (alias_type, 0);
2571 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2572 sinp, alias_off));
2573 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2574 cosp, alias_off));
2576 /* Compute into target1 and target2.
2577 Set TARGET to wherever the result comes back. */
2578 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2579 gcc_assert (result);
2581 /* Move target1 and target2 to the memory locations indicated
2582 by op1 and op2. */
2583 emit_move_insn (op1, target1);
2584 emit_move_insn (op2, target2);
2586 return const0_rtx;
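/* For illustration, the GNU extension being expanded:

     double s, c;
     sincos (x, &s, &c);

   turns into a single two-output sincos insn where the target provides
   one; the expander stores through the two pointers itself, so the
   call proper produces no value (hence const0_rtx).  */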
2589 /* Expand a call to the internal cexpi builtin to the sincos math function.
2590 EXP is the expression that is a call to the builtin function; if convenient,
2591 the result should be placed in TARGET. */
2593 static rtx
2594 expand_builtin_cexpi (tree exp, rtx target)
2596 tree fndecl = get_callee_fndecl (exp);
2597 tree arg, type;
2598 machine_mode mode;
2599 rtx op0, op1, op2;
2600 location_t loc = EXPR_LOCATION (exp);
2602 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2603 return NULL_RTX;
2605 arg = CALL_EXPR_ARG (exp, 0);
2606 type = TREE_TYPE (arg);
2607 mode = TYPE_MODE (TREE_TYPE (arg));
2609 /* Try expanding via a sincos optab, fall back to emitting a libcall
2610 to sincos or cexp. We can rely on having one of them, because cexpi
2611 is only generated from sincos or cexp, or when either is available. */
2612 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2614 op1 = gen_reg_rtx (mode);
2615 op2 = gen_reg_rtx (mode);
2617 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2619 /* Compute into op1 and op2. */
2620 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2622 else if (targetm.libc_has_function (function_sincos))
2624 tree call, fn = NULL_TREE;
2625 tree top1, top2;
2626 rtx op1a, op2a;
2628 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2629 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2630 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2631 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2632 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2633 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2634 else
2635 gcc_unreachable ();
2637 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2638 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2639 op1a = copy_addr_to_reg (XEXP (op1, 0));
2640 op2a = copy_addr_to_reg (XEXP (op2, 0));
2641 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2642 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2644 /* Make sure not to fold the sincos call again. */
2645 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2646 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2647 call, 3, arg, top1, top2));
2649 else
2651 tree call, fn = NULL_TREE, narg;
2652 tree ctype = build_complex_type (type);
2654 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2655 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2656 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2657 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2658 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2659 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2660 else
2661 gcc_unreachable ();
2663 /* If we don't have a decl for cexp create one. This is the
2664 friendliest fallback if the user calls __builtin_cexpi
2665 without full target C99 function support. */
2666 if (fn == NULL_TREE)
2668 tree fntype;
2669 const char *name = NULL;
2671 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2672 name = "cexpf";
2673 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2674 name = "cexp";
2675 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2676 name = "cexpl";
2678 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2679 fn = build_fn_decl (name, fntype);
2682 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2683 build_real (type, dconst0), arg);
2685 /* Make sure not to fold the cexp call again. */
2686 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2687 return expand_expr (build_call_nary (ctype, call, 1, narg),
2688 target, VOIDmode, EXPAND_NORMAL);
2691 /* Now build the proper return type. */
2692 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2693 make_tree (TREE_TYPE (arg), op2),
2694 make_tree (TREE_TYPE (arg), op1)),
2695 target, VOIDmode, EXPAND_NORMAL);
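/* For illustration, the three strategies above for
   cexpi (x) == cos (x) + i * sin (x), in order of preference:

     1. expand via the sincos optab (both parts in one insn);
     2. call sincos (x, &s, &c) where the C library has it;
     3. call cexp on the complex value 0.0 + x*i.

   In the first two cases the complex result is then assembled from
   the two real parts with a COMPLEX_EXPR.  */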
2698 /* Conveniently construct a function call expression. FNDECL names the
2699 function to be called, N is the number of arguments, and the "..."
2700 parameters are the argument expressions. Unlike build_call_expr
2701 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2703 static tree
2704 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2706 va_list ap;
2707 tree fntype = TREE_TYPE (fndecl);
2708 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2710 va_start (ap, n);
2711 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2712 va_end (ap);
2713 SET_EXPR_LOCATION (fn, loc);
2714 return fn;
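/* For illustration, a typical use from the expanders below:

     exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl,
                                  1, arg);
     tmp = expand_normal (exp);

   Because the call is built unfolded, expanding it cannot recurse back
   into the builtin lowering we are already in the middle of.  */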
2717 /* Expand a call to one of the builtin rounding functions gcc defines
2718 as an extension (lfloor and lceil). As these are gcc extensions we
2719 do not need to worry about setting errno to EDOM.
2720 If expanding via optab fails, lower the expression to (int)(floor(x)).
2721 EXP is the expression that is a call to the builtin function;
2722 if convenient, the result should be placed in TARGET. */
2724 static rtx
2725 expand_builtin_int_roundingfn (tree exp, rtx target)
2727 convert_optab builtin_optab;
2728 rtx op0, tmp;
2729 rtx_insn *insns;
2730 tree fndecl = get_callee_fndecl (exp);
2731 enum built_in_function fallback_fn;
2732 tree fallback_fndecl;
2733 machine_mode mode;
2734 tree arg;
2736 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2737 gcc_unreachable ();
2739 arg = CALL_EXPR_ARG (exp, 0);
2741 switch (DECL_FUNCTION_CODE (fndecl))
2743 CASE_FLT_FN (BUILT_IN_ICEIL):
2744 CASE_FLT_FN (BUILT_IN_LCEIL):
2745 CASE_FLT_FN (BUILT_IN_LLCEIL):
2746 builtin_optab = lceil_optab;
2747 fallback_fn = BUILT_IN_CEIL;
2748 break;
2750 CASE_FLT_FN (BUILT_IN_IFLOOR):
2751 CASE_FLT_FN (BUILT_IN_LFLOOR):
2752 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2753 builtin_optab = lfloor_optab;
2754 fallback_fn = BUILT_IN_FLOOR;
2755 break;
2757 default:
2758 gcc_unreachable ();
2761 /* Make a suitable register to place result in. */
2762 mode = TYPE_MODE (TREE_TYPE (exp));
2764 target = gen_reg_rtx (mode);
2766 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2767 need to expand the argument again. This way, we will not perform
2768 side-effects more than once. */
2769 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2771 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2773 start_sequence ();
2775 /* Compute into TARGET. */
2776 if (expand_sfix_optab (target, op0, builtin_optab))
2778 /* Output the entire sequence. */
2779 insns = get_insns ();
2780 end_sequence ();
2781 emit_insn (insns);
2782 return target;
2785 /* If we were unable to expand via the builtin, stop the sequence
2786 (without outputting the insns). */
2787 end_sequence ();
2789 /* Fall back to floating point rounding optab. */
2790 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2792 /* For non-C99 targets we may end up without a fallback fndecl here
2793 if the user called __builtin_lfloor directly. In this case emit
2794 a call to the floor/ceil variants nevertheless. This should result
2795 in the best user experience for targets lacking full C99 support. */
2796 if (fallback_fndecl == NULL_TREE)
2798 tree fntype;
2799 const char *name = NULL;
2801 switch (DECL_FUNCTION_CODE (fndecl))
2803 case BUILT_IN_ICEIL:
2804 case BUILT_IN_LCEIL:
2805 case BUILT_IN_LLCEIL:
2806 name = "ceil";
2807 break;
2808 case BUILT_IN_ICEILF:
2809 case BUILT_IN_LCEILF:
2810 case BUILT_IN_LLCEILF:
2811 name = "ceilf";
2812 break;
2813 case BUILT_IN_ICEILL:
2814 case BUILT_IN_LCEILL:
2815 case BUILT_IN_LLCEILL:
2816 name = "ceill";
2817 break;
2818 case BUILT_IN_IFLOOR:
2819 case BUILT_IN_LFLOOR:
2820 case BUILT_IN_LLFLOOR:
2821 name = "floor";
2822 break;
2823 case BUILT_IN_IFLOORF:
2824 case BUILT_IN_LFLOORF:
2825 case BUILT_IN_LLFLOORF:
2826 name = "floorf";
2827 break;
2828 case BUILT_IN_IFLOORL:
2829 case BUILT_IN_LFLOORL:
2830 case BUILT_IN_LLFLOORL:
2831 name = "floorl";
2832 break;
2833 default:
2834 gcc_unreachable ();
2837 fntype = build_function_type_list (TREE_TYPE (arg),
2838 TREE_TYPE (arg), NULL_TREE);
2839 fallback_fndecl = build_fn_decl (name, fntype);
2842 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2844 tmp = expand_normal (exp);
2845 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2847 /* Truncate the result of floating point optab to integer
2848 via expand_fix (). */
2849 target = gen_reg_rtx (mode);
2850 expand_fix (target, tmp, 0);
2852 return target;
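/* For illustration, when the lceil/lfloor optab is missing the code
   above rewrites

     long l = lfloor (x);

   into the equivalent of

     long l = (long) floor (x);

   i.e. a call through the fallback decl followed by expand_fix to
   truncate the floating-point result to the integer mode.  */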
2855 /* Expand a call to one of the builtin math functions doing integer
2856 conversion (lrint).
2857 Return 0 if a normal call should be emitted rather than expanding the
2858 function in-line. EXP is the expression that is a call to the builtin
2859 function; if convenient, the result should be placed in TARGET. */
2861 static rtx
2862 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2864 convert_optab builtin_optab;
2865 rtx op0;
2866 rtx_insn *insns;
2867 tree fndecl = get_callee_fndecl (exp);
2868 tree arg;
2869 machine_mode mode;
2870 enum built_in_function fallback_fn = BUILT_IN_NONE;
2872 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2873 gcc_unreachable ();
2875 arg = CALL_EXPR_ARG (exp, 0);
2877 switch (DECL_FUNCTION_CODE (fndecl))
2879 CASE_FLT_FN (BUILT_IN_IRINT):
2880 fallback_fn = BUILT_IN_LRINT;
2881 /* FALLTHRU */
2882 CASE_FLT_FN (BUILT_IN_LRINT):
2883 CASE_FLT_FN (BUILT_IN_LLRINT):
2884 builtin_optab = lrint_optab;
2885 break;
2887 CASE_FLT_FN (BUILT_IN_IROUND):
2888 fallback_fn = BUILT_IN_LROUND;
2889 /* FALLTHRU */
2890 CASE_FLT_FN (BUILT_IN_LROUND):
2891 CASE_FLT_FN (BUILT_IN_LLROUND):
2892 builtin_optab = lround_optab;
2893 break;
2895 default:
2896 gcc_unreachable ();
2899 /* There's no easy way to detect the case we need to set EDOM. */
2900 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2901 return NULL_RTX;
2903 /* Make a suitable register to place result in. */
2904 mode = TYPE_MODE (TREE_TYPE (exp));
2906 /* When errno is not maintained, we can expand the operation inline. */
2907 if (!flag_errno_math)
2909 rtx result = gen_reg_rtx (mode);
2911 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2912 need to expand the argument again. This way, we will not perform
2913 side-effects more than once. */
2914 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2916 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2918 start_sequence ();
2920 if (expand_sfix_optab (result, op0, builtin_optab))
2922 /* Output the entire sequence. */
2923 insns = get_insns ();
2924 end_sequence ();
2925 emit_insn (insns);
2926 return result;
2929 /* If we were unable to expand via the builtin, stop the sequence
2930 (without outputting the insns) and call the library function
2931 with the stabilized argument list. */
2932 end_sequence ();
2935 if (fallback_fn != BUILT_IN_NONE)
2937 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2938 targets, (int) round (x) should never be transformed into
2939 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2940 a call to lround in the hope that the target provides at least some
2941 C99 functions. This should result in the best user experience for
2942 targets lacking full C99 support. */
2943 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2944 fallback_fn, 0);
2946 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2947 fallback_fndecl, 1, arg);
2949 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2950 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2951 return convert_to_mode (mode, target, 0);
2954 return expand_call (exp, target, target == const0_rtx);
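/* For illustration, the fallback just above turns

     int i = __builtin_iround (x);

   into a call to lround (x) whose result is then narrowed to int with
   convert_to_mode, betting that even a non-C99 target is likely to
   provide at least lround.  */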
2957 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2958 a normal call should be emitted rather than expanding the function
2959 in-line. EXP is the expression that is a call to the builtin
2960 function; if convenient, the result should be placed in TARGET. */
2962 static rtx
2963 expand_builtin_powi (tree exp, rtx target)
2965 tree arg0, arg1;
2966 rtx op0, op1;
2967 machine_mode mode;
2968 machine_mode mode2;
2970 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2971 return NULL_RTX;
2973 arg0 = CALL_EXPR_ARG (exp, 0);
2974 arg1 = CALL_EXPR_ARG (exp, 1);
2975 mode = TYPE_MODE (TREE_TYPE (exp));
2977 /* Emit a libcall to libgcc. */
2979 /* Mode of the 2nd argument must match that of an int. */
2980 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2982 if (target == NULL_RTX)
2983 target = gen_reg_rtx (mode);
2985 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2986 if (GET_MODE (op0) != mode)
2987 op0 = convert_to_mode (mode, op0, 0);
2988 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2989 if (GET_MODE (op1) != mode2)
2990 op1 = convert_to_mode (mode2, op1, 0);
2992 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2993 target, LCT_CONST, mode, 2,
2994 op0, mode, op1, mode2);
2996 return target;
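/* For illustration, on a typical target the libcall emitted above for

     double d = __builtin_powi (x, n);

   is the libgcc routine

     double __powidf2 (double x, int n);

   with the exponent always converted to the mode of int first.  */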
2999 /* Expand expression EXP which is a call to the strlen builtin. Return
3000 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3001 try to get the result in TARGET, if convenient. */
3003 static rtx
3004 expand_builtin_strlen (tree exp, rtx target,
3005 machine_mode target_mode)
3007 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3008 return NULL_RTX;
3009 else
3011 struct expand_operand ops[4];
3012 rtx pat;
3013 tree len;
3014 tree src = CALL_EXPR_ARG (exp, 0);
3015 rtx src_reg;
3016 rtx_insn *before_strlen;
3017 machine_mode insn_mode = target_mode;
3018 enum insn_code icode = CODE_FOR_nothing;
3019 unsigned int align;
3021 /* If the length can be computed at compile-time, return it. */
3022 len = c_strlen (src, 0);
3023 if (len)
3024 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3026 /* If the length can be computed at compile-time and is a constant
3027 integer, but there are side-effects in src, evaluate
3028 src for side-effects, then return len.
3029 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3030 can be optimized into: i++; x = 3; */
3031 len = c_strlen (src, 1);
3032 if (len && TREE_CODE (len) == INTEGER_CST)
3034 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3035 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3038 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3040 /* If SRC is not a pointer type, don't do this operation inline. */
3041 if (align == 0)
3042 return NULL_RTX;
3044 /* Bail out if we can't compute strlen in the right mode. */
3045 while (insn_mode != VOIDmode)
3047 icode = optab_handler (strlen_optab, insn_mode);
3048 if (icode != CODE_FOR_nothing)
3049 break;
3051 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3053 if (insn_mode == VOIDmode)
3054 return NULL_RTX;
3056 /* Make a place to hold the source address. We will not expand
3057 the actual source until we are sure that the expansion will
3058 not fail -- there are trees that cannot be expanded twice. */
3059 src_reg = gen_reg_rtx (Pmode);
3061 /* Mark the beginning of the strlen sequence so we can emit the
3062 source operand later. */
3063 before_strlen = get_last_insn ();
3065 create_output_operand (&ops[0], target, insn_mode);
3066 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3067 create_integer_operand (&ops[2], 0);
3068 create_integer_operand (&ops[3], align);
3069 if (!maybe_expand_insn (icode, 4, ops))
3070 return NULL_RTX;
3072 /* Now that we are assured of success, expand the source. */
3073 start_sequence ();
3074 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3075 if (pat != src_reg)
3077 #ifdef POINTERS_EXTEND_UNSIGNED
3078 if (GET_MODE (pat) != Pmode)
3079 pat = convert_to_mode (Pmode, pat,
3080 POINTERS_EXTEND_UNSIGNED);
3081 #endif
3082 emit_move_insn (src_reg, pat);
3084 pat = get_insns ();
3085 end_sequence ();
3087 if (before_strlen)
3088 emit_insn_after (pat, before_strlen);
3089 else
3090 emit_insn_before (pat, get_insns ());
3092 /* Return the value in the proper mode for this function. */
3093 if (GET_MODE (ops[0].value) == target_mode)
3094 target = ops[0].value;
3095 else if (target != 0)
3096 convert_move (target, ops[0].value, 0);
3097 else
3098 target = convert_to_mode (target_mode, ops[0].value, 0);
3100 return target;
3104 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3105 bytes from constant string DATA + OFFSET and return it as target
3106 constant. */
3108 static rtx
3109 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3110 machine_mode mode)
3112 const char *str = (const char *) data;
3114 gcc_assert (offset >= 0
3115 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3116 <= strlen (str) + 1));
3118 return c_readstr (str + offset, mode);
3121 /* LEN specifies the length of the block in a memcpy/memset operation.
3122 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3123 In some cases we can make a very likely guess at the max size, which
3124 we then record in PROBABLE_MAX_SIZE. */
3126 static void
3127 determine_block_size (tree len, rtx len_rtx,
3128 unsigned HOST_WIDE_INT *min_size,
3129 unsigned HOST_WIDE_INT *max_size,
3130 unsigned HOST_WIDE_INT *probable_max_size)
3132 if (CONST_INT_P (len_rtx))
3134 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3135 return;
3137 else
3139 wide_int min, max;
3140 enum value_range_type range_type = VR_UNDEFINED;
3142 /* Determine bounds from the type. */
3143 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3144 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3145 else
3146 *min_size = 0;
3147 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3148 *probable_max_size = *max_size
3149 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3150 else
3151 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3153 if (TREE_CODE (len) == SSA_NAME)
3154 range_type = get_range_info (len, &min, &max);
3155 if (range_type == VR_RANGE)
3157 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3158 *min_size = min.to_uhwi ();
3159 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3160 *probable_max_size = *max_size = max.to_uhwi ();
3162 else if (range_type == VR_ANTI_RANGE)
3164 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3165 if (min == 0)
3167 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3168 *min_size = max.to_uhwi () + 1;
3170 /* Code like
3172 int n;
3173 if (n < 100)
3174 memcpy (a, b, n)
3176 produces an anti-range allowing negative values of N. We can
3177 still use that information to guess that N is not negative. */
3179 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3180 *probable_max_size = min.to_uhwi () - 1;
3183 gcc_checking_assert (*max_size <=
3184 (unsigned HOST_WIDE_INT)
3185 GET_MODE_MASK (GET_MODE (len_rtx)));
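/* For illustration, given

     void f (unsigned int n, char *a, char *b)
     {
       if (n < 100)
         memcpy (a, b, n);
     }

   range information on N yields MIN_SIZE == 0 and MAX_SIZE == 99, so
   the block-move expander may choose an inline by-pieces strategy;
   without range information the bounds degrade to 0 and the full mode
   mask of the length rtx.  */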
3188 /* Helper function to do the actual work for expand_builtin_memcpy. */
3190 static rtx
3191 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3193 const char *src_str;
3194 unsigned int src_align = get_pointer_alignment (src);
3195 unsigned int dest_align = get_pointer_alignment (dest);
3196 rtx dest_mem, src_mem, dest_addr, len_rtx;
3197 HOST_WIDE_INT expected_size = -1;
3198 unsigned int expected_align = 0;
3199 unsigned HOST_WIDE_INT min_size;
3200 unsigned HOST_WIDE_INT max_size;
3201 unsigned HOST_WIDE_INT probable_max_size;
3203 /* If DEST is not a pointer type, call the normal function. */
3204 if (dest_align == 0)
3205 return NULL_RTX;
3207 /* If SRC is not a pointer type, don't do this
3208 operation in-line. */
3209 if (src_align == 0)
3210 return NULL_RTX;
3212 if (currently_expanding_gimple_stmt)
3213 stringop_block_profile (currently_expanding_gimple_stmt,
3214 &expected_align, &expected_size);
3216 if (expected_align < dest_align)
3217 expected_align = dest_align;
3218 dest_mem = get_memory_rtx (dest, len);
3219 set_mem_align (dest_mem, dest_align);
3220 len_rtx = expand_normal (len);
3221 determine_block_size (len, len_rtx, &min_size, &max_size,
3222 &probable_max_size);
3223 src_str = c_getstr (src);
3225 /* If SRC is a string constant and block move would be done
3226 by pieces, we can avoid loading the string from memory
3227 and only store the computed constants. */
3228 if (src_str
3229 && CONST_INT_P (len_rtx)
3230 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3231 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3232 CONST_CAST (char *, src_str),
3233 dest_align, false))
3235 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3236 builtin_memcpy_read_str,
3237 CONST_CAST (char *, src_str),
3238 dest_align, false, 0);
3239 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3240 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3241 return dest_mem;
3244 src_mem = get_memory_rtx (src, len);
3245 set_mem_align (src_mem, src_align);
3247 /* Copy word part most expediently. */
3248 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3249 CALL_EXPR_TAILCALL (exp)
3250 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3251 expected_align, expected_size,
3252 min_size, max_size, probable_max_size);
3254 if (dest_addr == 0)
3256 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3257 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3260 return dest_addr;
3263 /* Expand a call EXP to the memcpy builtin.
3264 Return NULL_RTX if we failed; the caller should emit a normal call,
3265 otherwise try to get the result in TARGET, if convenient (and in
3266 mode MODE if that's convenient). */
3268 static rtx
3269 expand_builtin_memcpy (tree exp, rtx target)
3271 if (!validate_arglist (exp,
3272 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3273 return NULL_RTX;
3274 else
3276 tree dest = CALL_EXPR_ARG (exp, 0);
3277 tree src = CALL_EXPR_ARG (exp, 1);
3278 tree len = CALL_EXPR_ARG (exp, 2);
3279 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3283 /* Expand an instrumented call EXP to the memcpy builtin.
3284 Return NULL_RTX if we failed; the caller should emit a normal call,
3285 otherwise try to get the result in TARGET, if convenient (and in
3286 mode MODE if that's convenient). */
3288 static rtx
3289 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3291 if (!validate_arglist (exp,
3292 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3293 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3294 INTEGER_TYPE, VOID_TYPE))
3295 return NULL_RTX;
3296 else
3298 tree dest = CALL_EXPR_ARG (exp, 0);
3299 tree src = CALL_EXPR_ARG (exp, 2);
3300 tree len = CALL_EXPR_ARG (exp, 4);
3301 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3303 /* Return src bounds with the result. */
3304 if (res)
3306 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3307 expand_normal (CALL_EXPR_ARG (exp, 1)));
3308 res = chkp_join_splitted_slot (res, bnd);
3310 return res;
3314 /* Expand a call EXP to the mempcpy builtin.
3315 Return NULL_RTX if we failed; the caller should emit a normal call,
3316 otherwise try to get the result in TARGET, if convenient (and in
3317 mode MODE if that's convenient). If ENDP is 0 return the
3318 destination pointer, if ENDP is 1 return the end pointer ala
3319 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3320 stpcpy. */
3322 static rtx
3323 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3325 if (!validate_arglist (exp,
3326 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3327 return NULL_RTX;
3328 else
3330 tree dest = CALL_EXPR_ARG (exp, 0);
3331 tree src = CALL_EXPR_ARG (exp, 1);
3332 tree len = CALL_EXPR_ARG (exp, 2);
3333 return expand_builtin_mempcpy_args (dest, src, len,
3334 target, mode, /*endp=*/ 1,
3335 exp);
3339 /* Expand an instrumented call EXP to the mempcpy builtin.
3340 Return NULL_RTX if we failed; the caller should emit a normal call,
3341 otherwise try to get the result in TARGET, if convenient (and in
3342 mode MODE if that's convenient). */
3344 static rtx
3345 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3347 if (!validate_arglist (exp,
3348 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3349 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3350 INTEGER_TYPE, VOID_TYPE))
3351 return NULL_RTX;
3352 else
3354 tree dest = CALL_EXPR_ARG (exp, 0);
3355 tree src = CALL_EXPR_ARG (exp, 2);
3356 tree len = CALL_EXPR_ARG (exp, 4);
3357 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3358 mode, 1, exp);
3360 /* Return src bounds with the result. */
3361 if (res)
3363 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3364 expand_normal (CALL_EXPR_ARG (exp, 1)));
3365 res = chkp_join_splitted_slot (res, bnd);
3367 return res;
3371 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3372 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3373 so that this can also be called without constructing an actual CALL_EXPR.
3374 The other arguments and return value are the same as for
3375 expand_builtin_mempcpy. */
3377 static rtx
3378 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3379 rtx target, machine_mode mode, int endp,
3380 tree orig_exp)
3382 tree fndecl = get_callee_fndecl (orig_exp);
3384 /* If return value is ignored, transform mempcpy into memcpy. */
3385 if (target == const0_rtx
3386 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3387 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3389 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3390 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3391 dest, src, len);
3392 return expand_expr (result, target, mode, EXPAND_NORMAL);
3394 else if (target == const0_rtx
3395 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3397 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3398 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3399 dest, src, len);
3400 return expand_expr (result, target, mode, EXPAND_NORMAL);
3402 else
3404 const char *src_str;
3405 unsigned int src_align = get_pointer_alignment (src);
3406 unsigned int dest_align = get_pointer_alignment (dest);
3407 rtx dest_mem, src_mem, len_rtx;
3409 /* If either SRC or DEST is not a pointer type, don't do this
3410 operation in-line. */
3411 if (dest_align == 0 || src_align == 0)
3412 return NULL_RTX;
3414 /* If LEN is not constant, call the normal function. */
3415 if (! tree_fits_uhwi_p (len))
3416 return NULL_RTX;
3418 len_rtx = expand_normal (len);
3419 src_str = c_getstr (src);
3421 /* If SRC is a string constant and block move would be done
3422 by pieces, we can avoid loading the string from memory
3423 and only store the computed constants. */
3424 if (src_str
3425 && CONST_INT_P (len_rtx)
3426 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3427 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3428 CONST_CAST (char *, src_str),
3429 dest_align, false))
3431 dest_mem = get_memory_rtx (dest, len);
3432 set_mem_align (dest_mem, dest_align);
3433 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3434 builtin_memcpy_read_str,
3435 CONST_CAST (char *, src_str),
3436 dest_align, false, endp);
3437 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3438 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3439 return dest_mem;
3442 if (CONST_INT_P (len_rtx)
3443 && can_move_by_pieces (INTVAL (len_rtx),
3444 MIN (dest_align, src_align)))
3446 dest_mem = get_memory_rtx (dest, len);
3447 set_mem_align (dest_mem, dest_align);
3448 src_mem = get_memory_rtx (src, len);
3449 set_mem_align (src_mem, src_align);
3450 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3451 MIN (dest_align, src_align), endp);
3452 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3453 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3454 return dest_mem;
3457 return NULL_RTX;
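/* For illustration, the ENDP encoding shared by these expanders:

     memcpy (d, s, n)    ENDP == 0   result is d
     mempcpy (d, s, n)   ENDP == 1   result is d + n
     stpcpy (d, s)       ENDP == 2   result is d + n - 1
                                     (the address of the NUL)

   which lets one by-pieces move routine serve all three entry
   points.  */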
3461 #ifndef HAVE_movstr
3462 # define HAVE_movstr 0
3463 # define CODE_FOR_movstr CODE_FOR_nothing
3464 #endif
3466 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3467 we failed; the caller should emit a normal call. Otherwise try to
3468 get the result in TARGET, if convenient. If ENDP is 0 return the
3469 destination pointer, if ENDP is 1 return the end pointer ala
3470 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3471 stpcpy. */
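/* Editor's sketch, not part of the GCC sources: the ENDP convention
   mirrors the return values of the corresponding C library routines.
   Illustrative only; mempcpy and stpcpy are GNU/POSIX extensions, so
   they are declared here to keep the sketch self-contained.  */

extern void *mempcpy (void *, const void *, size_t);
extern char *stpcpy (char *, const char *);

static void ATTRIBUTE_UNUSED
endp_convention_sketch (void)
{
  char buf[16];
  const char *s = "hi";			   /* strlen (s) == 2 */
  char *p0 = strcpy (buf, s);		   /* ENDP == 0: returns buf */
  char *p1 = (char *) mempcpy (buf, s, 3); /* ENDP == 1: returns buf + 3 */
  char *p2 = stpcpy (buf, s);		   /* ENDP == 2: returns buf + 2,
					      the address of the NUL */
  (void) p0; (void) p1; (void) p2;
}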
3473 static rtx
3474 expand_movstr (tree dest, tree src, rtx target, int endp)
3476 struct expand_operand ops[3];
3477 rtx dest_mem;
3478 rtx src_mem;
3480 if (!HAVE_movstr)
3481 return NULL_RTX;
3483 dest_mem = get_memory_rtx (dest, NULL);
3484 src_mem = get_memory_rtx (src, NULL);
3485 if (!endp)
3487 target = force_reg (Pmode, XEXP (dest_mem, 0));
3488 dest_mem = replace_equiv_address (dest_mem, target);
3491 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3492 create_fixed_operand (&ops[1], dest_mem);
3493 create_fixed_operand (&ops[2], src_mem);
3494 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3495 return NULL_RTX;
3497 if (endp && target != const0_rtx)
3499 target = ops[0].value;
3500 /* movstr is supposed to set end to the address of the NUL
3501 terminator. If the caller requested a mempcpy-like return value,
3502 adjust it. */
3503 if (endp == 1)
3505 rtx tem = plus_constant (GET_MODE (target),
3506 gen_lowpart (GET_MODE (target), target), 1);
3507 emit_move_insn (target, force_operand (tem, NULL_RTX));
3510 return target;
3513 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3514 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3515 try to get the result in TARGET, if convenient (and in mode MODE if that's
3516 convenient). */
3518 static rtx
3519 expand_builtin_strcpy (tree exp, rtx target)
3521 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3523 tree dest = CALL_EXPR_ARG (exp, 0);
3524 tree src = CALL_EXPR_ARG (exp, 1);
3525 return expand_builtin_strcpy_args (dest, src, target);
3527 return NULL_RTX;
3530 /* Helper function to do the actual work for expand_builtin_strcpy. The
3531 arguments to the builtin_strcpy call DEST and SRC are broken out
3532 so that this can also be called without constructing an actual CALL_EXPR.
3533 The other arguments and return value are the same as for
3534 expand_builtin_strcpy. */
3536 static rtx
3537 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3539 return expand_movstr (dest, src, target, /*endp=*/0);
3542 /* Expand a call EXP to the stpcpy builtin.
3543 Return NULL_RTX if we failed; the caller should emit a normal call;
3544 otherwise try to get the result in TARGET, if convenient (and in
3545 mode MODE if that's convenient). */
3547 static rtx
3548 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3550 tree dst, src;
3551 location_t loc = EXPR_LOCATION (exp);
3553 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3554 return NULL_RTX;
3556 dst = CALL_EXPR_ARG (exp, 0);
3557 src = CALL_EXPR_ARG (exp, 1);
3559 /* If return value is ignored, transform stpcpy into strcpy. */
3560 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3562 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3563 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3564 return expand_expr (result, target, mode, EXPAND_NORMAL);
3566 else
3568 tree len, lenp1;
3569 rtx ret;
3571 /* Ensure we get an actual string whose length can be evaluated at
3572 compile-time, not an expression containing a string. This is
3573 because the latter will potentially produce pessimized code
3574 when used to produce the return value. */
3575 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3576 return expand_movstr (dst, src, target, /*endp=*/2);
3578 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3579 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3580 target, mode, /*endp=*/2,
3581 exp);
3583 if (ret)
3584 return ret;
3586 if (TREE_CODE (len) == INTEGER_CST)
3588 rtx len_rtx = expand_normal (len);
3590 if (CONST_INT_P (len_rtx))
3592 ret = expand_builtin_strcpy_args (dst, src, target);
3594 if (ret)
3596 if (! target)
3598 if (mode != VOIDmode)
3599 target = gen_reg_rtx (mode);
3600 else
3601 target = gen_reg_rtx (GET_MODE (ret));
3603 if (GET_MODE (target) != GET_MODE (ret))
3604 ret = gen_lowpart (GET_MODE (target), ret);
3606 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3607 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3608 gcc_assert (ret);
3610 return target;
3615 return expand_movstr (dst, src, target, /*endp=*/2);
3619 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3620 bytes from constant string DATA + OFFSET and return it as target
3621 constant. */
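/* Editor's sketch, illustrative only: reads past the terminating NUL
   must yield zero bytes because strncpy pads its destination with
   zeros, which is why the callback below returns const0_rtx once
   OFFSET passes strlen (DATA).  */

static void ATTRIBUTE_UNUSED
strncpy_padding_sketch (void)
{
  char buf[8];
  strncpy (buf, "ab", sizeof buf);	/* buf == "ab\0\0\0\0\0\0" */
  (void) buf;
}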
3623 static rtx
3624 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3625 machine_mode mode)
3627 const char *str = (const char *) data;
3629 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3630 return const0_rtx;
3632 return c_readstr (str + offset, mode);
3635 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3636 NULL_RTX if we failed; the caller should emit a normal call. */
3638 static rtx
3639 expand_builtin_strncpy (tree exp, rtx target)
3641 location_t loc = EXPR_LOCATION (exp);
3643 if (validate_arglist (exp,
3644 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3646 tree dest = CALL_EXPR_ARG (exp, 0);
3647 tree src = CALL_EXPR_ARG (exp, 1);
3648 tree len = CALL_EXPR_ARG (exp, 2);
3649 tree slen = c_strlen (src, 1);
3651 /* We must be passed a constant len and src parameter. */
3652 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3653 return NULL_RTX;
3655 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3657 /* We're required to pad with trailing zeros if the requested
3658 len is greater than strlen(s2)+1. In that case try to
3659 use store_by_pieces; if it fails, punt. */
3660 if (tree_int_cst_lt (slen, len))
3662 unsigned int dest_align = get_pointer_alignment (dest);
3663 const char *p = c_getstr (src);
3664 rtx dest_mem;
3666 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3667 || !can_store_by_pieces (tree_to_uhwi (len),
3668 builtin_strncpy_read_str,
3669 CONST_CAST (char *, p),
3670 dest_align, false))
3671 return NULL_RTX;
3673 dest_mem = get_memory_rtx (dest, len);
3674 store_by_pieces (dest_mem, tree_to_uhwi (len),
3675 builtin_strncpy_read_str,
3676 CONST_CAST (char *, p), dest_align, false, 0);
3677 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3678 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3679 return dest_mem;
3682 return NULL_RTX;
3685 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3686 bytes from constant string DATA + OFFSET and return it as target
3687 constant. */
3689 static rtx
3690 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3691 machine_mode mode)
3693 const char *c = (const char *) data;
3694 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3696 memset (p, *c, GET_MODE_SIZE (mode));
3698 return c_readstr (p, mode);
3701 /* Callback routine for store_by_pieces. Return the RTL of a register
3702 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3703 char value given in the RTL register data. For example, if mode is
3704 4 bytes wide, return the RTL for 0x01010101*data. */
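/* Editor's sketch of the trick just described, in plain C for an
   assumed 4-byte mode: multiplying the byte by a coefficient whose
   bytes are all one replicates it across the word.  Hypothetical
   helper, not used by the expander itself.  */

static unsigned int ATTRIBUTE_UNUSED
replicate_byte_sketch (unsigned char c)
{
  return (unsigned int) c * 0x01010101u;   /* 0xab -> 0xabababab */
}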
3706 static rtx
3707 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3708 machine_mode mode)
3710 rtx target, coeff;
3711 size_t size;
3712 char *p;
3714 size = GET_MODE_SIZE (mode);
3715 if (size == 1)
3716 return (rtx) data;
3718 p = XALLOCAVEC (char, size);
3719 memset (p, 1, size);
3720 coeff = c_readstr (p, mode);
3722 target = convert_to_mode (mode, (rtx) data, 1);
3723 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3724 return force_reg (mode, target);
3727 /* Expand expression EXP, which is a call to the memset builtin. Return
3728 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3729 try to get the result in TARGET, if convenient (and in mode MODE if that's
3730 convenient). */
3732 static rtx
3733 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3735 if (!validate_arglist (exp,
3736 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3737 return NULL_RTX;
3738 else
3740 tree dest = CALL_EXPR_ARG (exp, 0);
3741 tree val = CALL_EXPR_ARG (exp, 1);
3742 tree len = CALL_EXPR_ARG (exp, 2);
3743 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3747 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3748 Return NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3749 try to get the result in TARGET, if convenient (and in mode MODE if that's
3750 convenient). */
3752 static rtx
3753 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3755 if (!validate_arglist (exp,
3756 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3757 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3758 return NULL_RTX;
3759 else
3761 tree dest = CALL_EXPR_ARG (exp, 0);
3762 tree val = CALL_EXPR_ARG (exp, 2);
3763 tree len = CALL_EXPR_ARG (exp, 3);
3764 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3766 /* Return src bounds with the result. */
3767 if (res)
3769 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3770 expand_normal (CALL_EXPR_ARG (exp, 1)));
3771 res = chkp_join_splitted_slot (res, bnd);
3773 return res;
3777 /* Helper function to do the actual work for expand_builtin_memset. The
3778 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3779 so that this can also be called without constructing an actual CALL_EXPR.
3780 The other arguments and return value are the same as for
3781 expand_builtin_memset. */
3783 static rtx
3784 expand_builtin_memset_args (tree dest, tree val, tree len,
3785 rtx target, machine_mode mode, tree orig_exp)
3787 tree fndecl, fn;
3788 enum built_in_function fcode;
3789 machine_mode val_mode;
3790 char c;
3791 unsigned int dest_align;
3792 rtx dest_mem, dest_addr, len_rtx;
3793 HOST_WIDE_INT expected_size = -1;
3794 unsigned int expected_align = 0;
3795 unsigned HOST_WIDE_INT min_size;
3796 unsigned HOST_WIDE_INT max_size;
3797 unsigned HOST_WIDE_INT probable_max_size;
3799 dest_align = get_pointer_alignment (dest);
3801 /* If DEST is not a pointer type, don't do this operation in-line. */
3802 if (dest_align == 0)
3803 return NULL_RTX;
3805 if (currently_expanding_gimple_stmt)
3806 stringop_block_profile (currently_expanding_gimple_stmt,
3807 &expected_align, &expected_size);
3809 if (expected_align < dest_align)
3810 expected_align = dest_align;
3812 /* If the LEN parameter is zero, return DEST. */
3813 if (integer_zerop (len))
3815 /* Evaluate and ignore VAL in case it has side-effects. */
3816 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3817 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3820 /* Stabilize the arguments in case we fail. */
3821 dest = builtin_save_expr (dest);
3822 val = builtin_save_expr (val);
3823 len = builtin_save_expr (len);
3825 len_rtx = expand_normal (len);
3826 determine_block_size (len, len_rtx, &min_size, &max_size,
3827 &probable_max_size);
3828 dest_mem = get_memory_rtx (dest, len);
3829 val_mode = TYPE_MODE (unsigned_char_type_node);
3831 if (TREE_CODE (val) != INTEGER_CST)
3833 rtx val_rtx;
3835 val_rtx = expand_normal (val);
3836 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3838 /* Assume that we can memset by pieces if we can store
3839 the coefficients by pieces (in the required modes).
3840 We can't pass builtin_memset_gen_str as that emits RTL. */
3841 c = 1;
3842 if (tree_fits_uhwi_p (len)
3843 && can_store_by_pieces (tree_to_uhwi (len),
3844 builtin_memset_read_str, &c, dest_align,
3845 true))
3847 val_rtx = force_reg (val_mode, val_rtx);
3848 store_by_pieces (dest_mem, tree_to_uhwi (len),
3849 builtin_memset_gen_str, val_rtx, dest_align,
3850 true, 0);
3852 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3853 dest_align, expected_align,
3854 expected_size, min_size, max_size,
3855 probable_max_size))
3856 goto do_libcall;
3858 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3859 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3860 return dest_mem;
3863 if (target_char_cast (val, &c))
3864 goto do_libcall;
3866 if (c)
3868 if (tree_fits_uhwi_p (len)
3869 && can_store_by_pieces (tree_to_uhwi (len),
3870 builtin_memset_read_str, &c, dest_align,
3871 true))
3872 store_by_pieces (dest_mem, tree_to_uhwi (len),
3873 builtin_memset_read_str, &c, dest_align, true, 0);
3874 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3875 gen_int_mode (c, val_mode),
3876 dest_align, expected_align,
3877 expected_size, min_size, max_size,
3878 probable_max_size))
3879 goto do_libcall;
3881 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3882 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3883 return dest_mem;
3886 set_mem_align (dest_mem, dest_align);
3887 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3888 CALL_EXPR_TAILCALL (orig_exp)
3889 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3890 expected_align, expected_size,
3891 min_size, max_size,
3892 probable_max_size);
3894 if (dest_addr == 0)
3896 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3897 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3900 return dest_addr;
3902 do_libcall:
3903 fndecl = get_callee_fndecl (orig_exp);
3904 fcode = DECL_FUNCTION_CODE (fndecl);
3905 if (fcode == BUILT_IN_MEMSET
3906 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3907 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3908 dest, val, len);
3909 else if (fcode == BUILT_IN_BZERO)
3910 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3911 dest, len);
3912 else
3913 gcc_unreachable ();
3914 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3915 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3916 return expand_call (fn, target, target == const0_rtx);
3919 /* Expand expression EXP, which is a call to the bzero builtin. Return
3920 NULL_RTX if we failed; the caller should emit a normal call. */
3922 static rtx
3923 expand_builtin_bzero (tree exp)
3925 tree dest, size;
3926 location_t loc = EXPR_LOCATION (exp);
3928 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3929 return NULL_RTX;
3931 dest = CALL_EXPR_ARG (exp, 0);
3932 size = CALL_EXPR_ARG (exp, 1);
3934 /* New argument list transforming bzero(ptr x, int y) to
3935 memset(ptr x, int 0, size_t y). This is done this way
3936 so that if it isn't expanded inline, we fall back to
3937 calling bzero instead of memset. */
3939 return expand_builtin_memset_args (dest, integer_zero_node,
3940 fold_convert_loc (loc,
3941 size_type_node, size),
3942 const0_rtx, VOIDmode, exp);
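/* Editor's sketch of the rewrite performed above, expressed at the
   source level; illustrative only.  Because ORIG_EXP still names
   bzero, a punted expansion falls back to a bzero library call rather
   than a memset one.  */

static void ATTRIBUTE_UNUSED
bzero_as_memset_sketch (void *x, int y)
{
  memset (x, 0, (size_t) y);	/* bzero (x, y) expressed as memset */
}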
3945 /* Expand expression EXP, which is a call to the memcmp built-in function.
3946 Return NULL_RTX if we failed and the caller should emit a normal call,
3947 otherwise try to get the result in TARGET, if convenient (and in mode
3948 MODE, if that's convenient). */
3950 static rtx
3951 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3952 ATTRIBUTE_UNUSED machine_mode mode)
3954 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3956 if (!validate_arglist (exp,
3957 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3958 return NULL_RTX;
3960 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3961 implementing memcmp because it will stop if it encounters two
3962 zero bytes. */
3963 #if defined HAVE_cmpmemsi
3965 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3966 rtx result;
3967 rtx insn;
3968 tree arg1 = CALL_EXPR_ARG (exp, 0);
3969 tree arg2 = CALL_EXPR_ARG (exp, 1);
3970 tree len = CALL_EXPR_ARG (exp, 2);
3972 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3973 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3974 machine_mode insn_mode;
3976 if (HAVE_cmpmemsi)
3977 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3978 else
3979 return NULL_RTX;
3981 /* If we don't have POINTER_TYPE, call the function. */
3982 if (arg1_align == 0 || arg2_align == 0)
3983 return NULL_RTX;
3985 /* Make a place to write the result of the instruction. */
3986 result = target;
3987 if (! (result != 0
3988 && REG_P (result) && GET_MODE (result) == insn_mode
3989 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3990 result = gen_reg_rtx (insn_mode);
3992 arg1_rtx = get_memory_rtx (arg1, len);
3993 arg2_rtx = get_memory_rtx (arg2, len);
3994 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3996 /* Set MEM_SIZE as appropriate. */
3997 if (CONST_INT_P (arg3_rtx))
3999 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
4000 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
4003 if (HAVE_cmpmemsi)
4004 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4005 GEN_INT (MIN (arg1_align, arg2_align)));
4006 else
4007 gcc_unreachable ();
4009 if (insn)
4010 emit_insn (insn);
4011 else
4012 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4013 TYPE_MODE (integer_type_node), 3,
4014 XEXP (arg1_rtx, 0), Pmode,
4015 XEXP (arg2_rtx, 0), Pmode,
4016 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4017 TYPE_UNSIGNED (sizetype)),
4018 TYPE_MODE (sizetype));
4020 /* Return the value in the proper mode for this function. */
4021 mode = TYPE_MODE (TREE_TYPE (exp));
4022 if (GET_MODE (result) == mode)
4023 return result;
4024 else if (target != 0)
4026 convert_move (target, result, 0);
4027 return target;
4029 else
4030 return convert_to_mode (mode, result, 0);
4032 #endif /* HAVE_cmpmemsi. */
4034 return NULL_RTX;
4037 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4038 if we failed; the caller should emit a normal call. Otherwise try to get
4039 the result in TARGET, if convenient. */
4041 static rtx
4042 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4044 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4045 return NULL_RTX;
4047 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4048 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4049 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4051 rtx arg1_rtx, arg2_rtx;
4052 rtx result, insn = NULL_RTX;
4053 tree fndecl, fn;
4054 tree arg1 = CALL_EXPR_ARG (exp, 0);
4055 tree arg2 = CALL_EXPR_ARG (exp, 1);
4057 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4058 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4060 /* If we don't have POINTER_TYPE, call the function. */
4061 if (arg1_align == 0 || arg2_align == 0)
4062 return NULL_RTX;
4064 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4065 arg1 = builtin_save_expr (arg1);
4066 arg2 = builtin_save_expr (arg2);
4068 arg1_rtx = get_memory_rtx (arg1, NULL);
4069 arg2_rtx = get_memory_rtx (arg2, NULL);
4071 #ifdef HAVE_cmpstrsi
4072 /* Try to call cmpstrsi. */
4073 if (HAVE_cmpstrsi)
4075 machine_mode insn_mode
4076 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4078 /* Make a place to write the result of the instruction. */
4079 result = target;
4080 if (! (result != 0
4081 && REG_P (result) && GET_MODE (result) == insn_mode
4082 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4083 result = gen_reg_rtx (insn_mode);
4085 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4086 GEN_INT (MIN (arg1_align, arg2_align)));
4088 #endif
4089 #ifdef HAVE_cmpstrnsi
4090 /* Try to determine at least one length and call cmpstrnsi. */
4091 if (!insn && HAVE_cmpstrnsi)
4093 tree len;
4094 rtx arg3_rtx;
4096 machine_mode insn_mode
4097 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4098 tree len1 = c_strlen (arg1, 1);
4099 tree len2 = c_strlen (arg2, 1);
4101 if (len1)
4102 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4103 if (len2)
4104 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4106 /* If we don't have a constant length for the first, use the length
4107 of the second, if we know it. We don't require a constant for
4108 this case; some cost analysis could be done if both are available
4109 but neither is constant. For now, assume they're equally cheap,
4110 unless one has side effects. If both strings have constant lengths,
4111 use the smaller. */
4113 if (!len1)
4114 len = len2;
4115 else if (!len2)
4116 len = len1;
4117 else if (TREE_SIDE_EFFECTS (len1))
4118 len = len2;
4119 else if (TREE_SIDE_EFFECTS (len2))
4120 len = len1;
4121 else if (TREE_CODE (len1) != INTEGER_CST)
4122 len = len2;
4123 else if (TREE_CODE (len2) != INTEGER_CST)
4124 len = len1;
4125 else if (tree_int_cst_lt (len1, len2))
4126 len = len1;
4127 else
4128 len = len2;
4130 /* If both arguments have side effects, we cannot optimize. */
4131 if (!len || TREE_SIDE_EFFECTS (len))
4132 goto do_libcall;
4134 arg3_rtx = expand_normal (len);
4136 /* Make a place to write the result of the instruction. */
4137 result = target;
4138 if (! (result != 0
4139 && REG_P (result) && GET_MODE (result) == insn_mode
4140 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4141 result = gen_reg_rtx (insn_mode);
4143 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4144 GEN_INT (MIN (arg1_align, arg2_align)));
4146 #endif
4148 if (insn)
4150 machine_mode mode;
4151 emit_insn (insn);
4153 /* Return the value in the proper mode for this function. */
4154 mode = TYPE_MODE (TREE_TYPE (exp));
4155 if (GET_MODE (result) == mode)
4156 return result;
4157 if (target == 0)
4158 return convert_to_mode (mode, result, 0);
4159 convert_move (target, result, 0);
4160 return target;
4163 /* Expand the library call ourselves using a stabilized argument
4164 list to avoid evaluating the function's arguments twice. */
4165 #ifdef HAVE_cmpstrnsi
4166 do_libcall:
4167 #endif
4168 fndecl = get_callee_fndecl (exp);
4169 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4170 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4171 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4172 return expand_call (fn, target, target == const0_rtx);
4174 #endif
4175 return NULL_RTX;
4178 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4179 NULL_RTX if we failed; the caller should emit a normal call. Otherwise try to get
4180 the result in TARGET, if convenient. */
4182 static rtx
4183 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4184 ATTRIBUTE_UNUSED machine_mode mode)
4186 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4188 if (!validate_arglist (exp,
4189 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4190 return NULL_RTX;
4192 /* If c_strlen can determine an expression for one of the string
4193 lengths, and it doesn't have side effects, then emit cmpstrnsi
4194 using length MIN(strlen(string)+1, arg3). */
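/* Editor's note, a worked example of the bound above (illustrative):
   for strncmp ("hello", s, 100) we have strlen ("hello") + 1 == 6, so
   the comparison length becomes MIN (6, 100) == 6; strncmp can never
   examine bytes past the NUL of the known-constant string:

     len = MIN (strlen (known) + 1, arg3);  */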
4195 #ifdef HAVE_cmpstrnsi
4196 if (HAVE_cmpstrnsi)
4198 tree len, len1, len2;
4199 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4200 rtx result, insn;
4201 tree fndecl, fn;
4202 tree arg1 = CALL_EXPR_ARG (exp, 0);
4203 tree arg2 = CALL_EXPR_ARG (exp, 1);
4204 tree arg3 = CALL_EXPR_ARG (exp, 2);
4206 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4207 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4208 machine_mode insn_mode
4209 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4211 len1 = c_strlen (arg1, 1);
4212 len2 = c_strlen (arg2, 1);
4214 if (len1)
4215 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4216 if (len2)
4217 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4219 /* If we don't have a constant length for the first, use the length
4220 of the second, if we know it. We don't require a constant for
4221 this case; some cost analysis could be done if both are available
4222 but neither is constant. For now, assume they're equally cheap,
4223 unless one has side effects. If both strings have constant lengths,
4224 use the smaller. */
4226 if (!len1)
4227 len = len2;
4228 else if (!len2)
4229 len = len1;
4230 else if (TREE_SIDE_EFFECTS (len1))
4231 len = len2;
4232 else if (TREE_SIDE_EFFECTS (len2))
4233 len = len1;
4234 else if (TREE_CODE (len1) != INTEGER_CST)
4235 len = len2;
4236 else if (TREE_CODE (len2) != INTEGER_CST)
4237 len = len1;
4238 else if (tree_int_cst_lt (len1, len2))
4239 len = len1;
4240 else
4241 len = len2;
4243 /* If both arguments have side effects, we cannot optimize. */
4244 if (!len || TREE_SIDE_EFFECTS (len))
4245 return NULL_RTX;
4247 /* The actual new length parameter is MIN(len,arg3). */
4248 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4249 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4251 /* If we don't have POINTER_TYPE, call the function. */
4252 if (arg1_align == 0 || arg2_align == 0)
4253 return NULL_RTX;
4255 /* Make a place to write the result of the instruction. */
4256 result = target;
4257 if (! (result != 0
4258 && REG_P (result) && GET_MODE (result) == insn_mode
4259 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4260 result = gen_reg_rtx (insn_mode);
4262 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4263 arg1 = builtin_save_expr (arg1);
4264 arg2 = builtin_save_expr (arg2);
4265 len = builtin_save_expr (len);
4267 arg1_rtx = get_memory_rtx (arg1, len);
4268 arg2_rtx = get_memory_rtx (arg2, len);
4269 arg3_rtx = expand_normal (len);
4270 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4271 GEN_INT (MIN (arg1_align, arg2_align)));
4272 if (insn)
4274 emit_insn (insn);
4276 /* Return the value in the proper mode for this function. */
4277 mode = TYPE_MODE (TREE_TYPE (exp));
4278 if (GET_MODE (result) == mode)
4279 return result;
4280 if (target == 0)
4281 return convert_to_mode (mode, result, 0);
4282 convert_move (target, result, 0);
4283 return target;
4286 /* Expand the library call ourselves using a stabilized argument
4287 list to avoid evaluating the function's arguments twice. */
4288 fndecl = get_callee_fndecl (exp);
4289 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4290 arg1, arg2, len);
4291 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4292 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4293 return expand_call (fn, target, target == const0_rtx);
4295 #endif
4296 return NULL_RTX;
4299 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4300 if that's convenient. */
4302 rtx
4303 expand_builtin_saveregs (void)
4305 rtx val;
4306 rtx_insn *seq;
4308 /* Don't do __builtin_saveregs more than once in a function.
4309 Save the result of the first call and reuse it. */
4310 if (saveregs_value != 0)
4311 return saveregs_value;
4313 /* When this function is called, it means that registers must be
4314 saved on entry to this function. So we migrate the call to the
4315 first insn of this function. */
4317 start_sequence ();
4319 /* Do whatever the machine needs done in this case. */
4320 val = targetm.calls.expand_builtin_saveregs ();
4322 seq = get_insns ();
4323 end_sequence ();
4325 saveregs_value = val;
4327 /* Put the insns after the NOTE that starts the function. If this
4328 is inside a start_sequence, make the outer-level insn chain current, so
4329 the code is placed at the start of the function. */
4330 push_topmost_sequence ();
4331 emit_insn_after (seq, entry_of_function ());
4332 pop_topmost_sequence ();
4334 return val;
4337 /* Expand a call to __builtin_next_arg. */
4339 static rtx
4340 expand_builtin_next_arg (void)
4342 /* Checking arguments is already done in fold_builtin_next_arg
4343 that must be called before this function. */
4344 return expand_binop (ptr_mode, add_optab,
4345 crtl->args.internal_arg_pointer,
4346 crtl->args.arg_offset_rtx,
4347 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4350 /* Make it easier for the backends by protecting the valist argument
4351 from multiple evaluations. */
4353 static tree
4354 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4356 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4358 /* The current way of determining the type of valist is completely
4359 bogus. We should have the information on the va builtin instead. */
4360 if (!vatype)
4361 vatype = targetm.fn_abi_va_list (cfun->decl);
4363 if (TREE_CODE (vatype) == ARRAY_TYPE)
4365 if (TREE_SIDE_EFFECTS (valist))
4366 valist = save_expr (valist);
4368 /* For this case, the backends will be expecting a pointer to
4369 vatype, but it's possible we've actually been given an array
4370 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4371 So fix it. */
4372 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4374 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4375 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4378 else
4380 tree pt = build_pointer_type (vatype);
4382 if (! needs_lvalue)
4384 if (! TREE_SIDE_EFFECTS (valist))
4385 return valist;
4387 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4388 TREE_SIDE_EFFECTS (valist) = 1;
4391 if (TREE_SIDE_EFFECTS (valist))
4392 valist = save_expr (valist);
4393 valist = fold_build2_loc (loc, MEM_REF,
4394 vatype, valist, build_int_cst (pt, 0));
4397 return valist;
4400 /* The "standard" definition of va_list is void*. */
4402 tree
4403 std_build_builtin_va_list (void)
4405 return ptr_type_node;
4408 /* The "standard" abi va_list is va_list_type_node. */
4410 tree
4411 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4413 return va_list_type_node;
4416 /* The "standard" type of va_list is va_list_type_node. */
4418 tree
4419 std_canonical_va_list_type (tree type)
4421 tree wtype, htype;
4423 if (INDIRECT_REF_P (type))
4424 type = TREE_TYPE (type);
4425 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4426 type = TREE_TYPE (type);
4427 wtype = va_list_type_node;
4428 htype = type;
4429 /* Treat structure va_list types. */
4430 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4431 htype = TREE_TYPE (htype);
4432 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4434 /* If va_list is an array type, the argument may have decayed
4435 to a pointer type, e.g. by being passed to another function.
4436 In that case, unwrap both types so that we can compare the
4437 underlying records. */
4438 if (TREE_CODE (htype) == ARRAY_TYPE
4439 || POINTER_TYPE_P (htype))
4441 wtype = TREE_TYPE (wtype);
4442 htype = TREE_TYPE (htype);
4445 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4446 return va_list_type_node;
4448 return NULL_TREE;
4451 /* The "standard" implementation of va_start: just assign `nextarg' to
4452 the variable. */
4454 void
4455 std_expand_builtin_va_start (tree valist, rtx nextarg)
4457 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4458 convert_move (va_r, nextarg, 0);
4460 /* We do not have any valid bounds for the pointer, so
4461 just store zero bounds for it. */
4462 if (chkp_function_instrumented_p (current_function_decl))
4463 chkp_expand_bounds_reset_for_mem (valist,
4464 make_tree (TREE_TYPE (valist),
4465 nextarg));
4468 /* Expand EXP, a call to __builtin_va_start. */
4470 static rtx
4471 expand_builtin_va_start (tree exp)
4473 rtx nextarg;
4474 tree valist;
4475 location_t loc = EXPR_LOCATION (exp);
4477 if (call_expr_nargs (exp) < 2)
4479 error_at (loc, "too few arguments to function %<va_start%>");
4480 return const0_rtx;
4483 if (fold_builtin_next_arg (exp, true))
4484 return const0_rtx;
4486 nextarg = expand_builtin_next_arg ();
4487 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4489 if (targetm.expand_builtin_va_start)
4490 targetm.expand_builtin_va_start (valist, nextarg);
4491 else
4492 std_expand_builtin_va_start (valist, nextarg);
4494 return const0_rtx;
4497 /* Expand EXP, a call to __builtin_va_end. */
4499 static rtx
4500 expand_builtin_va_end (tree exp)
4502 tree valist = CALL_EXPR_ARG (exp, 0);
4504 /* Evaluate for side effects, if needed. I hate macros that don't
4505 do that. */
4506 if (TREE_SIDE_EFFECTS (valist))
4507 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4509 return const0_rtx;
4512 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4513 builtin rather than just as an assignment in stdarg.h because of the
4514 nastiness of array-type va_list types. */
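/* Editor's sketch of the "nastiness", illustrative only: on targets
   whose va_list is an array type (on x86-64 it is an array of one
   struct), plain assignment between two va_list objects is ill-formed,
   so the copy must be a block move, as in the else branch below.  The
   type names here are hypothetical.  */

static void ATTRIBUTE_UNUSED
va_copy_array_sketch (void)
{
  typedef struct { int gp_offset; } hypothetical_tag;
  typedef hypothetical_tag hypothetical_va_list[1];
  hypothetical_va_list src = { { 0 } }, dst;
  /* dst = src;  -- error: array types are not assignable.  */
  memcpy (dst, src, sizeof dst);   /* what emit_block_move does below */
}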
4516 static rtx
4517 expand_builtin_va_copy (tree exp)
4519 tree dst, src, t;
4520 location_t loc = EXPR_LOCATION (exp);
4522 dst = CALL_EXPR_ARG (exp, 0);
4523 src = CALL_EXPR_ARG (exp, 1);
4525 dst = stabilize_va_list_loc (loc, dst, 1);
4526 src = stabilize_va_list_loc (loc, src, 0);
4528 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4530 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4532 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4533 TREE_SIDE_EFFECTS (t) = 1;
4534 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4536 else
4538 rtx dstb, srcb, size;
4540 /* Evaluate to pointers. */
4541 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4542 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4543 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4544 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4546 dstb = convert_memory_address (Pmode, dstb);
4547 srcb = convert_memory_address (Pmode, srcb);
4549 /* "Dereference" to BLKmode memories. */
4550 dstb = gen_rtx_MEM (BLKmode, dstb);
4551 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4552 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4553 srcb = gen_rtx_MEM (BLKmode, srcb);
4554 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4555 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4557 /* Copy. */
4558 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4561 return const0_rtx;
4564 /* Expand a call to one of the builtin functions __builtin_frame_address or
4565 __builtin_return_address. */
4567 static rtx
4568 expand_builtin_frame_address (tree fndecl, tree exp)
4570 /* The argument must be a nonnegative integer constant.
4571 It counts the number of frames to scan up the stack.
4572 The value is the return address saved in that frame. */
4573 if (call_expr_nargs (exp) == 0)
4574 /* Warning about missing arg was already issued. */
4575 return const0_rtx;
4576 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4578 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4579 error ("invalid argument to %<__builtin_frame_address%>");
4580 else
4581 error ("invalid argument to %<__builtin_return_address%>");
4582 return const0_rtx;
4584 else
4586 rtx tem
4587 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4588 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4590 /* Some ports cannot access arbitrary stack frames. */
4591 if (tem == NULL)
4593 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4594 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4595 else
4596 warning (0, "unsupported argument to %<__builtin_return_address%>");
4597 return const0_rtx;
4600 /* For __builtin_frame_address, return what we've got. */
4601 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4602 return tem;
4604 if (!REG_P (tem)
4605 && ! CONSTANT_P (tem))
4606 tem = copy_addr_to_reg (tem);
4607 return tem;
4611 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4612 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4613 is the same as for allocate_dynamic_stack_space. */
4615 static rtx
4616 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4618 rtx op0;
4619 rtx result;
4620 bool valid_arglist;
4621 unsigned int align;
4622 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4623 == BUILT_IN_ALLOCA_WITH_ALIGN);
4625 valid_arglist
4626 = (alloca_with_align
4627 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4628 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4630 if (!valid_arglist)
4631 return NULL_RTX;
4633 /* Compute the argument. */
4634 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4636 /* Compute the alignment. */
4637 align = (alloca_with_align
4638 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4639 : BIGGEST_ALIGNMENT);
4641 /* Allocate the desired space. */
4642 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4643 result = convert_memory_address (ptr_mode, result);
4645 return result;
4648 /* Expand a call to bswap builtin in EXP.
4649 Return NULL_RTX if a normal call should be emitted rather than expanding the
4650 function in-line. If convenient, the result should be placed in TARGET.
4651 SUBTARGET may be used as the target for computing one of EXP's operands. */
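/* Editor's sketch: the value the bswap expansion computes, e.g.
   __builtin_bswap32 (0x12345678) == 0x78563412.  A byte-level
   fallback, assuming a 32-bit unsigned int; illustrative only.  */

static unsigned int ATTRIBUTE_UNUSED
bswap32_sketch (unsigned int x)
{
  return ((x & 0x000000ffu) << 24) | ((x & 0x0000ff00u) << 8)
	 | ((x & 0x00ff0000u) >> 8) | ((x & 0xff000000u) >> 24);
}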
4653 static rtx
4654 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4655 rtx subtarget)
4657 tree arg;
4658 rtx op0;
4660 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4661 return NULL_RTX;
4663 arg = CALL_EXPR_ARG (exp, 0);
4664 op0 = expand_expr (arg,
4665 subtarget && GET_MODE (subtarget) == target_mode
4666 ? subtarget : NULL_RTX,
4667 target_mode, EXPAND_NORMAL);
4668 if (GET_MODE (op0) != target_mode)
4669 op0 = convert_to_mode (target_mode, op0, 1);
4671 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4673 gcc_assert (target);
4675 return convert_to_mode (target_mode, target, 1);
4678 /* Expand a call to a unary builtin in EXP.
4679 Return NULL_RTX if a normal call should be emitted rather than expanding the
4680 function in-line. If convenient, the result should be placed in TARGET.
4681 SUBTARGET may be used as the target for computing one of EXP's operands. */
4683 static rtx
4684 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4685 rtx subtarget, optab op_optab)
4687 rtx op0;
4689 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4690 return NULL_RTX;
4692 /* Compute the argument. */
4693 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4694 (subtarget
4695 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4696 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4697 VOIDmode, EXPAND_NORMAL);
4698 /* Compute op, into TARGET if possible.
4699 Set TARGET to wherever the result comes back. */
4700 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4701 op_optab, op0, target, op_optab != clrsb_optab);
4702 gcc_assert (target);
4704 return convert_to_mode (target_mode, target, 0);
4707 /* Expand a call to __builtin_expect. We just return our argument
4708 as the builtin_expect semantics should already have been handled by
4709 the tree branch prediction pass. */
4711 static rtx
4712 expand_builtin_expect (tree exp, rtx target)
4714 tree arg;
4716 if (call_expr_nargs (exp) < 2)
4717 return const0_rtx;
4718 arg = CALL_EXPR_ARG (exp, 0);
4720 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4721 /* When guessing was done, the hints should be already stripped away. */
4722 gcc_assert (!flag_guess_branch_prob
4723 || optimize == 0 || seen_error ());
4724 return target;
4727 /* Expand a call to __builtin_assume_aligned. We just return our first
4728 argument, as the builtin_assume_aligned semantics should already have
4729 been handled by CCP. */
4731 static rtx
4732 expand_builtin_assume_aligned (tree exp, rtx target)
4734 if (call_expr_nargs (exp) < 2)
4735 return const0_rtx;
4736 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4737 EXPAND_NORMAL);
4738 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4739 && (call_expr_nargs (exp) < 3
4740 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4741 return target;
4744 void
4745 expand_builtin_trap (void)
4747 #ifdef HAVE_trap
4748 if (HAVE_trap)
4750 rtx insn = emit_insn (gen_trap ());
4751 /* For trap insns when not accumulating outgoing args force
4752 REG_ARGS_SIZE note to prevent crossjumping of calls with
4753 different args sizes. */
4754 if (!ACCUMULATE_OUTGOING_ARGS)
4755 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4757 else
4758 #endif
4759 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4760 emit_barrier ();
4763 /* Expand a call to __builtin_unreachable. We do nothing except emit
4764 a barrier saying that control flow will not pass here.
4766 It is the responsibility of the program being compiled to ensure
4767 that control flow never reaches __builtin_unreachable. */
4768 static void
4769 expand_builtin_unreachable (void)
4771 emit_barrier ();
4774 /* Expand EXP, a call to fabs, fabsf or fabsl.
4775 Return NULL_RTX if a normal call should be emitted rather than expanding
4776 the function inline. If convenient, the result should be placed
4777 in TARGET. SUBTARGET may be used as the target for computing
4778 the operand. */
4780 static rtx
4781 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4783 machine_mode mode;
4784 tree arg;
4785 rtx op0;
4787 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4788 return NULL_RTX;
4790 arg = CALL_EXPR_ARG (exp, 0);
4791 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4792 mode = TYPE_MODE (TREE_TYPE (arg));
4793 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4794 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4797 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4798 Return NULL if a normal call should be emitted rather than expanding the
4799 function inline. If convenient, the result should be placed in TARGET.
4800 SUBTARGET may be used as the target for computing the operand. */
4802 static rtx
4803 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4805 rtx op0, op1;
4806 tree arg;
4808 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4809 return NULL_RTX;
4811 arg = CALL_EXPR_ARG (exp, 0);
4812 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4814 arg = CALL_EXPR_ARG (exp, 1);
4815 op1 = expand_normal (arg);
4817 return expand_copysign (op0, op1, target);
4820 /* Expand a call to __builtin___clear_cache. */
4822 static rtx
4823 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4825 #ifndef HAVE_clear_cache
4826 #ifdef CLEAR_INSN_CACHE
4827 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4828 does something. Just do the default expansion to a call to
4829 __clear_cache(). */
4830 return NULL_RTX;
4831 #else
4832 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4833 does nothing. There is no need to call it. Do nothing. */
4834 return const0_rtx;
4835 #endif /* CLEAR_INSN_CACHE */
4836 #else
4837 /* We have a "clear_cache" insn, and it will handle everything. */
4838 tree begin, end;
4839 rtx begin_rtx, end_rtx;
4841 /* We must not expand to a library call. If we did, any
4842 fallback library function in libgcc that might contain a call to
4843 __builtin___clear_cache() would recurse infinitely. */
4844 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4846 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4847 return const0_rtx;
4850 if (HAVE_clear_cache)
4852 struct expand_operand ops[2];
4854 begin = CALL_EXPR_ARG (exp, 0);
4855 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4857 end = CALL_EXPR_ARG (exp, 1);
4858 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4860 create_address_operand (&ops[0], begin_rtx);
4861 create_address_operand (&ops[1], end_rtx);
4862 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4863 return const0_rtx;
4865 return const0_rtx;
4866 #endif /* HAVE_clear_cache */
4869 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
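/* Editor's sketch of the rounding below in plain C, assuming a
   power-of-two byte alignment: add ALIGN - 1, then mask with -ALIGN.
   This mirrors the PLUS/AND pair emitted as RTL.  Illustrative only.  */

static unsigned long ATTRIBUTE_UNUSED
round_up_sketch (unsigned long addr, unsigned long align)
{
  return (addr + align - 1) & -align;
}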
4871 static rtx
4872 round_trampoline_addr (rtx tramp)
4874 rtx temp, addend, mask;
4876 /* If we don't need too much alignment, we'll have been guaranteed
4877 proper alignment by get_trampoline_type. */
4878 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4879 return tramp;
4881 /* Round address up to desired boundary. */
4882 temp = gen_reg_rtx (Pmode);
4883 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4884 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4886 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4887 temp, 0, OPTAB_LIB_WIDEN);
4888 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4889 temp, 0, OPTAB_LIB_WIDEN);
4891 return tramp;
4894 static rtx
4895 expand_builtin_init_trampoline (tree exp, bool onstack)
4897 tree t_tramp, t_func, t_chain;
4898 rtx m_tramp, r_tramp, r_chain, tmp;
4900 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4901 POINTER_TYPE, VOID_TYPE))
4902 return NULL_RTX;
4904 t_tramp = CALL_EXPR_ARG (exp, 0);
4905 t_func = CALL_EXPR_ARG (exp, 1);
4906 t_chain = CALL_EXPR_ARG (exp, 2);
4908 r_tramp = expand_normal (t_tramp);
4909 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4910 MEM_NOTRAP_P (m_tramp) = 1;
4912 /* If ONSTACK, the TRAMP argument should be the address of a field
4913 within the local function's FRAME decl. Either way, let's see if
4914 we can fill in the MEM_ATTRs for this memory. */
4915 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4916 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4918 /* Creator of a heap trampoline is responsible for making sure the
4919 address is aligned to at least STACK_BOUNDARY. Normally malloc
4920 will ensure this anyhow. */
4921 tmp = round_trampoline_addr (r_tramp);
4922 if (tmp != r_tramp)
4924 m_tramp = change_address (m_tramp, BLKmode, tmp);
4925 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4926 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4929 /* The FUNC argument should be the address of the nested function.
4930 Extract the actual function decl to pass to the hook. */
4931 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4932 t_func = TREE_OPERAND (t_func, 0);
4933 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4935 r_chain = expand_normal (t_chain);
4937 /* Generate insns to initialize the trampoline. */
4938 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4940 if (onstack)
4942 trampolines_created = 1;
4944 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4945 "trampoline generated for nested function %qD", t_func);
4948 return const0_rtx;
4951 static rtx
4952 expand_builtin_adjust_trampoline (tree exp)
4954 rtx tramp;
4956 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4957 return NULL_RTX;
4959 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4960 tramp = round_trampoline_addr (tramp);
4961 if (targetm.calls.trampoline_adjust_address)
4962 tramp = targetm.calls.trampoline_adjust_address (tramp);
4964 return tramp;
4967 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4968 function. The function first checks whether the back end provides
4969 an insn to implement signbit for the respective mode. If not, it
4970 checks whether the floating point format of the value is such that
4971 the sign bit can be extracted. If that is not the case, the
4972 function returns NULL_RTX to indicate that a normal call should be
4973 emitted rather than expanding the function in-line. EXP is the
4974 expression that is a call to the builtin function; if convenient,
4975 the result should be placed in TARGET. */
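/* Editor's sketch of the extraction for the common case of IEEE
   double, where the sign is bit 63 of the representation; assumes a
   64-bit unsigned long long.  Illustrative only.  */

static int ATTRIBUTE_UNUSED
signbit_double_sketch (double x)
{
  unsigned long long bits;
  memcpy (&bits, &x, sizeof bits);   /* reinterpret the representation */
  return (int) (bits >> 63);	     /* the sign bit */
}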
4976 static rtx
4977 expand_builtin_signbit (tree exp, rtx target)
4979 const struct real_format *fmt;
4980 machine_mode fmode, imode, rmode;
4981 tree arg;
4982 int word, bitpos;
4983 enum insn_code icode;
4984 rtx temp;
4985 location_t loc = EXPR_LOCATION (exp);
4987 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4988 return NULL_RTX;
4990 arg = CALL_EXPR_ARG (exp, 0);
4991 fmode = TYPE_MODE (TREE_TYPE (arg));
4992 rmode = TYPE_MODE (TREE_TYPE (exp));
4993 fmt = REAL_MODE_FORMAT (fmode);
4995 arg = builtin_save_expr (arg);
4997 /* Expand the argument yielding a RTX expression. */
4998 temp = expand_normal (arg);
5000 /* Check if the back end provides an insn that handles signbit for the
5001 argument's mode. */
5002 icode = optab_handler (signbit_optab, fmode);
5003 if (icode != CODE_FOR_nothing)
5005 rtx_insn *last = get_last_insn ();
5006 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5007 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5008 return target;
5009 delete_insns_since (last);
5012 /* For floating point formats without a sign bit, implement signbit
5013 as "ARG < 0.0". */
5014 bitpos = fmt->signbit_ro;
5015 if (bitpos < 0)
5017 /* But we can't do this if the format supports signed zero. */
5018 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5019 return NULL_RTX;
5021 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5022 build_real (TREE_TYPE (arg), dconst0));
5023 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5026 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5028 imode = int_mode_for_mode (fmode);
5029 if (imode == BLKmode)
5030 return NULL_RTX;
5031 temp = gen_lowpart (imode, temp);
5033 else
5035 imode = word_mode;
5036 /* Handle targets with different FP word orders. */
5037 if (FLOAT_WORDS_BIG_ENDIAN)
5038 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5039 else
5040 word = bitpos / BITS_PER_WORD;
5041 temp = operand_subword_force (temp, word, fmode);
5042 bitpos = bitpos % BITS_PER_WORD;
5045 /* Force the intermediate word_mode (or narrower) result into a
5046 register. This avoids attempting to create paradoxical SUBREGs
5047 of floating point modes below. */
5048 temp = force_reg (imode, temp);
5050 /* If the bitpos is within the "result mode" lowpart, the operation
5051 can be implemented with a single bitwise AND. Otherwise, we need
5052 a right shift and an AND. */
5054 if (bitpos < GET_MODE_BITSIZE (rmode))
5056 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5058 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5059 temp = gen_lowpart (rmode, temp);
5060 temp = expand_binop (rmode, and_optab, temp,
5061 immed_wide_int_const (mask, rmode),
5062 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5064 else
5066 /* Perform a logical right shift to place the signbit in the least
5067 significant bit, then truncate the result to the desired mode
5068 and mask just this bit. */
5069 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5070 temp = gen_lowpart (rmode, temp);
5071 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5072 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5075 return temp;
5078 /* Expand fork or exec calls. TARGET is the desired target of the
5079 call. EXP is the call. FN is the
5080 identifier of the actual function. IGNORE is nonzero if the
5081 value is to be ignored. */
5083 static rtx
5084 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5086 tree id, decl;
5087 tree call;
5089 /* If we are not profiling, just call the function. */
5090 if (!profile_arc_flag)
5091 return NULL_RTX;
5093 /* Otherwise call the wrapper. This should be equivalent for the rest of
5094 the compiler, so the code does not diverge, and the wrapper may run the
5095 code necessary for keeping the profiling sane. */
5097 switch (DECL_FUNCTION_CODE (fn))
5099 case BUILT_IN_FORK:
5100 id = get_identifier ("__gcov_fork");
5101 break;
5103 case BUILT_IN_EXECL:
5104 id = get_identifier ("__gcov_execl");
5105 break;
5107 case BUILT_IN_EXECV:
5108 id = get_identifier ("__gcov_execv");
5109 break;
5111 case BUILT_IN_EXECLP:
5112 id = get_identifier ("__gcov_execlp");
5113 break;
5115 case BUILT_IN_EXECLE:
5116 id = get_identifier ("__gcov_execle");
5117 break;
5119 case BUILT_IN_EXECVP:
5120 id = get_identifier ("__gcov_execvp");
5121 break;
5123 case BUILT_IN_EXECVE:
5124 id = get_identifier ("__gcov_execve");
5125 break;
5127 default:
5128 gcc_unreachable ();
5131 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5132 FUNCTION_DECL, id, TREE_TYPE (fn));
5133 DECL_EXTERNAL (decl) = 1;
5134 TREE_PUBLIC (decl) = 1;
5135 DECL_ARTIFICIAL (decl) = 1;
5136 TREE_NOTHROW (decl) = 1;
5137 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5138 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5139 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5140 return expand_call (call, target, ignore);
5145 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5146 the pointer in these functions is void*, the tree optimizers may remove
5147 casts. The mode computed in expand_builtin isn't reliable either, due
5148 to __sync_bool_compare_and_swap.
5150 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5151 group of builtins. This gives us log2 of the mode size. */
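/* Editor's note, a worked example (illustrative): with BITS_PER_UNIT
   == 8, FCODE_DIFF values 0..4 request 8, 16, 32, 64 and 128 bit
   integer modes, i.e. QImode, HImode, SImode, DImode and TImode.  */

static int ATTRIBUTE_UNUSED
sync_mode_bits_sketch (int fcode_diff)
{
  return BITS_PER_UNIT << fcode_diff;	/* log2 size -> bit size */
}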
5153 static inline machine_mode
5154 get_builtin_sync_mode (int fcode_diff)
5156 /* The size is not negotiable, so ask not to get BLKmode in return
5157 if the target indicates that a smaller size would be better. */
5158 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5161 /* Expand the memory expression LOC and return the appropriate memory operand
5162 for the builtin_sync operations. */
5164 static rtx
5165 get_builtin_sync_mem (tree loc, machine_mode mode)
5167 rtx addr, mem;
5169 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5170 addr = convert_memory_address (Pmode, addr);
5172 /* Note that we explicitly do not want any alias information for this
5173 memory, so that we kill all other live memories. Otherwise we don't
5174 satisfy the full barrier semantics of the intrinsic. */
5175 mem = validize_mem (gen_rtx_MEM (mode, addr));
5177 /* The alignment needs to be at least that of the mode. */
5178 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5179 get_pointer_alignment (loc)));
5180 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5181 MEM_VOLATILE_P (mem) = 1;
5183 return mem;
5186 /* Make sure an argument is in the right mode.
5187 EXP is the tree argument.
5188 MODE is the mode it should be in. */
5190 static rtx
5191 expand_expr_force_mode (tree exp, machine_mode mode)
5193 rtx val;
5194 machine_mode old_mode;
5196 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5197 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5198 of CONST_INTs, where we know the old_mode only from the call argument. */
5200 old_mode = GET_MODE (val);
5201 if (old_mode == VOIDmode)
5202 old_mode = TYPE_MODE (TREE_TYPE (exp));
5203 val = convert_modes (mode, old_mode, val, 1);
5204 return val;
5208 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5209 EXP is the CALL_EXPR. CODE is the rtx code
5210 that corresponds to the arithmetic or logical operation from the name;
5211 an exception here is that NOT actually means NAND. TARGET is an optional
5212 place for us to store the results; AFTER is true if this is the
5213 xxx_and_fetch form. */
5215 static rtx
5216 expand_builtin_sync_operation (machine_mode mode, tree exp,
5217 enum rtx_code code, bool after,
5218 rtx target)
5220 rtx val, mem;
5221 location_t loc = EXPR_LOCATION (exp);
5223 if (code == NOT && warn_sync_nand)
5225 tree fndecl = get_callee_fndecl (exp);
5226 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5228 static bool warned_f_a_n, warned_n_a_f;
5230 switch (fcode)
5232 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5233 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5234 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5235 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5236 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5237 if (warned_f_a_n)
5238 break;
5240 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5241 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5242 warned_f_a_n = true;
5243 break;
5245 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5246 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5247 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5248 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5249 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5250 if (warned_n_a_f)
5251 break;
5253 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5254 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5255 warned_n_a_f = true;
5256 break;
5258 default:
5259 gcc_unreachable ();
5263 /* Expand the operands. */
5264 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5265 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5267 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5268 after);
5271 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5272 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5273 true if this is the boolean form. TARGET is a place for us to store the
5274 results; this is NOT optional if IS_BOOL is true. */
5276 static rtx
5277 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5278 bool is_bool, rtx target)
5280 rtx old_val, new_val, mem;
5281 rtx *pbool, *poval;
5283 /* Expand the operands. */
5284 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5285 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5286 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5288 pbool = poval = NULL;
5289 if (target != const0_rtx)
5291 if (is_bool)
5292 pbool = &target;
5293 else
5294 poval = &target;
5296 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5297 false, MEMMODEL_SEQ_CST,
5298 MEMMODEL_SEQ_CST))
5299 return NULL_RTX;
5301 return target;
5304 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5305 general form is actually an atomic exchange, and some targets only
5306 support a reduced form with the second argument being a constant 1.
5307 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5308 the results. */
5310 static rtx
5311 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5312 rtx target)
5314 rtx val, mem;
5316 /* Expand the operands. */
5317 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5318 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5320 return expand_sync_lock_test_and_set (target, mem, val);
5323 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5325 static void
5326 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5328 rtx mem;
5330 /* Expand the operands. */
5331 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5333 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
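/* A sketch of the reduced form described above, on a hypothetical
   byte-sized lock; illustration only.  A target that only supports
   test-and-set of the constant 1 can still implement a simple spinlock,
   which is all these two builtins guarantee.  */
#if 0
static volatile unsigned char lock;

void
spin_acquire (void)
{
  while (__sync_lock_test_and_set (&lock, 1))
    ;				/* spin until the old value was 0 */
}

void
spin_release (void)
{
  __sync_lock_release (&lock);	/* release-ordered store of 0 */
}
#endif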
5336 /* Given an integer representing an ``enum memmodel'', verify its
5337 correctness and return the memory model enum. */
5339 static enum memmodel
5340 get_memmodel (tree exp)
5342 rtx op;
5343 unsigned HOST_WIDE_INT val;
5345 /* If the parameter is not a constant, it's a run-time value, so we'll just
5346 convert it to MEMMODEL_SEQ_CST to avoid annoying run-time checking. */
5347 if (TREE_CODE (exp) != INTEGER_CST)
5348 return MEMMODEL_SEQ_CST;
5350 op = expand_normal (exp);
5352 val = INTVAL (op);
5353 if (targetm.memmodel_check)
5354 val = targetm.memmodel_check (val);
5355 else if (val & ~MEMMODEL_MASK)
5357 warning (OPT_Winvalid_memory_model,
5358 "Unknown architecture specifier in memory model to builtin.");
5359 return MEMMODEL_SEQ_CST;
5362 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5364 warning (OPT_Winvalid_memory_model,
5365 "invalid memory model argument to builtin");
5366 return MEMMODEL_SEQ_CST;
5369 return (enum memmodel) val;
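/* A sketch of the two paths through get_memmodel; the functions are
   hypothetical, the __ATOMIC_* macros are the standard user-level
   constants.  A non-constant model is valid C11, so it is pinned to
   MEMMODEL_SEQ_CST rather than checked at run time.  */
#if 0
int
load_with (int *p, int model)
{
  /* MODEL is not an INTEGER_CST: expanded as if it were SEQ_CST.  */
  return __atomic_load_n (p, model);
}

int
load_relaxed (int *p)
{
  /* Constant model: validated against MEMMODEL_MASK at expand time.  */
  return __atomic_load_n (p, __ATOMIC_RELAXED);
}
#endif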
5372 /* Expand the __atomic_exchange intrinsic:
5373 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5374 EXP is the CALL_EXPR.
5375 TARGET is an optional place for us to store the results. */
5377 static rtx
5378 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5380 rtx val, mem;
5381 enum memmodel model;
5383 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5384 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5386 error ("invalid memory model for %<__atomic_exchange%>");
5387 return NULL_RTX;
5390 if (!flag_inline_atomics)
5391 return NULL_RTX;
5393 /* Expand the operands. */
5394 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5395 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5397 return expand_atomic_exchange (target, mem, val, model);
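/* A usage sketch for the exchange expander above, on a hypothetical flag
   word; illustration only.  Any model other than CONSUME is accepted by
   the check above.  */
#if 0
static int flag;

int
swap_in (int desired)
{
  return __atomic_exchange_n (&flag, desired, __ATOMIC_ACQ_REL);
}
#endif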
5400 /* Expand the __atomic_compare_exchange intrinsic:
5401 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5402 TYPE desired, BOOL weak,
5403 enum memmodel success,
5404 enum memmodel failure)
5405 EXP is the CALL_EXPR.
5406 TARGET is an optional place for us to store the results. */
5408 static rtx
5409 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5410 rtx target)
5412 rtx expect, desired, mem, oldval;
5413 rtx_code_label *label;
5414 enum memmodel success, failure;
5415 tree weak;
5416 bool is_weak;
5418 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5419 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5421 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5422 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5424 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5425 return NULL_RTX;
5428 if (failure > success)
5430 error ("failure memory model cannot be stronger than success "
5431 "memory model for %<__atomic_compare_exchange%>");
5432 return NULL_RTX;
5435 if (!flag_inline_atomics)
5436 return NULL_RTX;
5438 /* Expand the operands. */
5439 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5441 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5442 expect = convert_memory_address (Pmode, expect);
5443 expect = gen_rtx_MEM (mode, expect);
5444 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5446 weak = CALL_EXPR_ARG (exp, 3);
5447 is_weak = false;
5448 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5449 is_weak = true;
5451 if (target == const0_rtx)
5452 target = NULL;
5454 /* Lest the rtl backend create a race condition with an improper store
5455 to memory, always create a new pseudo for OLDVAL. */
5456 oldval = NULL;
5458 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5459 is_weak, success, failure))
5460 return NULL_RTX;
5462 /* Conditionally store back to EXPECT, lest we create a race condition
5463 with an improper store to memory. */
5464 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5465 the normal case where EXPECT is totally private, i.e. a register. At
5466 which point the store can be unconditional. */
5467 label = gen_label_rtx ();
5468 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5469 emit_move_insn (expect, oldval);
5470 emit_label (label);
5472 return target;
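/* A sketch of the conditional store-back implemented above, written as
   the usual CAS retry loop; the increment is a hypothetical example.  On
   failure EXPECTED is refreshed from memory, which is exactly the
   emit_move_insn guarded by the label above.  */
#if 0
static int val;

void
atomic_increment (void)
{
  int expected = __atomic_load_n (&val, __ATOMIC_RELAXED);
  while (!__atomic_compare_exchange_n (&val, &expected, expected + 1,
				       1 /* weak */, __ATOMIC_SEQ_CST,
				       __ATOMIC_RELAXED))
    ;				/* EXPECTED now holds the observed value */
}
#endif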
5475 /* Expand the __atomic_load intrinsic:
5476 TYPE __atomic_load (TYPE *object, enum memmodel)
5477 EXP is the CALL_EXPR.
5478 TARGET is an optional place for us to store the results. */
5480 static rtx
5481 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5483 rtx mem;
5484 enum memmodel model;
5486 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5487 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5488 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5490 error ("invalid memory model for %<__atomic_load%>");
5491 return NULL_RTX;
5494 if (!flag_inline_atomics)
5495 return NULL_RTX;
5497 /* Expand the operand. */
5498 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5500 return expand_atomic_load (target, mem, model);
5504 /* Expand the __atomic_store intrinsic:
5505 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5506 EXP is the CALL_EXPR. */
5509 static rtx
5510 expand_builtin_atomic_store (machine_mode mode, tree exp)
5512 rtx mem, val;
5513 enum memmodel model;
5515 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5516 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5517 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5518 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5520 error ("invalid memory model for %<__atomic_store%>");
5521 return NULL_RTX;
5524 if (!flag_inline_atomics)
5525 return NULL_RTX;
5527 /* Expand the operands. */
5528 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5529 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5531 return expand_atomic_store (mem, val, model, false);
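/* A sketch pairing the load and store expanders above, on a hypothetical
   message/ready pair; illustration only.  The checks above reject
   RELEASE/ACQ_REL on loads and everything but RELAXED, RELEASE, and
   SEQ_CST on stores.  */
#if 0
static int message;
static int ready;

void
publish (int m)
{
  message = m;
  __atomic_store_n (&ready, 1, __ATOMIC_RELEASE);
}

int
consume (void)
{
  while (!__atomic_load_n (&ready, __ATOMIC_ACQUIRE))
    ;
  return message;
}
#endif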
5534 /* Expand the __atomic_fetch_XXX intrinsic:
5535 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5536 EXP is the CALL_EXPR.
5537 TARGET is an optional place for us to store the results.
5538 CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
5539 FETCH_AFTER is true if returning the result of the operation;
5540 false if returning the value before the operation.
5541 IGNORE is true if the result is not used.
5542 EXT_CALL is the correct builtin for an external call if this cannot be
5543 resolved to an instruction sequence. */
5545 static rtx
5546 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5547 enum rtx_code code, bool fetch_after,
5548 bool ignore, enum built_in_function ext_call)
5550 rtx val, mem, ret;
5551 enum memmodel model;
5552 tree fndecl;
5553 tree addr;
5555 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5557 /* Expand the operands. */
5558 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5559 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5561 /* Only try generating instructions if inlining is turned on. */
5562 if (flag_inline_atomics)
5564 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5565 if (ret)
5566 return ret;
5569 /* If no different routine is needed for the library call, just return. */
5570 if (ext_call == BUILT_IN_NONE)
5571 return NULL_RTX;
5573 /* Change the call to the specified function. */
5574 fndecl = get_callee_fndecl (exp);
5575 addr = CALL_EXPR_FN (exp);
5576 STRIP_NOPS (addr);
5578 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5579 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5581 /* Expand the call here so we can emit trailing code. */
5582 ret = expand_call (exp, target, ignore);
5584 /* Replace the original function just in case it matters. */
5585 TREE_OPERAND (addr, 0) = fndecl;
5587 /* Then issue the arithmetic correction to return the right result. */
5588 if (!ignore)
5590 if (code == NOT)
5592 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5593 OPTAB_LIB_WIDEN);
5594 ret = expand_simple_unop (mode, NOT, ret, target, true);
5596 else
5597 ret = expand_simple_binop (mode, code, ret, val, target, true,
5598 OPTAB_LIB_WIDEN);
5600 return ret;
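/* A sketch of the trailing correction above for the NAND case, on plain
   integers; illustration only.  Given the old value fetched by the
   library routine, the nand_and_fetch result is recomputed as
   NOT (old AND val), matching the expand_simple_binop/unop pair.  */
#if 0
unsigned int
nand_fetch_from_fetch_nand (unsigned int old, unsigned int val)
{
  return ~(old & val);
}
#endif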
5604 #ifndef HAVE_atomic_clear
5605 # define HAVE_atomic_clear 0
5606 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5607 #endif
5609 /* Expand an atomic clear operation.
5610 void __atomic_clear (BOOL *obj, enum memmodel)
5611 EXP is the call expression. */
5613 static rtx
5614 expand_builtin_atomic_clear (tree exp)
5616 machine_mode mode;
5617 rtx mem, ret;
5618 enum memmodel model;
5620 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5621 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5622 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5624 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5625 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5627 error ("invalid memory model for %<__atomic_store%>");
5628 return const0_rtx;
5631 if (HAVE_atomic_clear)
5633 emit_insn (gen_atomic_clear (mem, model));
5634 return const0_rtx;
5637 /* Try issuing an __atomic_store, allowing a fallback to a
5638 __sync_lock_release pattern. The only way this can fail is if the
5639 bool type is larger than a word size. Unlikely, but handle it anyway
5640 for completeness. Assume a single-threaded model in that case, since
5641 there is no atomic support anyway, and no barriers are required. */
5642 ret = expand_atomic_store (mem, const0_rtx, model, true);
5643 if (!ret)
5644 emit_move_insn (mem, const0_rtx);
5645 return const0_rtx;
5648 /* Expand an atomic test_and_set operation.
5649 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5650 EXP is the call expression. */
5652 static rtx
5653 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5655 rtx mem;
5656 enum memmodel model;
5657 machine_mode mode;
5659 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5660 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5661 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5663 return expand_atomic_test_and_set (target, mem, model);
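/* A sketch of the flag protocol served by the two expanders above, on a
   hypothetical guard flag; illustration only.  Both derive their mode
   from BOOL_TYPE_SIZE rather than the argument type, since test_and_set
   is only guaranteed to work on a single byte.  */
#if 0
static _Bool guard;

int
try_enter (void)
{
  return !__atomic_test_and_set (&guard, __ATOMIC_ACQUIRE);
}

void
leave (void)
{
  __atomic_clear (&guard, __ATOMIC_RELEASE);
}
#endif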
5667 /* Return true if the object at (optional) argument ARG1, of size ARG0, is
5668 always lock free on this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5670 static tree
5671 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5673 int size;
5674 machine_mode mode;
5675 unsigned int mode_align, type_align;
5677 if (TREE_CODE (arg0) != INTEGER_CST)
5678 return NULL_TREE;
5680 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5681 mode = mode_for_size (size, MODE_INT, 0);
5682 mode_align = GET_MODE_ALIGNMENT (mode);
5684 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5685 type_align = mode_align;
5686 else
5688 tree ttype = TREE_TYPE (arg1);
5690 /* This function is usually invoked and folded immediately by the front
5691 end before anything else has a chance to look at it. The pointer
5692 parameter at this point is usually cast to a void *, so check for that
5693 and look past the cast. */
5694 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5695 && VOID_TYPE_P (TREE_TYPE (ttype)))
5696 arg1 = TREE_OPERAND (arg1, 0);
5698 ttype = TREE_TYPE (arg1);
5699 gcc_assert (POINTER_TYPE_P (ttype));
5701 /* Get the underlying type of the object. */
5702 ttype = TREE_TYPE (ttype);
5703 type_align = TYPE_ALIGN (ttype);
5706 /* If the object has smaller alignment, the lock free routines cannot
5707 be used. */
5708 if (type_align < mode_align)
5709 return boolean_false_node;
5711 /* Check if a compare_and_swap pattern exists for the mode which represents
5712 the required size. The pattern is not allowed to fail, so the existence
5713 of the pattern indicates support is present. */
5714 if (can_compare_and_swap_p (mode, true))
5715 return boolean_true_node;
5716 else
5717 return boolean_false_node;
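/* A sketch of how the fold above is reached from user code; the struct
   is a hypothetical example.  With a constant size and a null object
   pointer, the answer depends only on typical alignment, so the call
   folds to a constant.  */
#if 0
struct pair { int a; int b; };

int
pair_always_lock_free (void)
{
  /* Folds to 1 or 0 at compile time via the function above.  */
  return __atomic_always_lock_free (sizeof (struct pair), 0);
}
#endif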
5720 /* Return true if the parameters to call EXP represent an object which will
5721 always generate lock free instructions. The first argument represents the
5722 size of the object, and the second parameter is a pointer to the object
5723 itself. If NULL is passed for the object, then the result is based on
5724 typical alignment for an object of the specified size. Otherwise return
5725 false. */
5727 static rtx
5728 expand_builtin_atomic_always_lock_free (tree exp)
5730 tree size;
5731 tree arg0 = CALL_EXPR_ARG (exp, 0);
5732 tree arg1 = CALL_EXPR_ARG (exp, 1);
5734 if (TREE_CODE (arg0) != INTEGER_CST)
5736 error ("non-constant argument 1 to __atomic_always_lock_free");
5737 return const0_rtx;
5740 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5741 if (size == boolean_true_node)
5742 return const1_rtx;
5743 return const0_rtx;
5746 /* Return one or zero if it can be determined that the object ARG1 of size
5747 ARG0 is lock free on this architecture. */
5749 static tree
5750 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5752 if (!flag_inline_atomics)
5753 return NULL_TREE;
5755 /* If it isn't always lock free, don't generate a result. */
5756 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5757 return boolean_true_node;
5759 return NULL_TREE;
5762 /* Return true if the parameters to call EXP represent an object which will
5763 generate lock free instructions. The first argument represents the
5764 size of the object, and the second parameter is a pointer to the object
5765 itself. If NULL is passed for the object, then the result is based on
5766 typical alignment for an object of the specified size. Otherwise return
5767 NULL. */
5769 static rtx
5770 expand_builtin_atomic_is_lock_free (tree exp)
5772 tree size;
5773 tree arg0 = CALL_EXPR_ARG (exp, 0);
5774 tree arg1 = CALL_EXPR_ARG (exp, 1);
5776 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5778 error ("non-integer argument 1 to __atomic_is_lock_free");
5779 return NULL_RTX;
5782 if (!flag_inline_atomics)
5783 return NULL_RTX;
5785 /* If the value is known at compile time, return the RTX for it. */
5786 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5787 if (size == boolean_true_node)
5788 return const1_rtx;
5790 return NULL_RTX;
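/* A sketch contrasting the two lock-free queries, with a hypothetical
   16-byte object; illustration only.  Unlike always_lock_free, this one
   may expand to NULL_RTX above and end up as a run-time libatomic call
   when the answer is not known at compile time.  */
#if 0
int
big_is_lock_free (void *obj)
{
  return __atomic_is_lock_free (16, obj);
}
#endif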
5793 /* Expand the __atomic_thread_fence intrinsic:
5794 void __atomic_thread_fence (enum memmodel)
5795 EXP is the CALL_EXPR. */
5797 static void
5798 expand_builtin_atomic_thread_fence (tree exp)
5800 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5801 expand_mem_thread_fence (model);
5804 /* Expand the __atomic_signal_fence intrinsic:
5805 void __atomic_signal_fence (enum memmodel)
5806 EXP is the CALL_EXPR. */
5808 static void
5809 expand_builtin_atomic_signal_fence (tree exp)
5811 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5812 expand_mem_signal_fence (model);
5815 /* Expand the __sync_synchronize intrinsic. */
5817 static void
5818 expand_builtin_sync_synchronize (void)
5820 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
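/* A sketch of the three fence entry points above; the flag is
   hypothetical.  A signal fence only constrains the compiler, a thread
   fence emits a real memory barrier where the target needs one, and
   __sync_synchronize is the legacy SEQ_CST thread fence.  */
#if 0
static volatile int seen_by_handler;

void
fences (void)
{
  seen_by_handler = 1;
  __atomic_signal_fence (__ATOMIC_RELEASE);	/* compiler barrier only */
  __atomic_thread_fence (__ATOMIC_SEQ_CST);	/* hardware barrier */
  __sync_synchronize ();			/* same as the line above */
}
#endif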
5823 static rtx
5824 expand_builtin_thread_pointer (tree exp, rtx target)
5826 enum insn_code icode;
5827 if (!validate_arglist (exp, VOID_TYPE))
5828 return const0_rtx;
5829 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5830 if (icode != CODE_FOR_nothing)
5832 struct expand_operand op;
5833 /* If the target is not suitable then create a new target. */
5834 if (target == NULL_RTX
5835 || !REG_P (target)
5836 || GET_MODE (target) != Pmode)
5837 target = gen_reg_rtx (Pmode);
5838 create_output_operand (&op, target, Pmode);
5839 expand_insn (icode, 1, &op);
5840 return target;
5842 error ("__builtin_thread_pointer is not supported on this target");
5843 return const0_rtx;
5846 static void
5847 expand_builtin_set_thread_pointer (tree exp)
5849 enum insn_code icode;
5850 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5851 return;
5852 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5853 if (icode != CODE_FOR_nothing)
5855 struct expand_operand op;
5856 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5857 Pmode, EXPAND_NORMAL);
5858 create_input_operand (&op, val, Pmode);
5859 expand_insn (icode, 1, &op);
5860 return;
5862 error ("__builtin_set_thread_pointer is not supported on this target");
5866 /* Emit code to restore the current value of the stack. */
5868 static void
5869 expand_stack_restore (tree var)
5871 rtx_insn *prev;
5872 rtx sa = expand_normal (var);
5874 sa = convert_memory_address (Pmode, sa);
5876 prev = get_last_insn ();
5877 emit_stack_restore (SAVE_BLOCK, sa);
5878 fixup_args_size_notes (prev, get_last_insn (), 0);
5882 /* Emit code to save the current value of the stack. */
5884 static rtx
5885 expand_stack_save (void)
5887 rtx ret = NULL_RTX;
5889 do_pending_stack_adjust ();
5890 emit_stack_save (SAVE_BLOCK, &ret);
5891 return ret;
5895 /* Expand OpenACC acc_on_device.
5897 This has to happen late (that is, not in early folding; expand_builtin_*,
5898 rather than fold_builtin_*), as we have to act differently for the host and
5899 for an acceleration device (ACCEL_COMPILER conditional). */
5901 static rtx
5902 expand_builtin_acc_on_device (tree exp, rtx target)
5904 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5905 return NULL_RTX;
5907 tree arg = CALL_EXPR_ARG (exp, 0);
5909 /* Return (arg == v1 || arg == v2) ? 1 : 0. */
5910 machine_mode v_mode = TYPE_MODE (TREE_TYPE (arg));
5911 rtx v = expand_normal (arg), v1, v2;
5912 #ifdef ACCEL_COMPILER
5913 v1 = GEN_INT (GOMP_DEVICE_NOT_HOST);
5914 v2 = GEN_INT (ACCEL_COMPILER_acc_device);
5915 #else
5916 v1 = GEN_INT (GOMP_DEVICE_NONE);
5917 v2 = GEN_INT (GOMP_DEVICE_HOST);
5918 #endif
5919 machine_mode target_mode = TYPE_MODE (integer_type_node);
5920 if (!target || !REG_P (target) || GET_MODE (target) != target_mode)
5921 target = gen_reg_rtx (target_mode);
5922 emit_move_insn (target, const1_rtx);
5923 rtx_code_label *done_label = gen_label_rtx ();
5924 do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
5925 NULL_RTX, done_label, PROB_EVEN);
5926 do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
5927 NULL_RTX, done_label, PROB_EVEN);
5928 emit_move_insn (target, const0_rtx);
5929 emit_label (done_label);
5931 return target;
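/* A usage sketch for the expander above; acc_device_nvidia is one member
   of the OpenACC acc_device_t enumeration, and the function is a
   hypothetical example.  Host compilation compares the argument against
   GOMP_DEVICE_NONE/GOMP_DEVICE_HOST, an offload compiler against its own
   device kind, as selected by the #ifdef above.  */
#if 0
#include <openacc.h>

int
on_gpu (void)
{
  return acc_on_device (acc_device_nvidia);
}
#endif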
5935 /* Expand an expression EXP that calls a built-in function,
5936 with result going to TARGET if that's convenient
5937 (and in mode MODE if that's convenient).
5938 SUBTARGET may be used as the target for computing one of EXP's operands.
5939 IGNORE is nonzero if the value is to be ignored. */
5942 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5943 int ignore)
5945 tree fndecl = get_callee_fndecl (exp);
5946 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5947 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5948 int flags;
5950 /* When ASan is enabled, we don't want to expand some memory/string
5951 builtins and rely on libsanitizer's hooks. This allows us to avoid
5952 redundant checks and be sure, that possible overflow will be detected
5953 by ASan. */
5955 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5956 return expand_call (exp, target, ignore);
5958 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5959 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5961 /* When not optimizing, generate calls to library functions for a certain
5962 set of builtins. */
5963 if (!optimize
5964 && !called_as_built_in (fndecl)
5965 && fcode != BUILT_IN_FORK
5966 && fcode != BUILT_IN_EXECL
5967 && fcode != BUILT_IN_EXECV
5968 && fcode != BUILT_IN_EXECLP
5969 && fcode != BUILT_IN_EXECLE
5970 && fcode != BUILT_IN_EXECVP
5971 && fcode != BUILT_IN_EXECVE
5972 && fcode != BUILT_IN_ALLOCA
5973 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5974 && fcode != BUILT_IN_FREE
5975 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5976 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5977 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5978 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5979 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5980 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5981 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5982 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5983 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5984 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5985 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5986 && fcode != BUILT_IN_CHKP_BNDRET)
5987 return expand_call (exp, target, ignore);
5989 /* The built-in function expanders test for target == const0_rtx
5990 to determine whether the function's result will be ignored. */
5991 if (ignore)
5992 target = const0_rtx;
5994 /* If the result of a pure or const built-in function is ignored, and
5995 none of its arguments are volatile, we can avoid expanding the
5996 built-in call and just evaluate the arguments for side-effects. */
5997 if (target == const0_rtx
5998 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5999 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6001 bool volatilep = false;
6002 tree arg;
6003 call_expr_arg_iterator iter;
6005 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6006 if (TREE_THIS_VOLATILE (arg))
6008 volatilep = true;
6009 break;
6012 if (! volatilep)
6014 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6015 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6016 return const0_rtx;
6020 /* expand_builtin_with_bounds is supposed to be used for
6021 instrumented builtin calls. */
6022 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6024 switch (fcode)
6026 CASE_FLT_FN (BUILT_IN_FABS):
6027 case BUILT_IN_FABSD32:
6028 case BUILT_IN_FABSD64:
6029 case BUILT_IN_FABSD128:
6030 target = expand_builtin_fabs (exp, target, subtarget);
6031 if (target)
6032 return target;
6033 break;
6035 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6036 target = expand_builtin_copysign (exp, target, subtarget);
6037 if (target)
6038 return target;
6039 break;
6041 /* Just do a normal library call if we were unable to fold
6042 the values. */
6043 CASE_FLT_FN (BUILT_IN_CABS):
6044 break;
6046 CASE_FLT_FN (BUILT_IN_EXP):
6047 CASE_FLT_FN (BUILT_IN_EXP10):
6048 CASE_FLT_FN (BUILT_IN_POW10):
6049 CASE_FLT_FN (BUILT_IN_EXP2):
6050 CASE_FLT_FN (BUILT_IN_EXPM1):
6051 CASE_FLT_FN (BUILT_IN_LOGB):
6052 CASE_FLT_FN (BUILT_IN_LOG):
6053 CASE_FLT_FN (BUILT_IN_LOG10):
6054 CASE_FLT_FN (BUILT_IN_LOG2):
6055 CASE_FLT_FN (BUILT_IN_LOG1P):
6056 CASE_FLT_FN (BUILT_IN_TAN):
6057 CASE_FLT_FN (BUILT_IN_ASIN):
6058 CASE_FLT_FN (BUILT_IN_ACOS):
6059 CASE_FLT_FN (BUILT_IN_ATAN):
6060 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6061 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6062 because of possible accuracy problems. */
6063 if (! flag_unsafe_math_optimizations)
6064 break;
6065 CASE_FLT_FN (BUILT_IN_SQRT):
6066 CASE_FLT_FN (BUILT_IN_FLOOR):
6067 CASE_FLT_FN (BUILT_IN_CEIL):
6068 CASE_FLT_FN (BUILT_IN_TRUNC):
6069 CASE_FLT_FN (BUILT_IN_ROUND):
6070 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6071 CASE_FLT_FN (BUILT_IN_RINT):
6072 target = expand_builtin_mathfn (exp, target, subtarget);
6073 if (target)
6074 return target;
6075 break;
6077 CASE_FLT_FN (BUILT_IN_FMA):
6078 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6079 if (target)
6080 return target;
6081 break;
6083 CASE_FLT_FN (BUILT_IN_ILOGB):
6084 if (! flag_unsafe_math_optimizations)
6085 break;
6086 CASE_FLT_FN (BUILT_IN_ISINF):
6087 CASE_FLT_FN (BUILT_IN_FINITE):
6088 case BUILT_IN_ISFINITE:
6089 case BUILT_IN_ISNORMAL:
6090 target = expand_builtin_interclass_mathfn (exp, target);
6091 if (target)
6092 return target;
6093 break;
6095 CASE_FLT_FN (BUILT_IN_ICEIL):
6096 CASE_FLT_FN (BUILT_IN_LCEIL):
6097 CASE_FLT_FN (BUILT_IN_LLCEIL):
6098 CASE_FLT_FN (BUILT_IN_LFLOOR):
6099 CASE_FLT_FN (BUILT_IN_IFLOOR):
6100 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6101 target = expand_builtin_int_roundingfn (exp, target);
6102 if (target)
6103 return target;
6104 break;
6106 CASE_FLT_FN (BUILT_IN_IRINT):
6107 CASE_FLT_FN (BUILT_IN_LRINT):
6108 CASE_FLT_FN (BUILT_IN_LLRINT):
6109 CASE_FLT_FN (BUILT_IN_IROUND):
6110 CASE_FLT_FN (BUILT_IN_LROUND):
6111 CASE_FLT_FN (BUILT_IN_LLROUND):
6112 target = expand_builtin_int_roundingfn_2 (exp, target);
6113 if (target)
6114 return target;
6115 break;
6117 CASE_FLT_FN (BUILT_IN_POWI):
6118 target = expand_builtin_powi (exp, target);
6119 if (target)
6120 return target;
6121 break;
6123 CASE_FLT_FN (BUILT_IN_ATAN2):
6124 CASE_FLT_FN (BUILT_IN_LDEXP):
6125 CASE_FLT_FN (BUILT_IN_SCALB):
6126 CASE_FLT_FN (BUILT_IN_SCALBN):
6127 CASE_FLT_FN (BUILT_IN_SCALBLN):
6128 if (! flag_unsafe_math_optimizations)
6129 break;
6131 CASE_FLT_FN (BUILT_IN_FMOD):
6132 CASE_FLT_FN (BUILT_IN_REMAINDER):
6133 CASE_FLT_FN (BUILT_IN_DREM):
6134 CASE_FLT_FN (BUILT_IN_POW):
6135 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6136 if (target)
6137 return target;
6138 break;
6140 CASE_FLT_FN (BUILT_IN_CEXPI):
6141 target = expand_builtin_cexpi (exp, target);
6142 gcc_assert (target);
6143 return target;
6145 CASE_FLT_FN (BUILT_IN_SIN):
6146 CASE_FLT_FN (BUILT_IN_COS):
6147 if (! flag_unsafe_math_optimizations)
6148 break;
6149 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6150 if (target)
6151 return target;
6152 break;
6154 CASE_FLT_FN (BUILT_IN_SINCOS):
6155 if (! flag_unsafe_math_optimizations)
6156 break;
6157 target = expand_builtin_sincos (exp);
6158 if (target)
6159 return target;
6160 break;
6162 case BUILT_IN_APPLY_ARGS:
6163 return expand_builtin_apply_args ();
6165 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6166 FUNCTION with a copy of the parameters described by
6167 ARGUMENTS, and ARGSIZE. It returns a block of memory
6168 allocated on the stack into which is stored all the registers
6169 that might possibly be used for returning the result of a
6170 function. ARGUMENTS is the value returned by
6171 __builtin_apply_args. ARGSIZE is the number of bytes of
6172 arguments that must be copied. ??? How should this value be
6173 computed? We'll also need a safe worst case value for varargs
6174 functions. */
6175 case BUILT_IN_APPLY:
6176 if (!validate_arglist (exp, POINTER_TYPE,
6177 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6178 && !validate_arglist (exp, REFERENCE_TYPE,
6179 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6180 return const0_rtx;
6181 else
6183 rtx ops[3];
6185 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6186 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6187 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6189 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6192 /* __builtin_return (RESULT) causes the function to return the
6193 value described by RESULT. RESULT is address of the block of
6194 memory returned by __builtin_apply. */
6195 case BUILT_IN_RETURN:
6196 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6197 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6198 return const0_rtx;
6200 case BUILT_IN_SAVEREGS:
6201 return expand_builtin_saveregs ();
6203 case BUILT_IN_VA_ARG_PACK:
6204 /* All valid uses of __builtin_va_arg_pack () are removed during
6205 inlining. */
6206 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6207 return const0_rtx;
6209 case BUILT_IN_VA_ARG_PACK_LEN:
6210 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6211 inlining. */
6212 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6213 return const0_rtx;
6215 /* Return the address of the first anonymous stack arg. */
6216 case BUILT_IN_NEXT_ARG:
6217 if (fold_builtin_next_arg (exp, false))
6218 return const0_rtx;
6219 return expand_builtin_next_arg ();
6221 case BUILT_IN_CLEAR_CACHE:
6222 target = expand_builtin___clear_cache (exp);
6223 if (target)
6224 return target;
6225 break;
6227 case BUILT_IN_CLASSIFY_TYPE:
6228 return expand_builtin_classify_type (exp);
6230 case BUILT_IN_CONSTANT_P:
6231 return const0_rtx;
6233 case BUILT_IN_FRAME_ADDRESS:
6234 case BUILT_IN_RETURN_ADDRESS:
6235 return expand_builtin_frame_address (fndecl, exp);
6237 /* Returns the address of the area where the structure is returned.
6238 0 otherwise. */
6239 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6240 if (call_expr_nargs (exp) != 0
6241 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6242 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6243 return const0_rtx;
6244 else
6245 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6247 case BUILT_IN_ALLOCA:
6248 case BUILT_IN_ALLOCA_WITH_ALIGN:
6249 /* If the allocation stems from the declaration of a variable-sized
6250 object, it cannot accumulate. */
6251 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6252 if (target)
6253 return target;
6254 break;
6256 case BUILT_IN_STACK_SAVE:
6257 return expand_stack_save ();
6259 case BUILT_IN_STACK_RESTORE:
6260 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6261 return const0_rtx;
6263 case BUILT_IN_BSWAP16:
6264 case BUILT_IN_BSWAP32:
6265 case BUILT_IN_BSWAP64:
6266 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6267 if (target)
6268 return target;
6269 break;
6271 CASE_INT_FN (BUILT_IN_FFS):
6272 target = expand_builtin_unop (target_mode, exp, target,
6273 subtarget, ffs_optab);
6274 if (target)
6275 return target;
6276 break;
6278 CASE_INT_FN (BUILT_IN_CLZ):
6279 target = expand_builtin_unop (target_mode, exp, target,
6280 subtarget, clz_optab);
6281 if (target)
6282 return target;
6283 break;
6285 CASE_INT_FN (BUILT_IN_CTZ):
6286 target = expand_builtin_unop (target_mode, exp, target,
6287 subtarget, ctz_optab);
6288 if (target)
6289 return target;
6290 break;
6292 CASE_INT_FN (BUILT_IN_CLRSB):
6293 target = expand_builtin_unop (target_mode, exp, target,
6294 subtarget, clrsb_optab);
6295 if (target)
6296 return target;
6297 break;
6299 CASE_INT_FN (BUILT_IN_POPCOUNT):
6300 target = expand_builtin_unop (target_mode, exp, target,
6301 subtarget, popcount_optab);
6302 if (target)
6303 return target;
6304 break;
6306 CASE_INT_FN (BUILT_IN_PARITY):
6307 target = expand_builtin_unop (target_mode, exp, target,
6308 subtarget, parity_optab);
6309 if (target)
6310 return target;
6311 break;
6313 case BUILT_IN_STRLEN:
6314 target = expand_builtin_strlen (exp, target, target_mode);
6315 if (target)
6316 return target;
6317 break;
6319 case BUILT_IN_STRCPY:
6320 target = expand_builtin_strcpy (exp, target);
6321 if (target)
6322 return target;
6323 break;
6325 case BUILT_IN_STRNCPY:
6326 target = expand_builtin_strncpy (exp, target);
6327 if (target)
6328 return target;
6329 break;
6331 case BUILT_IN_STPCPY:
6332 target = expand_builtin_stpcpy (exp, target, mode);
6333 if (target)
6334 return target;
6335 break;
6337 case BUILT_IN_MEMCPY:
6338 target = expand_builtin_memcpy (exp, target);
6339 if (target)
6340 return target;
6341 break;
6343 case BUILT_IN_MEMPCPY:
6344 target = expand_builtin_mempcpy (exp, target, mode);
6345 if (target)
6346 return target;
6347 break;
6349 case BUILT_IN_MEMSET:
6350 target = expand_builtin_memset (exp, target, mode);
6351 if (target)
6352 return target;
6353 break;
6355 case BUILT_IN_BZERO:
6356 target = expand_builtin_bzero (exp);
6357 if (target)
6358 return target;
6359 break;
6361 case BUILT_IN_STRCMP:
6362 target = expand_builtin_strcmp (exp, target);
6363 if (target)
6364 return target;
6365 break;
6367 case BUILT_IN_STRNCMP:
6368 target = expand_builtin_strncmp (exp, target, mode);
6369 if (target)
6370 return target;
6371 break;
6373 case BUILT_IN_BCMP:
6374 case BUILT_IN_MEMCMP:
6375 target = expand_builtin_memcmp (exp, target, mode);
6376 if (target)
6377 return target;
6378 break;
6380 case BUILT_IN_SETJMP:
6381 /* This should have been lowered to the builtins below. */
6382 gcc_unreachable ();
6384 case BUILT_IN_SETJMP_SETUP:
6385 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6386 and the receiver label. */
6387 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6389 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6390 VOIDmode, EXPAND_NORMAL);
6391 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6392 rtx label_r = label_rtx (label);
6394 /* This is copied from the handling of non-local gotos. */
6395 expand_builtin_setjmp_setup (buf_addr, label_r);
6396 nonlocal_goto_handler_labels
6397 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6398 nonlocal_goto_handler_labels);
6399 /* ??? Do not let expand_label treat us as such since we would
6400 not want to be both on the list of non-local labels and on
6401 the list of forced labels. */
6402 FORCED_LABEL (label) = 0;
6403 return const0_rtx;
6405 break;
6407 case BUILT_IN_SETJMP_RECEIVER:
6408 /* __builtin_setjmp_receiver is passed the receiver label. */
6409 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6411 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6412 rtx label_r = label_rtx (label);
6414 expand_builtin_setjmp_receiver (label_r);
6415 return const0_rtx;
6417 break;
6419 /* __builtin_longjmp is passed a pointer to an array of five words.
6420 It's similar to the C library longjmp function but works with
6421 __builtin_setjmp above. */
6422 case BUILT_IN_LONGJMP:
6423 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6425 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6426 VOIDmode, EXPAND_NORMAL);
6427 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6429 if (value != const1_rtx)
6431 error ("%<__builtin_longjmp%> second argument must be 1");
6432 return const0_rtx;
6435 expand_builtin_longjmp (buf_addr, value);
6436 return const0_rtx;
6438 break;
6440 case BUILT_IN_NONLOCAL_GOTO:
6441 target = expand_builtin_nonlocal_goto (exp);
6442 if (target)
6443 return target;
6444 break;
6446 /* This updates the setjmp buffer that is its argument with the value
6447 of the current stack pointer. */
6448 case BUILT_IN_UPDATE_SETJMP_BUF:
6449 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6451 rtx buf_addr
6452 = expand_normal (CALL_EXPR_ARG (exp, 0));
6454 expand_builtin_update_setjmp_buf (buf_addr);
6455 return const0_rtx;
6457 break;
6459 case BUILT_IN_TRAP:
6460 expand_builtin_trap ();
6461 return const0_rtx;
6463 case BUILT_IN_UNREACHABLE:
6464 expand_builtin_unreachable ();
6465 return const0_rtx;
6467 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6468 case BUILT_IN_SIGNBITD32:
6469 case BUILT_IN_SIGNBITD64:
6470 case BUILT_IN_SIGNBITD128:
6471 target = expand_builtin_signbit (exp, target);
6472 if (target)
6473 return target;
6474 break;
6476 /* Various hooks for the DWARF 2 __throw routine. */
6477 case BUILT_IN_UNWIND_INIT:
6478 expand_builtin_unwind_init ();
6479 return const0_rtx;
6480 case BUILT_IN_DWARF_CFA:
6481 return virtual_cfa_rtx;
6482 #ifdef DWARF2_UNWIND_INFO
6483 case BUILT_IN_DWARF_SP_COLUMN:
6484 return expand_builtin_dwarf_sp_column ();
6485 case BUILT_IN_INIT_DWARF_REG_SIZES:
6486 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6487 return const0_rtx;
6488 #endif
6489 case BUILT_IN_FROB_RETURN_ADDR:
6490 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6491 case BUILT_IN_EXTRACT_RETURN_ADDR:
6492 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6493 case BUILT_IN_EH_RETURN:
6494 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6495 CALL_EXPR_ARG (exp, 1));
6496 return const0_rtx;
6497 #ifdef EH_RETURN_DATA_REGNO
6498 case BUILT_IN_EH_RETURN_DATA_REGNO:
6499 return expand_builtin_eh_return_data_regno (exp);
6500 #endif
6501 case BUILT_IN_EXTEND_POINTER:
6502 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6503 case BUILT_IN_EH_POINTER:
6504 return expand_builtin_eh_pointer (exp);
6505 case BUILT_IN_EH_FILTER:
6506 return expand_builtin_eh_filter (exp);
6507 case BUILT_IN_EH_COPY_VALUES:
6508 return expand_builtin_eh_copy_values (exp);
6510 case BUILT_IN_VA_START:
6511 return expand_builtin_va_start (exp);
6512 case BUILT_IN_VA_END:
6513 return expand_builtin_va_end (exp);
6514 case BUILT_IN_VA_COPY:
6515 return expand_builtin_va_copy (exp);
6516 case BUILT_IN_EXPECT:
6517 return expand_builtin_expect (exp, target);
6518 case BUILT_IN_ASSUME_ALIGNED:
6519 return expand_builtin_assume_aligned (exp, target);
6520 case BUILT_IN_PREFETCH:
6521 expand_builtin_prefetch (exp);
6522 return const0_rtx;
6524 case BUILT_IN_INIT_TRAMPOLINE:
6525 return expand_builtin_init_trampoline (exp, true);
6526 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6527 return expand_builtin_init_trampoline (exp, false);
6528 case BUILT_IN_ADJUST_TRAMPOLINE:
6529 return expand_builtin_adjust_trampoline (exp);
6531 case BUILT_IN_FORK:
6532 case BUILT_IN_EXECL:
6533 case BUILT_IN_EXECV:
6534 case BUILT_IN_EXECLP:
6535 case BUILT_IN_EXECLE:
6536 case BUILT_IN_EXECVP:
6537 case BUILT_IN_EXECVE:
6538 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6539 if (target)
6540 return target;
6541 break;
6543 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6544 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6545 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6546 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6547 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6548 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6549 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6550 if (target)
6551 return target;
6552 break;
6554 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6555 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6556 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6557 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6558 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6559 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6560 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6561 if (target)
6562 return target;
6563 break;
6565 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6566 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6567 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6568 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6569 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6570 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6571 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6572 if (target)
6573 return target;
6574 break;
6576 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6577 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6578 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6579 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6580 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6581 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6582 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6583 if (target)
6584 return target;
6585 break;
6587 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6588 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6589 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6590 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6591 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6592 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6593 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6594 if (target)
6595 return target;
6596 break;
6598 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6599 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6600 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6601 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6602 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6603 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6604 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6605 if (target)
6606 return target;
6607 break;
6609 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6610 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6611 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6612 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6613 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6614 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6615 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6616 if (target)
6617 return target;
6618 break;
6620 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6621 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6622 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6623 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6624 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6625 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6626 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6627 if (target)
6628 return target;
6629 break;
6631 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6632 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6633 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6634 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6635 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6636 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6637 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6638 if (target)
6639 return target;
6640 break;
6642 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6643 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6644 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6645 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6646 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6647 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6648 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6649 if (target)
6650 return target;
6651 break;
6653 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6654 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6655 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6656 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6657 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6658 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6659 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6660 if (target)
6661 return target;
6662 break;
6664 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6665 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6666 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6667 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6668 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6669 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6670 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6671 if (target)
6672 return target;
6673 break;
6675 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6676 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6677 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6678 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6679 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6680 if (mode == VOIDmode)
6681 mode = TYPE_MODE (boolean_type_node);
6682 if (!target || !register_operand (target, mode))
6683 target = gen_reg_rtx (mode);
6685 mode = get_builtin_sync_mode
6686 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6687 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6688 if (target)
6689 return target;
6690 break;
6692 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6693 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6694 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6695 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6696 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6697 mode = get_builtin_sync_mode
6698 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6699 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6700 if (target)
6701 return target;
6702 break;
6704 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6705 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6706 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6707 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6708 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6709 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6710 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6711 if (target)
6712 return target;
6713 break;
6715 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6716 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6717 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6718 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6719 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6720 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6721 expand_builtin_sync_lock_release (mode, exp);
6722 return const0_rtx;
6724 case BUILT_IN_SYNC_SYNCHRONIZE:
6725 expand_builtin_sync_synchronize ();
6726 return const0_rtx;
6728 case BUILT_IN_ATOMIC_EXCHANGE_1:
6729 case BUILT_IN_ATOMIC_EXCHANGE_2:
6730 case BUILT_IN_ATOMIC_EXCHANGE_4:
6731 case BUILT_IN_ATOMIC_EXCHANGE_8:
6732 case BUILT_IN_ATOMIC_EXCHANGE_16:
6733 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6734 target = expand_builtin_atomic_exchange (mode, exp, target);
6735 if (target)
6736 return target;
6737 break;
6739 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6740 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6741 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6742 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6743 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6745 unsigned int nargs, z;
6746 vec<tree, va_gc> *vec;
6748 mode =
6749 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6750 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6751 if (target)
6752 return target;
6754 /* If this is turned into an external library call, the weak parameter
6755 must be dropped to match the expected parameter list. */
6756 nargs = call_expr_nargs (exp);
6757 vec_alloc (vec, nargs - 1);
6758 for (z = 0; z < 3; z++)
6759 vec->quick_push (CALL_EXPR_ARG (exp, z));
6760 /* Skip the boolean weak parameter. */
6761 for (z = 4; z < 6; z++)
6762 vec->quick_push (CALL_EXPR_ARG (exp, z));
6763 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6764 break;
6767 case BUILT_IN_ATOMIC_LOAD_1:
6768 case BUILT_IN_ATOMIC_LOAD_2:
6769 case BUILT_IN_ATOMIC_LOAD_4:
6770 case BUILT_IN_ATOMIC_LOAD_8:
6771 case BUILT_IN_ATOMIC_LOAD_16:
6772 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6773 target = expand_builtin_atomic_load (mode, exp, target);
6774 if (target)
6775 return target;
6776 break;
6778 case BUILT_IN_ATOMIC_STORE_1:
6779 case BUILT_IN_ATOMIC_STORE_2:
6780 case BUILT_IN_ATOMIC_STORE_4:
6781 case BUILT_IN_ATOMIC_STORE_8:
6782 case BUILT_IN_ATOMIC_STORE_16:
6783 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6784 target = expand_builtin_atomic_store (mode, exp);
6785 if (target)
6786 return const0_rtx;
6787 break;
6789 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6790 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6791 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6792 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6793 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6795 enum built_in_function lib;
6796 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6797 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6798 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6799 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6800 ignore, lib);
6801 if (target)
6802 return target;
6803 break;
6805 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6806 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6807 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6808 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6809 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6811 enum built_in_function lib;
6812 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6813 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6814 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6815 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6816 ignore, lib);
6817 if (target)
6818 return target;
6819 break;
6821 case BUILT_IN_ATOMIC_AND_FETCH_1:
6822 case BUILT_IN_ATOMIC_AND_FETCH_2:
6823 case BUILT_IN_ATOMIC_AND_FETCH_4:
6824 case BUILT_IN_ATOMIC_AND_FETCH_8:
6825 case BUILT_IN_ATOMIC_AND_FETCH_16:
6827 enum built_in_function lib;
6828 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6829 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6830 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6831 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6832 ignore, lib);
6833 if (target)
6834 return target;
6835 break;
6837 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6838 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6839 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6840 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6841 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6843 enum built_in_function lib;
6844 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6845 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6846 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6847 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6848 ignore, lib);
6849 if (target)
6850 return target;
6851 break;
6853 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6854 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6855 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6856 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6857 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6859 enum built_in_function lib;
6860 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6861 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6862 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6863 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6864 ignore, lib);
6865 if (target)
6866 return target;
6867 break;
6869 case BUILT_IN_ATOMIC_OR_FETCH_1:
6870 case BUILT_IN_ATOMIC_OR_FETCH_2:
6871 case BUILT_IN_ATOMIC_OR_FETCH_4:
6872 case BUILT_IN_ATOMIC_OR_FETCH_8:
6873 case BUILT_IN_ATOMIC_OR_FETCH_16:
6875 enum built_in_function lib;
6876 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6877 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6878 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6879 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6880 ignore, lib);
6881 if (target)
6882 return target;
6883 break;
6885 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6886 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6887 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6888 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6889 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6890 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6891 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6892 ignore, BUILT_IN_NONE);
6893 if (target)
6894 return target;
6895 break;
6897 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6898 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6899 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6900 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6901 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6902 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6903 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6904 ignore, BUILT_IN_NONE);
6905 if (target)
6906 return target;
6907 break;
6909 case BUILT_IN_ATOMIC_FETCH_AND_1:
6910 case BUILT_IN_ATOMIC_FETCH_AND_2:
6911 case BUILT_IN_ATOMIC_FETCH_AND_4:
6912 case BUILT_IN_ATOMIC_FETCH_AND_8:
6913 case BUILT_IN_ATOMIC_FETCH_AND_16:
6914 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6915 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6916 ignore, BUILT_IN_NONE);
6917 if (target)
6918 return target;
6919 break;
6921 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6922 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6923 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6924 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6925 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6926 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6927 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6928 ignore, BUILT_IN_NONE);
6929 if (target)
6930 return target;
6931 break;
6933 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6934 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6935 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6936 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6937 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6938 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6939 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6940 ignore, BUILT_IN_NONE);
6941 if (target)
6942 return target;
6943 break;
6945 case BUILT_IN_ATOMIC_FETCH_OR_1:
6946 case BUILT_IN_ATOMIC_FETCH_OR_2:
6947 case BUILT_IN_ATOMIC_FETCH_OR_4:
6948 case BUILT_IN_ATOMIC_FETCH_OR_8:
6949 case BUILT_IN_ATOMIC_FETCH_OR_16:
6950 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6951 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6952 ignore, BUILT_IN_NONE);
6953 if (target)
6954 return target;
6955 break;
6957 case BUILT_IN_ATOMIC_TEST_AND_SET:
6958 return expand_builtin_atomic_test_and_set (exp, target);
6960 case BUILT_IN_ATOMIC_CLEAR:
6961 return expand_builtin_atomic_clear (exp);
6963 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6964 return expand_builtin_atomic_always_lock_free (exp);
6966 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6967 target = expand_builtin_atomic_is_lock_free (exp);
6968 if (target)
6969 return target;
6970 break;
6972 case BUILT_IN_ATOMIC_THREAD_FENCE:
6973 expand_builtin_atomic_thread_fence (exp);
6974 return const0_rtx;
6976 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6977 expand_builtin_atomic_signal_fence (exp);
6978 return const0_rtx;
6980 case BUILT_IN_OBJECT_SIZE:
6981 return expand_builtin_object_size (exp);
6983 case BUILT_IN_MEMCPY_CHK:
6984 case BUILT_IN_MEMPCPY_CHK:
6985 case BUILT_IN_MEMMOVE_CHK:
6986 case BUILT_IN_MEMSET_CHK:
6987 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6988 if (target)
6989 return target;
6990 break;
6992 case BUILT_IN_STRCPY_CHK:
6993 case BUILT_IN_STPCPY_CHK:
6994 case BUILT_IN_STRNCPY_CHK:
6995 case BUILT_IN_STPNCPY_CHK:
6996 case BUILT_IN_STRCAT_CHK:
6997 case BUILT_IN_STRNCAT_CHK:
6998 case BUILT_IN_SNPRINTF_CHK:
6999 case BUILT_IN_VSNPRINTF_CHK:
7000 maybe_emit_chk_warning (exp, fcode);
7001 break;
7003 case BUILT_IN_SPRINTF_CHK:
7004 case BUILT_IN_VSPRINTF_CHK:
7005 maybe_emit_sprintf_chk_warning (exp, fcode);
7006 break;
7008 case BUILT_IN_FREE:
7009 if (warn_free_nonheap_object)
7010 maybe_emit_free_warning (exp);
7011 break;
7013 case BUILT_IN_THREAD_POINTER:
7014 return expand_builtin_thread_pointer (exp, target);
7016 case BUILT_IN_SET_THREAD_POINTER:
7017 expand_builtin_set_thread_pointer (exp);
7018 return const0_rtx;
7020 case BUILT_IN_CILK_DETACH:
7021 expand_builtin_cilk_detach (exp);
7022 return const0_rtx;
7024 case BUILT_IN_CILK_POP_FRAME:
7025 expand_builtin_cilk_pop_frame (exp);
7026 return const0_rtx;
7028 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7029 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7030 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7031 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7032 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7033 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7034 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7035 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7036 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7037 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7038 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7039 /* We allow user CHKP builtins if the Pointer Bounds
7040 Checker is off. */
7041 if (!chkp_function_instrumented_p (current_function_decl))
7043 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7044 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7045 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7046 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7047 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7048 return expand_normal (CALL_EXPR_ARG (exp, 0));
7049 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7050 return expand_normal (size_zero_node);
7051 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7052 return expand_normal (size_int (-1));
7053 else
7054 return const0_rtx;
7056 /* FALLTHROUGH */
7058 case BUILT_IN_CHKP_BNDMK:
7059 case BUILT_IN_CHKP_BNDSTX:
7060 case BUILT_IN_CHKP_BNDCL:
7061 case BUILT_IN_CHKP_BNDCU:
7062 case BUILT_IN_CHKP_BNDLDX:
7063 case BUILT_IN_CHKP_BNDRET:
7064 case BUILT_IN_CHKP_INTERSECT:
7065 case BUILT_IN_CHKP_NARROW:
7066 case BUILT_IN_CHKP_EXTRACT_LOWER:
7067 case BUILT_IN_CHKP_EXTRACT_UPPER:
7068 /* A software implementation of the Pointer Bounds Checker is not yet
7069 implemented; target support is required. */
7070 error ("your target platform does not support -fcheck-pointer-bounds");
7071 break;
7073 case BUILT_IN_ACC_ON_DEVICE:
7074 target = expand_builtin_acc_on_device (exp, target);
7075 if (target)
7076 return target;
7077 break;
7079 default: /* Just do a library call if this is an unknown builtin. */
7080 break;
7083 /* The switch statement above can drop through to cause the function
7084 to be called normally. */
7085 return expand_call (exp, target, ignore);
7088 /* Similar to expand_builtin but is used for instrumented calls. */
7091 expand_builtin_with_bounds (tree exp, rtx target,
7092 rtx subtarget ATTRIBUTE_UNUSED,
7093 machine_mode mode, int ignore)
7095 tree fndecl = get_callee_fndecl (exp);
7096 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7098 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7100 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7101 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7103 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7104 && fcode < END_CHKP_BUILTINS);
7106 switch (fcode)
7108 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7109 target = expand_builtin_memcpy_with_bounds (exp, target);
7110 if (target)
7111 return target;
7112 break;
7114 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7115 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7116 if (target)
7117 return target;
7118 break;
7120 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7121 target = expand_builtin_memset_with_bounds (exp, target, mode);
7122 if (target)
7123 return target;
7124 break;
7126 default:
7127 break;
7130 /* The switch statement above can drop through to cause the function
7131 to be called normally. */
7132 return expand_call (exp, target, ignore);
7135 /* Determine whether a tree node represents a call to a built-in
7136 function. If the tree T is a call to a built-in function with
7137 the right number of arguments of the appropriate types, return
7138 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7139 Otherwise the return value is END_BUILTINS. */
7141 enum built_in_function
7142 builtin_mathfn_code (const_tree t)
7144 const_tree fndecl, arg, parmlist;
7145 const_tree argtype, parmtype;
7146 const_call_expr_arg_iterator iter;
7148 if (TREE_CODE (t) != CALL_EXPR
7149 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7150 return END_BUILTINS;
7152 fndecl = get_callee_fndecl (t);
7153 if (fndecl == NULL_TREE
7154 || TREE_CODE (fndecl) != FUNCTION_DECL
7155 || ! DECL_BUILT_IN (fndecl)
7156 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7157 return END_BUILTINS;
7159 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7160 init_const_call_expr_arg_iterator (t, &iter);
7161 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7163 /* If a function doesn't take a variable number of arguments,
7164 the last element in the list will have type `void'. */
7165 parmtype = TREE_VALUE (parmlist);
7166 if (VOID_TYPE_P (parmtype))
7168 if (more_const_call_expr_args_p (&iter))
7169 return END_BUILTINS;
7170 return DECL_FUNCTION_CODE (fndecl);
7173 if (! more_const_call_expr_args_p (&iter))
7174 return END_BUILTINS;
7176 arg = next_const_call_expr_arg (&iter);
7177 argtype = TREE_TYPE (arg);
7179 if (SCALAR_FLOAT_TYPE_P (parmtype))
7181 if (! SCALAR_FLOAT_TYPE_P (argtype))
7182 return END_BUILTINS;
7184 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7186 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7187 return END_BUILTINS;
7189 else if (POINTER_TYPE_P (parmtype))
7191 if (! POINTER_TYPE_P (argtype))
7192 return END_BUILTINS;
7194 else if (INTEGRAL_TYPE_P (parmtype))
7196 if (! INTEGRAL_TYPE_P (argtype))
7197 return END_BUILTINS;
7199 else
7200 return END_BUILTINS;
7203 /* Variable-length argument list. */
7204 return DECL_FUNCTION_CODE (fndecl);
7207 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7208 evaluate to a constant. */
7210 static tree
7211 fold_builtin_constant_p (tree arg)
7213 /* We return 1 for a numeric type that's known to be a constant
7214 value at compile-time or for an aggregate type that's a
7215 literal constant. */
7216 STRIP_NOPS (arg);
7218 /* If we know this is a constant, return the constant one. */
7219 if (CONSTANT_CLASS_P (arg)
7220 || (TREE_CODE (arg) == CONSTRUCTOR
7221 && TREE_CONSTANT (arg)))
7222 return integer_one_node;
7223 if (TREE_CODE (arg) == ADDR_EXPR)
7225 tree op = TREE_OPERAND (arg, 0);
7226 if (TREE_CODE (op) == STRING_CST
7227 || (TREE_CODE (op) == ARRAY_REF
7228 && integer_zerop (TREE_OPERAND (op, 1))
7229 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7230 return integer_one_node;
7233 /* If this expression has side effects, show we don't know it to be a
7234 constant. Likewise if it's a pointer or aggregate type, since in
7235 those cases we only want literals; those are only optimized
7236 when generating RTL, not later.
7237 And finally, if we are compiling an initializer, not code, we
7238 need to return a definite result now; there's not going to be any
7239 more optimization done. */
7240 if (TREE_SIDE_EFFECTS (arg)
7241 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7242 || POINTER_TYPE_P (TREE_TYPE (arg))
7243 || cfun == 0
7244 || folding_initializer
7245 || force_folding_builtin_constant_p)
7246 return integer_zero_node;
7248 return NULL_TREE;
7251 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7252 return it as a truthvalue. */
7254 static tree
7255 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7256 tree predictor)
7258 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7260 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7261 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7262 ret_type = TREE_TYPE (TREE_TYPE (fn));
7263 pred_type = TREE_VALUE (arg_types);
7264 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7266 pred = fold_convert_loc (loc, pred_type, pred);
7267 expected = fold_convert_loc (loc, expected_type, expected);
7268 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7269 predictor);
7271 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7272 build_int_cst (ret_type, 0));
7275 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7276 NULL_TREE if no simplification is possible. */
7278 tree
7279 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7281 tree inner, fndecl, inner_arg0;
7282 enum tree_code code;
7284 /* Distribute the expected value over short-circuiting operators.
7285 See through the cast from truthvalue_type_node to long. */
7286 inner_arg0 = arg0;
7287 while (CONVERT_EXPR_P (inner_arg0)
7288 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7289 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7290 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7292 /* If this is a builtin_expect within a builtin_expect, keep the
7293 inner one. See through a comparison against a constant; it
7294 might have been added to create a truthvalue. */
7295 inner = inner_arg0;
7297 if (COMPARISON_CLASS_P (inner)
7298 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7299 inner = TREE_OPERAND (inner, 0);
7301 if (TREE_CODE (inner) == CALL_EXPR
7302 && (fndecl = get_callee_fndecl (inner))
7303 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7304 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7305 return arg0;
7307 inner = inner_arg0;
7308 code = TREE_CODE (inner);
7309 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7311 tree op0 = TREE_OPERAND (inner, 0);
7312 tree op1 = TREE_OPERAND (inner, 1);
7314 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7315 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7316 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7318 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
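/* For example, __builtin_expect (a && b, 1) is rewritten here as
   (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0),
   converted back to the original type, so the expectation reaches
   both conditional branches. */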
7321 /* If the argument isn't invariant then there's nothing else we can do. */
7322 if (!TREE_CONSTANT (inner_arg0))
7323 return NULL_TREE;
7325 /* If we expect that a comparison against the argument will fold to
7326 a constant, return the constant. In practice, this means a true
7327 constant or the address of a non-weak symbol. */
7328 inner = inner_arg0;
7329 STRIP_NOPS (inner);
7330 if (TREE_CODE (inner) == ADDR_EXPR)
7334 inner = TREE_OPERAND (inner, 0);
7336 while (TREE_CODE (inner) == COMPONENT_REF
7337 || TREE_CODE (inner) == ARRAY_REF);
7338 if ((TREE_CODE (inner) == VAR_DECL
7339 || TREE_CODE (inner) == FUNCTION_DECL)
7340 && DECL_WEAK (inner))
7341 return NULL_TREE;
7344 /* Otherwise, ARG0 already has the proper type for the return value. */
7345 return arg0;
7348 /* Fold a call to __builtin_classify_type with argument ARG. */
7350 static tree
7351 fold_builtin_classify_type (tree arg)
7353 if (arg == 0)
7354 return build_int_cst (integer_type_node, no_type_class);
7356 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7359 /* Fold a call to __builtin_strlen with argument ARG. */
7361 static tree
7362 fold_builtin_strlen (location_t loc, tree type, tree arg)
7364 if (!validate_arg (arg, POINTER_TYPE))
7365 return NULL_TREE;
7366 else
7368 tree len = c_strlen (arg, 0);
7370 if (len)
7371 return fold_convert_loc (loc, type, len);
7373 return NULL_TREE;
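/* E.g. __builtin_strlen ("abcd") folds via c_strlen to the constant 4
   in the call's return type. */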
7377 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7379 static tree
7380 fold_builtin_inf (location_t loc, tree type, int warn)
7382 REAL_VALUE_TYPE real;
7384 /* __builtin_inff is intended to be usable to define INFINITY on all
7385 targets. If an infinity is not available, INFINITY expands "to a
7386 positive constant of type float that overflows at translation
7387 time", footnote "In this case, using INFINITY will violate the
7388 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7389 Thus we pedwarn to ensure this constraint violation is
7390 diagnosed. */
7391 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7392 pedwarn (loc, 0, "target format does not support infinity");
7394 real_inf (&real);
7395 return build_real (type, real);
7398 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7400 static tree
7401 fold_builtin_nan (tree arg, tree type, int quiet)
7403 REAL_VALUE_TYPE real;
7404 const char *str;
7406 if (!validate_arg (arg, POINTER_TYPE))
7407 return NULL_TREE;
7408 str = c_getstr (arg);
7409 if (!str)
7410 return NULL_TREE;
7412 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7413 return NULL_TREE;
7415 return build_real (type, real);
7418 /* Return true if the floating point expression T has an integer value.
7419 We also allow +Inf, -Inf and NaN to be considered integer values. */
7421 static bool
7422 integer_valued_real_p (tree t)
7424 switch (TREE_CODE (t))
7426 case FLOAT_EXPR:
7427 return true;
7429 case ABS_EXPR:
7430 case SAVE_EXPR:
7431 return integer_valued_real_p (TREE_OPERAND (t, 0));
7433 case COMPOUND_EXPR:
7434 case MODIFY_EXPR:
7435 case BIND_EXPR:
7436 return integer_valued_real_p (TREE_OPERAND (t, 1));
7438 case PLUS_EXPR:
7439 case MINUS_EXPR:
7440 case MULT_EXPR:
7441 case MIN_EXPR:
7442 case MAX_EXPR:
7443 return integer_valued_real_p (TREE_OPERAND (t, 0))
7444 && integer_valued_real_p (TREE_OPERAND (t, 1));
7446 case COND_EXPR:
7447 return integer_valued_real_p (TREE_OPERAND (t, 1))
7448 && integer_valued_real_p (TREE_OPERAND (t, 2));
7450 case REAL_CST:
7451 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7453 CASE_CONVERT:
7455 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7456 if (TREE_CODE (type) == INTEGER_TYPE)
7457 return true;
7458 if (TREE_CODE (type) == REAL_TYPE)
7459 return integer_valued_real_p (TREE_OPERAND (t, 0));
7460 break;
7463 case CALL_EXPR:
7464 switch (builtin_mathfn_code (t))
7466 CASE_FLT_FN (BUILT_IN_CEIL):
7467 CASE_FLT_FN (BUILT_IN_FLOOR):
7468 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7469 CASE_FLT_FN (BUILT_IN_RINT):
7470 CASE_FLT_FN (BUILT_IN_ROUND):
7471 CASE_FLT_FN (BUILT_IN_TRUNC):
7472 return true;
7474 CASE_FLT_FN (BUILT_IN_FMIN):
7475 CASE_FLT_FN (BUILT_IN_FMAX):
7476 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7477 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7479 default:
7480 break;
7482 break;
7484 default:
7485 break;
7487 return false;
7490 /* FNDECL is assumed to be a builtin where truncation can be propagated
7491 across (for instance floor((double)f) == (double)floorf (f)).
7492 Do the transformation for a call with argument ARG. */
7494 static tree
7495 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7497 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7499 if (!validate_arg (arg, REAL_TYPE))
7500 return NULL_TREE;
7502 /* Integer rounding functions are idempotent. */
7503 if (fcode == builtin_mathfn_code (arg))
7504 return arg;
7506 /* If argument is already integer valued, and we don't need to worry
7507 about setting errno, there's no need to perform rounding. */
7508 if (! flag_errno_math && integer_valued_real_p (arg))
7509 return arg;
7511 if (optimize)
7513 tree arg0 = strip_float_extensions (arg);
7514 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7515 tree newtype = TREE_TYPE (arg0);
7516 tree decl;
7518 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7519 && (decl = mathfn_built_in (newtype, fcode)))
7520 return fold_convert_loc (loc, ftype,
7521 build_call_expr_loc (loc, decl, 1,
7522 fold_convert_loc (loc,
7523 newtype,
7524 arg0)));
7526 return NULL_TREE;
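/* E.g. with optimization on, floor ((double) f) for float F becomes
   (double) floorf (f): rounding in the narrower type yields the same
   value, so the widening conversion can be done after the call. */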
7529 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7530 the argument, for instance lround((double)f) -> lroundf (f).
7531 Do the transformation for a call with argument ARG. */
7533 static tree
7534 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7536 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7538 if (!validate_arg (arg, REAL_TYPE))
7539 return NULL_TREE;
7541 /* If argument is already integer valued, and we don't need to worry
7542 about setting errno, there's no need to perform rounding. */
7543 if (! flag_errno_math && integer_valued_real_p (arg))
7544 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7545 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7547 if (optimize)
7549 tree ftype = TREE_TYPE (arg);
7550 tree arg0 = strip_float_extensions (arg);
7551 tree newtype = TREE_TYPE (arg0);
7552 tree decl;
7554 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7555 && (decl = mathfn_built_in (newtype, fcode)))
7556 return build_call_expr_loc (loc, decl, 1,
7557 fold_convert_loc (loc, newtype, arg0));
7560 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7561 sizeof (int) == sizeof (long). */
7562 if (TYPE_PRECISION (integer_type_node)
7563 == TYPE_PRECISION (long_integer_type_node))
7565 tree newfn = NULL_TREE;
7566 switch (fcode)
7568 CASE_FLT_FN (BUILT_IN_ICEIL):
7569 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7570 break;
7572 CASE_FLT_FN (BUILT_IN_IFLOOR):
7573 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7574 break;
7576 CASE_FLT_FN (BUILT_IN_IROUND):
7577 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7578 break;
7580 CASE_FLT_FN (BUILT_IN_IRINT):
7581 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7582 break;
7584 default:
7585 break;
7588 if (newfn)
7590 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7591 return fold_convert_loc (loc,
7592 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
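/* E.g. when int and long have the same precision, iround (x) becomes
   (int) lround (x); canonicalizing on the long variants means later
   passes only have to recognize one family of rounding calls. */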
7596 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7597 sizeof (long long) == sizeof (long). */
7598 if (TYPE_PRECISION (long_long_integer_type_node)
7599 == TYPE_PRECISION (long_integer_type_node))
7601 tree newfn = NULL_TREE;
7602 switch (fcode)
7604 CASE_FLT_FN (BUILT_IN_LLCEIL):
7605 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7606 break;
7608 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7609 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7610 break;
7612 CASE_FLT_FN (BUILT_IN_LLROUND):
7613 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7614 break;
7616 CASE_FLT_FN (BUILT_IN_LLRINT):
7617 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7618 break;
7620 default:
7621 break;
7624 if (newfn)
7626 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7627 return fold_convert_loc (loc,
7628 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7632 return NULL_TREE;
7635 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7636 return type. Return NULL_TREE if no simplification can be made. */
7638 static tree
7639 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7641 tree res;
7643 if (!validate_arg (arg, COMPLEX_TYPE)
7644 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7645 return NULL_TREE;
7647 /* Calculate the result when the argument is a constant. */
7648 if (TREE_CODE (arg) == COMPLEX_CST
7649 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7650 type, mpfr_hypot)))
7651 return res;
7653 if (TREE_CODE (arg) == COMPLEX_EXPR)
7655 tree real = TREE_OPERAND (arg, 0);
7656 tree imag = TREE_OPERAND (arg, 1);
7658 /* If either part is zero, cabs is fabs of the other. */
7659 if (real_zerop (real))
7660 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7661 if (real_zerop (imag))
7662 return fold_build1_loc (loc, ABS_EXPR, type, real);
7664 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7665 if (flag_unsafe_math_optimizations
7666 && operand_equal_p (real, imag, OEP_PURE_SAME))
7668 const REAL_VALUE_TYPE sqrt2_trunc
7669 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7670 STRIP_NOPS (real);
7671 return fold_build2_loc (loc, MULT_EXPR, type,
7672 fold_build1_loc (loc, ABS_EXPR, type, real),
7673 build_real (type, sqrt2_trunc));
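/* This uses |x + x*I| = sqrt (x*x + x*x) = fabs (x) * sqrt (2), which
   is exact only up to rounding, hence the unsafe-math guard. */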
7677 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7678 if (TREE_CODE (arg) == NEGATE_EXPR
7679 || TREE_CODE (arg) == CONJ_EXPR)
7680 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7682 /* Don't do this when optimizing for size. */
7683 if (flag_unsafe_math_optimizations
7684 && optimize && optimize_function_for_speed_p (cfun))
7686 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7688 if (sqrtfn != NULL_TREE)
7690 tree rpart, ipart, result;
7692 arg = builtin_save_expr (arg);
7694 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7695 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7697 rpart = builtin_save_expr (rpart);
7698 ipart = builtin_save_expr (ipart);
7700 result = fold_build2_loc (loc, PLUS_EXPR, type,
7701 fold_build2_loc (loc, MULT_EXPR, type,
7702 rpart, rpart),
7703 fold_build2_loc (loc, MULT_EXPR, type,
7704 ipart, ipart));
7706 return build_call_expr_loc (loc, sqrtfn, 1, result);
7710 return NULL_TREE;
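/* The expansion above computes cabs (z) as
   sqrt (creal (z)*creal (z) + cimag (z)*cimag (z)). Note the squares
   can overflow where a library cabs would not, which is another
   reason the fold requires flag_unsafe_math_optimizations. */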
7713 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7714 complex tree type of the result. If NEG is true, the imaginary
7715 zero is negative. */
7717 static tree
7718 build_complex_cproj (tree type, bool neg)
7720 REAL_VALUE_TYPE rinf, rzero = dconst0;
7722 real_inf (&rinf);
7723 rzero.sign = neg;
7724 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7725 build_real (TREE_TYPE (type), rzero));
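/* E.g. build_complex_cproj (type, true) yields the constant inf - 0i;
   per C99, cproj maps every argument with an infinite part to
   (inf, +/-0), the imaginary zero taking the sign of the imaginary
   part. */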
7728 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7729 return type. Return NULL_TREE if no simplification can be made. */
7731 static tree
7732 fold_builtin_cproj (location_t loc, tree arg, tree type)
7734 if (!validate_arg (arg, COMPLEX_TYPE)
7735 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7736 return NULL_TREE;
7738 /* If there are no infinities, return arg. */
7739 if (! HONOR_INFINITIES (type))
7740 return non_lvalue_loc (loc, arg);
7742 /* Calculate the result when the argument is a constant. */
7743 if (TREE_CODE (arg) == COMPLEX_CST)
7745 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7746 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7748 if (real_isinf (real) || real_isinf (imag))
7749 return build_complex_cproj (type, imag->sign);
7750 else
7751 return arg;
7753 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7755 tree real = TREE_OPERAND (arg, 0);
7756 tree imag = TREE_OPERAND (arg, 1);
7758 STRIP_NOPS (real);
7759 STRIP_NOPS (imag);
7761 /* If the real part is inf and the imag part is known to be
7762 nonnegative, return (inf + 0i). Remember side-effects are
7763 possible in the imag part. */
7764 if (TREE_CODE (real) == REAL_CST
7765 && real_isinf (TREE_REAL_CST_PTR (real))
7766 && tree_expr_nonnegative_p (imag))
7767 return omit_one_operand_loc (loc, type,
7768 build_complex_cproj (type, false),
7769 arg);
7771 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7772 Remember side-effects are possible in the real part. */
7773 if (TREE_CODE (imag) == REAL_CST
7774 && real_isinf (TREE_REAL_CST_PTR (imag)))
7775 return
7776 omit_one_operand_loc (loc, type,
7777 build_complex_cproj (type, TREE_REAL_CST_PTR
7778 (imag)->sign), arg);
7781 return NULL_TREE;
7784 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7785 Return NULL_TREE if no simplification can be made. */
7787 static tree
7788 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7791 enum built_in_function fcode;
7792 tree res;
7794 if (!validate_arg (arg, REAL_TYPE))
7795 return NULL_TREE;
7797 /* Calculate the result when the argument is a constant. */
7798 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7799 return res;
7801 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7802 fcode = builtin_mathfn_code (arg);
7803 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7805 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7806 arg = fold_build2_loc (loc, MULT_EXPR, type,
7807 CALL_EXPR_ARG (arg, 0),
7808 build_real (type, dconsthalf));
7809 return build_call_expr_loc (loc, expfn, 1, arg);
7812 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7813 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7815 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7817 if (powfn)
7819 tree arg0 = CALL_EXPR_ARG (arg, 0);
7820 tree tree_root;
7821 /* The inner root was either sqrt or cbrt. */
7822 /* This was a conditional expression but it triggered a bug
7823 in Sun C 5.5. */
7824 REAL_VALUE_TYPE dconstroot;
7825 if (BUILTIN_SQRT_P (fcode))
7826 dconstroot = dconsthalf;
7827 else
7828 dconstroot = dconst_third ();
7830 /* Adjust for the outer root. */
7831 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7832 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7833 tree_root = build_real (type, dconstroot);
7834 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7838 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7839 if (flag_unsafe_math_optimizations
7840 && (fcode == BUILT_IN_POW
7841 || fcode == BUILT_IN_POWF
7842 || fcode == BUILT_IN_POWL))
7844 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7845 tree arg0 = CALL_EXPR_ARG (arg, 0);
7846 tree arg1 = CALL_EXPR_ARG (arg, 1);
7847 tree narg1;
7848 if (!tree_expr_nonnegative_p (arg0))
7849 arg0 = build1 (ABS_EXPR, type, arg0);
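/* The |x| matters: for negative X, pow (x, y) is nonnegative (so its
   sqrt is defined) only when Y is an even integer, and in that case
   pow (x, y) == pow (|x|, y). */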
7850 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7851 build_real (type, dconsthalf));
7852 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7855 return NULL_TREE;
7858 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7859 Return NULL_TREE if no simplification can be made. */
7861 static tree
7862 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7864 const enum built_in_function fcode = builtin_mathfn_code (arg);
7865 tree res;
7867 if (!validate_arg (arg, REAL_TYPE))
7868 return NULL_TREE;
7870 /* Calculate the result when the argument is a constant. */
7871 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7872 return res;
7874 if (flag_unsafe_math_optimizations)
7876 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7877 if (BUILTIN_EXPONENT_P (fcode))
7879 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7880 const REAL_VALUE_TYPE third_trunc =
7881 real_value_truncate (TYPE_MODE (type), dconst_third ());
7882 arg = fold_build2_loc (loc, MULT_EXPR, type,
7883 CALL_EXPR_ARG (arg, 0),
7884 build_real (type, third_trunc));
7885 return build_call_expr_loc (loc, expfn, 1, arg);
7888 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7889 if (BUILTIN_SQRT_P (fcode))
7891 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7893 if (powfn)
7895 tree arg0 = CALL_EXPR_ARG (arg, 0);
7896 tree tree_root;
7897 REAL_VALUE_TYPE dconstroot = dconst_third ();
7899 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7900 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7901 tree_root = build_real (type, dconstroot);
7902 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7906 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7907 if (BUILTIN_CBRT_P (fcode))
7909 tree arg0 = CALL_EXPR_ARG (arg, 0);
7910 if (tree_expr_nonnegative_p (arg0))
7912 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7914 if (powfn)
7916 tree tree_root;
7917 REAL_VALUE_TYPE dconstroot;
7919 real_arithmetic (&dconstroot, MULT_EXPR,
7920 dconst_third_ptr (), dconst_third_ptr ());
7921 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7922 tree_root = build_real (type, dconstroot);
7923 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7928 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7929 if (fcode == BUILT_IN_POW
7930 || fcode == BUILT_IN_POWF
7931 || fcode == BUILT_IN_POWL)
7933 tree arg00 = CALL_EXPR_ARG (arg, 0);
7934 tree arg01 = CALL_EXPR_ARG (arg, 1);
7935 if (tree_expr_nonnegative_p (arg00))
7937 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7938 const REAL_VALUE_TYPE dconstroot
7939 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7940 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7941 build_real (type, dconstroot));
7942 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7946 return NULL_TREE;
7949 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7950 TYPE is the type of the return value. Return NULL_TREE if no
7951 simplification can be made. */
7953 static tree
7954 fold_builtin_cos (location_t loc,
7955 tree arg, tree type, tree fndecl)
7957 tree res, narg;
7959 if (!validate_arg (arg, REAL_TYPE))
7960 return NULL_TREE;
7962 /* Calculate the result when the argument is a constant. */
7963 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7964 return res;
7966 /* Optimize cos(-x) into cos (x). */
7967 if ((narg = fold_strip_sign_ops (arg)))
7968 return build_call_expr_loc (loc, fndecl, 1, narg);
7970 return NULL_TREE;
7973 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7974 Return NULL_TREE if no simplification can be made. */
7976 static tree
7977 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7979 if (validate_arg (arg, REAL_TYPE))
7981 tree res, narg;
7983 /* Calculate the result when the argument is a constant. */
7984 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7985 return res;
7987 /* Optimize cosh(-x) into cosh (x). */
7988 if ((narg = fold_strip_sign_ops (arg)))
7989 return build_call_expr_loc (loc, fndecl, 1, narg);
7992 return NULL_TREE;
7995 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7996 argument ARG. TYPE is the type of the return value. Return
7997 NULL_TREE if no simplification can be made. */
7999 static tree
8000 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
8001 bool hyper)
8003 if (validate_arg (arg, COMPLEX_TYPE)
8004 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8006 tree tmp;
8008 /* Calculate the result when the argument is a constant. */
8009 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
8010 return tmp;
8012 /* Optimize fn(-x) into fn(x). */
8013 if ((tmp = fold_strip_sign_ops (arg)))
8014 return build_call_expr_loc (loc, fndecl, 1, tmp);
8017 return NULL_TREE;
8020 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
8021 Return NULL_TREE if no simplification can be made. */
8023 static tree
8024 fold_builtin_tan (tree arg, tree type)
8026 enum built_in_function fcode;
8027 tree res;
8029 if (!validate_arg (arg, REAL_TYPE))
8030 return NULL_TREE;
8032 /* Calculate the result when the argument is a constant. */
8033 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
8034 return res;
8036 /* Optimize tan(atan(x)) = x. */
8037 fcode = builtin_mathfn_code (arg);
8038 if (flag_unsafe_math_optimizations
8039 && (fcode == BUILT_IN_ATAN
8040 || fcode == BUILT_IN_ATANF
8041 || fcode == BUILT_IN_ATANL))
8042 return CALL_EXPR_ARG (arg, 0);
8044 return NULL_TREE;
8047 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8048 NULL_TREE if no simplification can be made. */
8050 static tree
8051 fold_builtin_sincos (location_t loc,
8052 tree arg0, tree arg1, tree arg2)
8054 tree type;
8055 tree res, fn, call;
8057 if (!validate_arg (arg0, REAL_TYPE)
8058 || !validate_arg (arg1, POINTER_TYPE)
8059 || !validate_arg (arg2, POINTER_TYPE))
8060 return NULL_TREE;
8062 type = TREE_TYPE (arg0);
8064 /* Calculate the result when the argument is a constant. */
8065 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8066 return res;
8068 /* Canonicalize sincos to cexpi. */
8069 if (!targetm.libc_has_function (function_c99_math_complex))
8070 return NULL_TREE;
8071 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8072 if (!fn)
8073 return NULL_TREE;
8075 call = build_call_expr_loc (loc, fn, 1, arg0);
8076 call = builtin_save_expr (call);
8078 return build2 (COMPOUND_EXPR, void_type_node,
8079 build2 (MODIFY_EXPR, void_type_node,
8080 build_fold_indirect_ref_loc (loc, arg1),
8081 build1 (IMAGPART_EXPR, type, call)),
8082 build2 (MODIFY_EXPR, void_type_node,
8083 build_fold_indirect_ref_loc (loc, arg2),
8084 build1 (REALPART_EXPR, type, call)));
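/* I.e. sincos (x, &s, &c) becomes, roughly,
     tmp = cexpi (x); s = __imag tmp; c = __real tmp;
   since cexpi (x) computes cos (x) + I*sin (x) in a single call. */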
8087 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8088 NULL_TREE if no simplification can be made. */
8090 static tree
8091 fold_builtin_cexp (location_t loc, tree arg0, tree type)
8093 tree rtype;
8094 tree realp, imagp, ifn;
8095 tree res;
8097 if (!validate_arg (arg0, COMPLEX_TYPE)
8098 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
8099 return NULL_TREE;
8101 /* Calculate the result when the argument is a constant. */
8102 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8103 return res;
8105 rtype = TREE_TYPE (TREE_TYPE (arg0));
8107 /* If we can determine that the real part of arg0 is constant zero,
8108 fold to cexpi. */
8109 if (!targetm.libc_has_function (function_c99_math_complex))
8110 return NULL_TREE;
8111 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8112 if (!ifn)
8113 return NULL_TREE;
8115 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8116 && real_zerop (realp))
8118 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8119 return build_call_expr_loc (loc, ifn, 1, narg);
8122 /* If we can easily decompose the real and imaginary parts, split
8123 cexp into exp (r) * cexpi (i). */
8124 if (flag_unsafe_math_optimizations
8125 && realp)
8127 tree rfn, rcall, icall;
8129 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8130 if (!rfn)
8131 return NULL_TREE;
8133 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
8134 if (!imagp)
8135 return NULL_TREE;
8137 icall = build_call_expr_loc (loc, ifn, 1, imagp);
8138 icall = builtin_save_expr (icall);
8139 rcall = build_call_expr_loc (loc, rfn, 1, realp);
8140 rcall = builtin_save_expr (rcall);
8141 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8142 fold_build2_loc (loc, MULT_EXPR, rtype,
8143 rcall,
8144 fold_build1_loc (loc, REALPART_EXPR,
8145 rtype, icall)),
8146 fold_build2_loc (loc, MULT_EXPR, rtype,
8147 rcall,
8148 fold_build1_loc (loc, IMAGPART_EXPR,
8149 rtype, icall)));
8152 return NULL_TREE;
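/* The split above is Euler's formula: cexp (r + i*I)
   = exp (r) * cexpi (i) = exp (r)*cos (i) + I*exp (r)*sin (i);
   the saves again guard against double evaluation. */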
8155 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8156 Return NULL_TREE if no simplification can be made. */
8158 static tree
8159 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8161 if (!validate_arg (arg, REAL_TYPE))
8162 return NULL_TREE;
8164 /* Optimize trunc of constant value. */
8165 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8167 REAL_VALUE_TYPE r, x;
8168 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8170 x = TREE_REAL_CST (arg);
8171 real_trunc (&r, TYPE_MODE (type), &x);
8172 return build_real (type, r);
8175 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8178 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8179 Return NULL_TREE if no simplification can be made. */
8181 static tree
8182 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8184 if (!validate_arg (arg, REAL_TYPE))
8185 return NULL_TREE;
8187 /* Optimize floor of constant value. */
8188 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8190 REAL_VALUE_TYPE x;
8192 x = TREE_REAL_CST (arg);
8193 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8195 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8196 REAL_VALUE_TYPE r;
8198 real_floor (&r, TYPE_MODE (type), &x);
8199 return build_real (type, r);
8203 /* Fold floor (x) where x is nonnegative to trunc (x). */
8204 if (tree_expr_nonnegative_p (arg))
8206 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8207 if (truncfn)
8208 return build_call_expr_loc (loc, truncfn, 1, arg);
8211 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8214 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8215 Return NULL_TREE if no simplification can be made. */
8217 static tree
8218 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8220 if (!validate_arg (arg, REAL_TYPE))
8221 return NULL_TREE;
8223 /* Optimize ceil of constant value. */
8224 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8226 REAL_VALUE_TYPE x;
8228 x = TREE_REAL_CST (arg);
8229 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8231 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8232 REAL_VALUE_TYPE r;
8234 real_ceil (&r, TYPE_MODE (type), &x);
8235 return build_real (type, r);
8239 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8242 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8243 Return NULL_TREE if no simplification can be made. */
8245 static tree
8246 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8248 if (!validate_arg (arg, REAL_TYPE))
8249 return NULL_TREE;
8251 /* Optimize round of constant value. */
8252 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8254 REAL_VALUE_TYPE x;
8256 x = TREE_REAL_CST (arg);
8257 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8259 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8260 REAL_VALUE_TYPE r;
8262 real_round (&r, TYPE_MODE (type), &x);
8263 return build_real (type, r);
8267 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8270 /* Fold function call to builtin lround, lroundf or lroundl (or the
8271 corresponding long long versions) and other rounding functions. ARG
8272 is the argument to the call. Return NULL_TREE if no simplification
8273 can be made. */
8275 static tree
8276 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8278 if (!validate_arg (arg, REAL_TYPE))
8279 return NULL_TREE;
8281 /* Optimize lround of constant value. */
8282 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8284 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8286 if (real_isfinite (&x))
8288 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8289 tree ftype = TREE_TYPE (arg);
8290 REAL_VALUE_TYPE r;
8291 bool fail = false;
8293 switch (DECL_FUNCTION_CODE (fndecl))
8295 CASE_FLT_FN (BUILT_IN_IFLOOR):
8296 CASE_FLT_FN (BUILT_IN_LFLOOR):
8297 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8298 real_floor (&r, TYPE_MODE (ftype), &x);
8299 break;
8301 CASE_FLT_FN (BUILT_IN_ICEIL):
8302 CASE_FLT_FN (BUILT_IN_LCEIL):
8303 CASE_FLT_FN (BUILT_IN_LLCEIL):
8304 real_ceil (&r, TYPE_MODE (ftype), &x);
8305 break;
8307 CASE_FLT_FN (BUILT_IN_IROUND):
8308 CASE_FLT_FN (BUILT_IN_LROUND):
8309 CASE_FLT_FN (BUILT_IN_LLROUND):
8310 real_round (&r, TYPE_MODE (ftype), &x);
8311 break;
8313 default:
8314 gcc_unreachable ();
8317 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8318 if (!fail)
8319 return wide_int_to_tree (itype, val);
8323 switch (DECL_FUNCTION_CODE (fndecl))
8325 CASE_FLT_FN (BUILT_IN_LFLOOR):
8326 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8327 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8328 if (tree_expr_nonnegative_p (arg))
8329 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8330 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8331 break;
8332 default:;
8335 return fold_fixed_mathfn (loc, fndecl, arg);
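/* E.g. lround (2.5) folds to 3L at compile time. Constants that do
   not fit the integer type set FAIL and are left as runtime calls,
   preserving the target's out-of-range behavior. */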
8338 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8339 and their long and long long variants (e.g. ffsl and ffsll). ARG is
8340 the argument to the call. Return NULL_TREE if no simplification can
8341 be made. */
8343 static tree
8344 fold_builtin_bitop (tree fndecl, tree arg)
8346 if (!validate_arg (arg, INTEGER_TYPE))
8347 return NULL_TREE;
8349 /* Optimize for constant argument. */
8350 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8352 tree type = TREE_TYPE (arg);
8353 int result;
8355 switch (DECL_FUNCTION_CODE (fndecl))
8357 CASE_INT_FN (BUILT_IN_FFS):
8358 result = wi::ffs (arg);
8359 break;
8361 CASE_INT_FN (BUILT_IN_CLZ):
8362 if (wi::ne_p (arg, 0))
8363 result = wi::clz (arg);
8364 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8365 result = TYPE_PRECISION (type);
8366 break;
8368 CASE_INT_FN (BUILT_IN_CTZ):
8369 if (wi::ne_p (arg, 0))
8370 result = wi::ctz (arg);
8371 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8372 result = TYPE_PRECISION (type);
8373 break;
8375 CASE_INT_FN (BUILT_IN_CLRSB):
8376 result = wi::clrsb (arg);
8377 break;
8379 CASE_INT_FN (BUILT_IN_POPCOUNT):
8380 result = wi::popcount (arg);
8381 break;
8383 CASE_INT_FN (BUILT_IN_PARITY):
8384 result = wi::parity (arg);
8385 break;
8387 default:
8388 gcc_unreachable ();
8391 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8394 return NULL_TREE;
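/* E.g. __builtin_popcount (5) folds to 2 and __builtin_ffs (8) to 4.
   clz/ctz of zero only fold when the target defines a value for that
   case via CLZ/CTZ_DEFINED_VALUE_AT_ZERO. */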
8397 /* Fold function call to builtin_bswap and the short, long and long long
8398 variants. Return NULL_TREE if no simplification can be made. */
8399 static tree
8400 fold_builtin_bswap (tree fndecl, tree arg)
8402 if (! validate_arg (arg, INTEGER_TYPE))
8403 return NULL_TREE;
8405 /* Optimize constant value. */
8406 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8408 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8410 switch (DECL_FUNCTION_CODE (fndecl))
8412 case BUILT_IN_BSWAP16:
8413 case BUILT_IN_BSWAP32:
8414 case BUILT_IN_BSWAP64:
8416 signop sgn = TYPE_SIGN (type);
8417 tree result =
8418 wide_int_to_tree (type,
8419 wide_int::from (arg, TYPE_PRECISION (type),
8420 sgn).bswap ());
8421 return result;
8423 default:
8424 gcc_unreachable ();
8428 return NULL_TREE;
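/* E.g. __builtin_bswap32 (0x12345678) folds to 0x78563412: the
   constant is brought to the builtin's precision and byte-reversed
   at compile time. */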
8431 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8432 NULL_TREE if no simplification can be made. */
8434 static tree
8435 fold_builtin_hypot (location_t loc, tree fndecl,
8436 tree arg0, tree arg1, tree type)
8438 tree res, narg0, narg1;
8440 if (!validate_arg (arg0, REAL_TYPE)
8441 || !validate_arg (arg1, REAL_TYPE))
8442 return NULL_TREE;
8444 /* Calculate the result when the argument is a constant. */
8445 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8446 return res;
8448 /* If either argument to hypot has a negate or abs, strip that off.
8449 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8450 narg0 = fold_strip_sign_ops (arg0);
8451 narg1 = fold_strip_sign_ops (arg1);
8452 if (narg0 || narg1)
8454 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8455 narg1 ? narg1 : arg1);
8458 /* If either argument is zero, hypot is fabs of the other. */
8459 if (real_zerop (arg0))
8460 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8461 else if (real_zerop (arg1))
8462 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8464 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8465 if (flag_unsafe_math_optimizations
8466 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8468 const REAL_VALUE_TYPE sqrt2_trunc
8469 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8470 return fold_build2_loc (loc, MULT_EXPR, type,
8471 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8472 build_real (type, sqrt2_trunc));
8475 return NULL_TREE;
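/* The hypot (x, x) fold mirrors the cabs fold above:
   sqrt (x*x + x*x) = fabs (x) * sqrt (2), up to rounding. */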
8479 /* Fold a builtin function call to pow, powf, or powl. Return
8480 NULL_TREE if no simplification can be made. */
8481 static tree
8482 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8484 tree res;
8486 if (!validate_arg (arg0, REAL_TYPE)
8487 || !validate_arg (arg1, REAL_TYPE))
8488 return NULL_TREE;
8490 /* Calculate the result when the argument is a constant. */
8491 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8492 return res;
8494 /* Optimize pow(1.0,y) = 1.0. */
8495 if (real_onep (arg0))
8496 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8498 if (TREE_CODE (arg1) == REAL_CST
8499 && !TREE_OVERFLOW (arg1))
8501 REAL_VALUE_TYPE cint;
8502 REAL_VALUE_TYPE c;
8503 HOST_WIDE_INT n;
8505 c = TREE_REAL_CST (arg1);
8507 /* Optimize pow(x,0.0) = 1.0. */
8508 if (REAL_VALUES_EQUAL (c, dconst0))
8509 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8510 arg0);
8512 /* Optimize pow(x,1.0) = x. */
8513 if (REAL_VALUES_EQUAL (c, dconst1))
8514 return arg0;
8516 /* Optimize pow(x,-1.0) = 1.0/x. */
8517 if (REAL_VALUES_EQUAL (c, dconstm1))
8518 return fold_build2_loc (loc, RDIV_EXPR, type,
8519 build_real (type, dconst1), arg0);
8521 /* Optimize pow(x,0.5) = sqrt(x). */
8522 if (flag_unsafe_math_optimizations
8523 && REAL_VALUES_EQUAL (c, dconsthalf))
8525 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8527 if (sqrtfn != NULL_TREE)
8528 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8531 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8532 if (flag_unsafe_math_optimizations)
8534 const REAL_VALUE_TYPE dconstroot
8535 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8537 if (REAL_VALUES_EQUAL (c, dconstroot))
8539 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8540 if (cbrtfn != NULL_TREE)
8541 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8545 /* Check for an integer exponent. */
8546 n = real_to_integer (&c);
8547 real_from_integer (&cint, VOIDmode, n, SIGNED);
8548 if (real_identical (&c, &cint))
8550 /* Attempt to evaluate pow at compile-time, unless this should
8551 raise an exception. */
8552 if (TREE_CODE (arg0) == REAL_CST
8553 && !TREE_OVERFLOW (arg0)
8554 && (n > 0
8555 || (!flag_trapping_math && !flag_errno_math)
8556 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8558 REAL_VALUE_TYPE x;
8559 bool inexact;
8561 x = TREE_REAL_CST (arg0);
8562 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8563 if (flag_unsafe_math_optimizations || !inexact)
8564 return build_real (type, x);
8567 /* Strip sign ops from even integer powers. */
8568 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8570 tree narg0 = fold_strip_sign_ops (arg0);
8571 if (narg0)
8572 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8577 if (flag_unsafe_math_optimizations)
8579 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8581 /* Optimize pow(expN(x),y) = expN(x*y). */
8582 if (BUILTIN_EXPONENT_P (fcode))
8584 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8585 tree arg = CALL_EXPR_ARG (arg0, 0);
8586 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8587 return build_call_expr_loc (loc, expfn, 1, arg);
8590 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8591 if (BUILTIN_SQRT_P (fcode))
8593 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8594 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8595 build_real (type, dconsthalf));
8596 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8599 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8600 if (BUILTIN_CBRT_P (fcode))
8602 tree arg = CALL_EXPR_ARG (arg0, 0);
8603 if (tree_expr_nonnegative_p (arg))
8605 const REAL_VALUE_TYPE dconstroot
8606 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8607 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8608 build_real (type, dconstroot));
8609 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8613 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8614 if (fcode == BUILT_IN_POW
8615 || fcode == BUILT_IN_POWF
8616 || fcode == BUILT_IN_POWL)
8618 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8619 if (tree_expr_nonnegative_p (arg00))
8621 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8622 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8623 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8628 return NULL_TREE;
8631 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8632 Return NULL_TREE if no simplification can be made. */
8633 static tree
8634 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8635 tree arg0, tree arg1, tree type)
8637 if (!validate_arg (arg0, REAL_TYPE)
8638 || !validate_arg (arg1, INTEGER_TYPE))
8639 return NULL_TREE;
8641 /* Optimize pow(1.0,y) = 1.0. */
8642 if (real_onep (arg0))
8643 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8645 if (tree_fits_shwi_p (arg1))
8647 HOST_WIDE_INT c = tree_to_shwi (arg1);
8649 /* Evaluate powi at compile-time. */
8650 if (TREE_CODE (arg0) == REAL_CST
8651 && !TREE_OVERFLOW (arg0))
8653 REAL_VALUE_TYPE x;
8654 x = TREE_REAL_CST (arg0);
8655 real_powi (&x, TYPE_MODE (type), &x, c);
8656 return build_real (type, x);
8659 /* Optimize pow(x,0) = 1.0. */
8660 if (c == 0)
8661 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8662 arg0);
8664 /* Optimize pow(x,1) = x. */
8665 if (c == 1)
8666 return arg0;
8668 /* Optimize pow(x,-1) = 1.0/x. */
8669 if (c == -1)
8670 return fold_build2_loc (loc, RDIV_EXPR, type,
8671 build_real (type, dconst1), arg0);
8674 return NULL_TREE;
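/* Unlike the pow folds, these need no unsafe-math guard: the exponent
   is a genuine integer, so x**0 = 1, x**1 = x and x**-1 = 1/x follow
   directly from powi's definition. */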
8677 /* A subroutine of fold_builtin to fold the various exponent
8678 functions. Return NULL_TREE if no simplification can be made.
8679 FUNC is the corresponding MPFR exponent function. */
8681 static tree
8682 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8683 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8685 if (validate_arg (arg, REAL_TYPE))
8687 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8688 tree res;
8690 /* Calculate the result when the argument is a constant. */
8691 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8692 return res;
8694 /* Optimize expN(logN(x)) = x. */
8695 if (flag_unsafe_math_optimizations)
8697 const enum built_in_function fcode = builtin_mathfn_code (arg);
8699 if ((func == mpfr_exp
8700 && (fcode == BUILT_IN_LOG
8701 || fcode == BUILT_IN_LOGF
8702 || fcode == BUILT_IN_LOGL))
8703 || (func == mpfr_exp2
8704 && (fcode == BUILT_IN_LOG2
8705 || fcode == BUILT_IN_LOG2F
8706 || fcode == BUILT_IN_LOG2L))
8707 || (func == mpfr_exp10
8708 && (fcode == BUILT_IN_LOG10
8709 || fcode == BUILT_IN_LOG10F
8710 || fcode == BUILT_IN_LOG10L)))
8711 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8715 return NULL_TREE;
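/* E.g. exp (log (x)) folds to x under -funsafe-math-optimizations;
   the guard is needed because the fold drops the NaN that log
   produces for negative X. */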
8718 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8719 arguments to the call, and TYPE is its return type.
8720 Return NULL_TREE if no simplification can be made. */
8722 static tree
8723 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8725 if (!validate_arg (arg1, POINTER_TYPE)
8726 || !validate_arg (arg2, INTEGER_TYPE)
8727 || !validate_arg (len, INTEGER_TYPE))
8728 return NULL_TREE;
8729 else
8731 const char *p1;
8733 if (TREE_CODE (arg2) != INTEGER_CST
8734 || !tree_fits_uhwi_p (len))
8735 return NULL_TREE;
8737 p1 = c_getstr (arg1);
8738 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8740 char c;
8741 const char *r;
8742 tree tem;
8744 if (target_char_cast (arg2, &c))
8745 return NULL_TREE;
8747 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8749 if (r == NULL)
8750 return build_int_cst (TREE_TYPE (arg1), 0);
8752 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8753 return fold_convert_loc (loc, type, tem);
8755 return NULL_TREE;
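/* E.g. __builtin_memchr ("hello", 'l', 5) folds to "hello" + 2, and
   to a null pointer constant when the byte does not occur within the
   first LEN characters. */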
8759 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8760 Return NULL_TREE if no simplification can be made. */
8762 static tree
8763 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8765 const char *p1, *p2;
8767 if (!validate_arg (arg1, POINTER_TYPE)
8768 || !validate_arg (arg2, POINTER_TYPE)
8769 || !validate_arg (len, INTEGER_TYPE))
8770 return NULL_TREE;
8772 /* If the LEN parameter is zero, return zero. */
8773 if (integer_zerop (len))
8774 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8775 arg1, arg2);
8777 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8778 if (operand_equal_p (arg1, arg2, 0))
8779 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8781 p1 = c_getstr (arg1);
8782 p2 = c_getstr (arg2);
8784 /* If all arguments are constant, and the value of len is not greater
8785 than the lengths of arg1 and arg2, evaluate at compile-time. */
8786 if (tree_fits_uhwi_p (len) && p1 && p2
8787 && compare_tree_int (len, strlen (p1) + 1) <= 0
8788 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8790 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8792 if (r > 0)
8793 return integer_one_node;
8794 else if (r < 0)
8795 return integer_minus_one_node;
8796 else
8797 return integer_zero_node;
8800 /* If len parameter is one, return an expression corresponding to
8801 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8802 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8804 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8805 tree cst_uchar_ptr_node
8806 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8808 tree ind1
8809 = fold_convert_loc (loc, integer_type_node,
8810 build1 (INDIRECT_REF, cst_uchar_node,
8811 fold_convert_loc (loc,
8812 cst_uchar_ptr_node,
8813 arg1)));
8814 tree ind2
8815 = fold_convert_loc (loc, integer_type_node,
8816 build1 (INDIRECT_REF, cst_uchar_node,
8817 fold_convert_loc (loc,
8818 cst_uchar_ptr_node,
8819 arg2)));
8820 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8823 return NULL_TREE;
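/* The LEN == 1 fold thus turns memcmp (p, q, 1) into
   *(const unsigned char *) p - *(const unsigned char *) q, i.e. one
   byte load per operand and a subtraction. */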
8826 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8827 Return NULL_TREE if no simplification can be made. */
8829 static tree
8830 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8832 const char *p1, *p2;
8834 if (!validate_arg (arg1, POINTER_TYPE)
8835 || !validate_arg (arg2, POINTER_TYPE))
8836 return NULL_TREE;
8838 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8839 if (operand_equal_p (arg1, arg2, 0))
8840 return integer_zero_node;
8842 p1 = c_getstr (arg1);
8843 p2 = c_getstr (arg2);
8845 if (p1 && p2)
8847 const int i = strcmp (p1, p2);
8848 if (i < 0)
8849 return integer_minus_one_node;
8850 else if (i > 0)
8851 return integer_one_node;
8852 else
8853 return integer_zero_node;
8856 /* If the second arg is "", return *(const unsigned char*)arg1. */
8857 if (p2 && *p2 == '\0')
8859 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8860 tree cst_uchar_ptr_node
8861 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8863 return fold_convert_loc (loc, integer_type_node,
8864 build1 (INDIRECT_REF, cst_uchar_node,
8865 fold_convert_loc (loc,
8866 cst_uchar_ptr_node,
8867 arg1)));
8870 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8871 if (p1 && *p1 == '\0')
8873 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8874 tree cst_uchar_ptr_node
8875 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8877 tree temp
8878 = fold_convert_loc (loc, integer_type_node,
8879 build1 (INDIRECT_REF, cst_uchar_node,
8880 fold_convert_loc (loc,
8881 cst_uchar_ptr_node,
8882 arg2)));
8883 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8886 return NULL_TREE;
8889 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8890 Return NULL_TREE if no simplification can be made. */
8892 static tree
8893 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8895 const char *p1, *p2;
8897 if (!validate_arg (arg1, POINTER_TYPE)
8898 || !validate_arg (arg2, POINTER_TYPE)
8899 || !validate_arg (len, INTEGER_TYPE))
8900 return NULL_TREE;
8902 /* If the LEN parameter is zero, return zero. */
8903 if (integer_zerop (len))
8904 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8905 arg1, arg2);
8907 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8908 if (operand_equal_p (arg1, arg2, 0))
8909 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8911 p1 = c_getstr (arg1);
8912 p2 = c_getstr (arg2);
8914 if (tree_fits_uhwi_p (len) && p1 && p2)
8916 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8917 if (i > 0)
8918 return integer_one_node;
8919 else if (i < 0)
8920 return integer_minus_one_node;
8921 else
8922 return integer_zero_node;
8925 /* If the second arg is "", and the length is greater than zero,
8926 return *(const unsigned char*)arg1. */
8927 if (p2 && *p2 == '\0'
8928 && TREE_CODE (len) == INTEGER_CST
8929 && tree_int_cst_sgn (len) == 1)
8931 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8932 tree cst_uchar_ptr_node
8933 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8935 return fold_convert_loc (loc, integer_type_node,
8936 build1 (INDIRECT_REF, cst_uchar_node,
8937 fold_convert_loc (loc,
8938 cst_uchar_ptr_node,
8939 arg1)));
8942 /* If the first arg is "", and the length is greater than zero,
8943 return -*(const unsigned char*)arg2. */
8944 if (p1 && *p1 == '\0'
8945 && TREE_CODE (len) == INTEGER_CST
8946 && tree_int_cst_sgn (len) == 1)
8948 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8949 tree cst_uchar_ptr_node
8950 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8952 tree temp = fold_convert_loc (loc, integer_type_node,
8953 build1 (INDIRECT_REF, cst_uchar_node,
8954 fold_convert_loc (loc,
8955 cst_uchar_ptr_node,
8956 arg2)));
8957 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8960 /* If len parameter is one, return an expression corresponding to
8961 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8962 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8964 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8965 tree cst_uchar_ptr_node
8966 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8968 tree ind1 = fold_convert_loc (loc, integer_type_node,
8969 build1 (INDIRECT_REF, cst_uchar_node,
8970 fold_convert_loc (loc,
8971 cst_uchar_ptr_node,
8972 arg1)));
8973 tree ind2 = fold_convert_loc (loc, integer_type_node,
8974 build1 (INDIRECT_REF, cst_uchar_node,
8975 fold_convert_loc (loc,
8976 cst_uchar_ptr_node,
8977 arg2)));
8978 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8981 return NULL_TREE;
8984 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8985 ARG. Return NULL_TREE if no simplification can be made. */
8987 static tree
8988 fold_builtin_signbit (location_t loc, tree arg, tree type)
8990 if (!validate_arg (arg, REAL_TYPE))
8991 return NULL_TREE;
8993 /* If ARG is a compile-time constant, determine the result. */
8994 if (TREE_CODE (arg) == REAL_CST
8995 && !TREE_OVERFLOW (arg))
8997 REAL_VALUE_TYPE c;
8999 c = TREE_REAL_CST (arg);
9000 return (REAL_VALUE_NEGATIVE (c)
9001 ? build_one_cst (type)
9002 : build_zero_cst (type));
9005 /* If ARG is non-negative, the result is always zero. */
9006 if (tree_expr_nonnegative_p (arg))
9007 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9009 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9010 if (!HONOR_SIGNED_ZEROS (arg))
9011 return fold_convert (type,
9012 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9013 build_real (TREE_TYPE (arg), dconst0)));
9015 return NULL_TREE;
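/* The "arg < 0.0" form is only equivalent when the format lacks
   signed zeros: signbit (-0.0) is nonzero, yet -0.0 < 0.0 is
   false. */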
9018 /* Fold function call to builtin copysign, copysignf or copysignl with
9019 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9020 be made. */
9022 static tree
9023 fold_builtin_copysign (location_t loc, tree fndecl,
9024 tree arg1, tree arg2, tree type)
9026 tree tem;
9028 if (!validate_arg (arg1, REAL_TYPE)
9029 || !validate_arg (arg2, REAL_TYPE))
9030 return NULL_TREE;
9032 /* copysign(X,X) is X. */
9033 if (operand_equal_p (arg1, arg2, 0))
9034 return fold_convert_loc (loc, type, arg1);
9036 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9037 if (TREE_CODE (arg1) == REAL_CST
9038 && TREE_CODE (arg2) == REAL_CST
9039 && !TREE_OVERFLOW (arg1)
9040 && !TREE_OVERFLOW (arg2))
9042 REAL_VALUE_TYPE c1, c2;
9044 c1 = TREE_REAL_CST (arg1);
9045 c2 = TREE_REAL_CST (arg2);
9046 /* c1.sign := c2.sign. */
9047 real_copysign (&c1, &c2);
9048 return build_real (type, c1);
9051 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9052 Remember to evaluate Y for side-effects. */
9053 if (tree_expr_nonnegative_p (arg2))
9054 return omit_one_operand_loc (loc, type,
9055 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9056 arg2);
9058 /* Strip sign changing operations for the first argument. */
9059 tem = fold_strip_sign_ops (arg1);
9060 if (tem)
9061 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9063 return NULL_TREE;
9066 /* Fold a call to builtin isascii with argument ARG. */
9068 static tree
9069 fold_builtin_isascii (location_t loc, tree arg)
9071 if (!validate_arg (arg, INTEGER_TYPE))
9072 return NULL_TREE;
9073 else
9075 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9076 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9077 build_int_cst (integer_type_node,
9078 ~ (unsigned HOST_WIDE_INT) 0x7f));
9079 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9080 arg, integer_zero_node);
9084 /* Fold a call to builtin toascii with argument ARG. */
9086 static tree
9087 fold_builtin_toascii (location_t loc, tree arg)
9089 if (!validate_arg (arg, INTEGER_TYPE))
9090 return NULL_TREE;
9092 /* Transform toascii(c) -> (c & 0x7f). */
9093 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9094 build_int_cst (integer_type_node, 0x7f));
9097 /* Fold a call to builtin isdigit with argument ARG. */
9099 static tree
9100 fold_builtin_isdigit (location_t loc, tree arg)
9102 if (!validate_arg (arg, INTEGER_TYPE))
9103 return NULL_TREE;
9104 else
9106 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9107 /* According to the C standard, isdigit is unaffected by locale.
9108 However, it definitely is affected by the target character set. */
9109 unsigned HOST_WIDE_INT target_digit0
9110 = lang_hooks.to_target_charset ('0');
9112 if (target_digit0 == 0)
9113 return NULL_TREE;
9115 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9116 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9117 build_int_cst (unsigned_type_node, target_digit0));
9118 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9119 build_int_cst (unsigned_type_node, 9));
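/* The unsigned-subtract range check above, as a standalone sketch
   (illustrative only; check_isdigit is a hypothetical name):

     #include <assert.h>

     static int check_isdigit (int c)
     {
       return (unsigned) c - '0' <= 9;
     }

     int main (void)
     {
       assert (check_isdigit ('7') && !check_isdigit ('a'));
       return 0;
     }

   Subtracting '0' in unsigned arithmetic wraps values below '0' around
   to huge numbers, so one comparison covers both ends of the range.  */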
9123 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9125 static tree
9126 fold_builtin_fabs (location_t loc, tree arg, tree type)
9128 if (!validate_arg (arg, REAL_TYPE))
9129 return NULL_TREE;
9131 arg = fold_convert_loc (loc, type, arg);
9132 if (TREE_CODE (arg) == REAL_CST)
9133 return fold_abs_const (arg, type);
9134 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9137 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9139 static tree
9140 fold_builtin_abs (location_t loc, tree arg, tree type)
9142 if (!validate_arg (arg, INTEGER_TYPE))
9143 return NULL_TREE;
9145 arg = fold_convert_loc (loc, type, arg);
9146 if (TREE_CODE (arg) == INTEGER_CST)
9147 return fold_abs_const (arg, type);
9148 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9151 /* Fold a fma operation with arguments ARG[012]. */
9153 tree
9154 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9155 tree type, tree arg0, tree arg1, tree arg2)
9157 if (TREE_CODE (arg0) == REAL_CST
9158 && TREE_CODE (arg1) == REAL_CST
9159 && TREE_CODE (arg2) == REAL_CST)
9160 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9162 return NULL_TREE;
9165 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9167 static tree
9168 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9170 if (validate_arg (arg0, REAL_TYPE)
9171 && validate_arg (arg1, REAL_TYPE)
9172 && validate_arg (arg2, REAL_TYPE))
9174 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9175 if (tem)
9176 return tem;
9178 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9179 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9180 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9182 return NULL_TREE;
9185 /* Fold a call to builtin fmin or fmax. */
9187 static tree
9188 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9189 tree type, bool max)
9191 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9193 /* Calculate the result when the argument is a constant. */
9194 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9196 if (res)
9197 return res;
9199 /* If either argument is NaN, return the other one. Avoid the
9200 transformation if we get (and honor) a signalling NaN. Using
9201 omit_one_operand() ensures we create a non-lvalue. */
9202 if (TREE_CODE (arg0) == REAL_CST
9203 && real_isnan (&TREE_REAL_CST (arg0))
9204 && (! HONOR_SNANS (arg0)
9205 || ! TREE_REAL_CST (arg0).signalling))
9206 return omit_one_operand_loc (loc, type, arg1, arg0);
9207 if (TREE_CODE (arg1) == REAL_CST
9208 && real_isnan (&TREE_REAL_CST (arg1))
9209 && (! HONOR_SNANS (arg1)
9210 || ! TREE_REAL_CST (arg1).signalling))
9211 return omit_one_operand_loc (loc, type, arg0, arg1);
9213 /* Transform fmin/fmax(x,x) -> x. */
9214 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9215 return omit_one_operand_loc (loc, type, arg0, arg1);
9217 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9218 functions to return the numeric arg if the other one is NaN.
9219 These tree codes don't honor that, so only transform if
9220 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9221 handled, so we don't have to worry about it either. */
9222 if (flag_finite_math_only)
9223 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9224 fold_convert_loc (loc, type, arg0),
9225 fold_convert_loc (loc, type, arg1));
9227 return NULL_TREE;
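/* The C99 guarantee these folds must preserve, as a standalone sketch
   (illustrative only):

     #include <assert.h>
     #include <math.h>

     int main (void)
     {
       double n = nan ("");
       assert (fmax (n, 2.0) == 2.0);   // the NaN loses to the number
       assert (fmin (2.0, n) == 2.0);
       return 0;
     }

   MIN_EXPR/MAX_EXPR give an unspecified result when one operand is NaN,
   which is why that final fold requires -ffinite-math-only.  */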
9230 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9232 static tree
9233 fold_builtin_carg (location_t loc, tree arg, tree type)
9235 if (validate_arg (arg, COMPLEX_TYPE)
9236 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9238 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9240 if (atan2_fn)
9242 tree new_arg = builtin_save_expr (arg);
9243 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9244 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9245 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9249 return NULL_TREE;
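/* What the carg fold computes, as a standalone sketch (illustrative
   only):

     #include <assert.h>
     #include <complex.h>
     #include <math.h>

     int main (void)
     {
       double complex z = -1.0 + 0.0 * I;
       assert (carg (z) == atan2 (cimag (z), creal (z)));   // both are pi
       return 0;
     }

   Note the argument order: the imaginary part becomes atan2's first (y)
   argument and the real part its second (x).  */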
9252 /* Fold a call to builtin logb/ilogb. */
9254 static tree
9255 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9257 if (! validate_arg (arg, REAL_TYPE))
9258 return NULL_TREE;
9260 STRIP_NOPS (arg);
9262 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9264 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9266 switch (value->cl)
9268 case rvc_nan:
9269 case rvc_inf:
9270 /* If arg is Inf or NaN and we're logb, return it. */
9271 if (TREE_CODE (rettype) == REAL_TYPE)
9273 /* For logb(-Inf) we have to return +Inf. */
9274 if (real_isinf (value) && real_isneg (value))
9276 REAL_VALUE_TYPE tem;
9277 real_inf (&tem);
9278 return build_real (rettype, tem);
9280 return fold_convert_loc (loc, rettype, arg);
9282 /* Fall through... */
9283 case rvc_zero:
9284 /* Zero may set errno and/or raise an exception for logb; also,
9285 for ilogb we don't know FP_ILOGB0. */
9286 return NULL_TREE;
9287 case rvc_normal:
9288 /* For normal numbers, proceed iff radix == 2. In GCC,
9289 normalized significands are in the range [0.5, 1.0). We
9290 want the exponent as if they were [1.0, 2.0) so get the
9291 exponent and subtract 1. */
9292 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9293 return fold_convert_loc (loc, rettype,
9294 build_int_cst (integer_type_node,
9295 REAL_EXP (value)-1));
9296 break;
9300 return NULL_TREE;
9303 /* Fold a call to builtin significand, if radix == 2. */
9305 static tree
9306 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9308 if (! validate_arg (arg, REAL_TYPE))
9309 return NULL_TREE;
9311 STRIP_NOPS (arg);
9313 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9315 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9317 switch (value->cl)
9319 case rvc_zero:
9320 case rvc_nan:
9321 case rvc_inf:
9322 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9323 return fold_convert_loc (loc, rettype, arg);
9324 case rvc_normal:
9325 /* For normal numbers, proceed iff radix == 2. */
9326 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9328 REAL_VALUE_TYPE result = *value;
9329 /* In GCC, normalized significands are in the range [0.5,
9330 1.0). We want them to be [1.0, 2.0) so set the
9331 exponent to 1. */
9332 SET_REAL_EXP (&result, 1);
9333 return build_real (rettype, result);
9335 break;
9339 return NULL_TREE;
9342 /* Fold a call to builtin frexp, we can assume the base is 2. */
9344 static tree
9345 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9347 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9348 return NULL_TREE;
9350 STRIP_NOPS (arg0);
9352 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9353 return NULL_TREE;
9355 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9357 /* Proceed if a valid pointer type was passed in. */
9358 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9360 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9361 tree frac, exp;
9363 switch (value->cl)
9365 case rvc_zero:
9366 /* For +-0, return (*exp = 0, +-0). */
9367 exp = integer_zero_node;
9368 frac = arg0;
9369 break;
9370 case rvc_nan:
9371 case rvc_inf:
9372 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9373 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9374 case rvc_normal:
9376 /* Since the frexp function always expects base 2, and in
9377 GCC normalized significands are already in the range
9378 [0.5, 1.0), we have exactly what frexp wants. */
9379 REAL_VALUE_TYPE frac_rvt = *value;
9380 SET_REAL_EXP (&frac_rvt, 0);
9381 frac = build_real (rettype, frac_rvt);
9382 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9384 break;
9385 default:
9386 gcc_unreachable ();
9389 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9390 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9391 TREE_SIDE_EFFECTS (arg1) = 1;
9392 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9395 return NULL_TREE;
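/* The invariant the constant fold relies on, as a standalone sketch
   (illustrative only):

     #include <assert.h>
     #include <math.h>

     int main (void)
     {
       int e;
       double f = frexp (24.0, &e);
       assert (f == 0.75 && e == 5);   // 24.0 == 0.75 * 2^5
       assert (f >= 0.5 && f < 1.0);   // significand range
       return 0;
     }

   GCC's internal representation already keeps significands in [0.5, 1.0),
   so REAL_EXP can be stored through *arg1 unchanged.  */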
9398 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9399 then we can assume the base is two. If it's false, then we have to
9400 check the mode of the TYPE parameter in certain cases. */
9402 static tree
9403 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9404 tree type, bool ldexp)
9406 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9408 STRIP_NOPS (arg0);
9409 STRIP_NOPS (arg1);
9411 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9412 if (real_zerop (arg0) || integer_zerop (arg1)
9413 || (TREE_CODE (arg0) == REAL_CST
9414 && !real_isfinite (&TREE_REAL_CST (arg0))))
9415 return omit_one_operand_loc (loc, type, arg0, arg1);
9417 /* If both arguments are constant, then try to evaluate it. */
9418 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9419 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9420 && tree_fits_shwi_p (arg1))
9422 /* Bound the maximum adjustment to twice the range of the
9423 mode's valid exponents. Use abs to ensure the range is
9424 positive as a sanity check. */
9425 const long max_exp_adj = 2 *
9426 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9427 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9429 /* Get the user-requested adjustment. */
9430 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9432 /* The requested adjustment must be inside this range. This
9433 is a preliminary cap to avoid things like overflow; we
9434 may still fail to compute the result for other reasons. */
9435 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9437 REAL_VALUE_TYPE initial_result;
9439 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9441 /* Ensure we didn't overflow. */
9442 if (! real_isinf (&initial_result))
9444 const REAL_VALUE_TYPE trunc_result
9445 = real_value_truncate (TYPE_MODE (type), initial_result);
9447 /* Only proceed if the target mode can hold the
9448 resulting value. */
9449 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9450 return build_real (type, trunc_result);
9456 return NULL_TREE;
9459 /* Fold a call to builtin modf. */
9461 static tree
9462 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9464 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9465 return NULL_TREE;
9467 STRIP_NOPS (arg0);
9469 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9470 return NULL_TREE;
9472 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9474 /* Proceed if a valid pointer type was passed in. */
9475 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9477 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9478 REAL_VALUE_TYPE trunc, frac;
9480 switch (value->cl)
9482 case rvc_nan:
9483 case rvc_zero:
9484 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9485 trunc = frac = *value;
9486 break;
9487 case rvc_inf:
9488 /* For +-Inf, return (*arg1 = arg0, +-0). */
9489 frac = dconst0;
9490 frac.sign = value->sign;
9491 trunc = *value;
9492 break;
9493 case rvc_normal:
9494 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9495 real_trunc (&trunc, VOIDmode, value);
9496 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9497 /* If the original number was negative and already
9498 integral, then the fractional part is -0.0. */
9499 if (value->sign && frac.cl == rvc_zero)
9500 frac.sign = value->sign;
9501 break;
9504 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9505 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9506 build_real (rettype, trunc));
9507 TREE_SIDE_EFFECTS (arg1) = 1;
9508 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9509 build_real (rettype, frac));
9512 return NULL_TREE;
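/* The decomposition implemented above, as a standalone sketch
   (illustrative only):

     #include <assert.h>
     #include <math.h>

     int main (void)
     {
       double ipart;
       double frac = modf (-3.25, &ipart);
       assert (ipart == -3.0 && frac == -0.25);   // both keep the sign
       return 0;
     }

   The rvc_normal case computes the same pair with real_trunc and
   real_arithmetic, restoring -0.0 for negative integral inputs.  */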
9515 /* Given a location LOC, an interclass builtin function decl FNDECL
9516 and its single argument ARG, return a folded expression computing
9517 the same, or NULL_TREE if we either couldn't or didn't want to fold
9518 (the latter happens if there's an RTL instruction available). */
9520 static tree
9521 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9523 machine_mode mode;
9525 if (!validate_arg (arg, REAL_TYPE))
9526 return NULL_TREE;
9528 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9529 return NULL_TREE;
9531 mode = TYPE_MODE (TREE_TYPE (arg));
9533 /* If there is no optab, try generic code. */
9534 switch (DECL_FUNCTION_CODE (fndecl))
9536 tree result;
9538 CASE_FLT_FN (BUILT_IN_ISINF):
9540 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9541 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9542 tree const type = TREE_TYPE (arg);
9543 REAL_VALUE_TYPE r;
9544 char buf[128];
9546 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9547 real_from_string (&r, buf);
9548 result = build_call_expr (isgr_fn, 2,
9549 fold_build1_loc (loc, ABS_EXPR, type, arg),
9550 build_real (type, r));
9551 return result;
9553 CASE_FLT_FN (BUILT_IN_FINITE):
9554 case BUILT_IN_ISFINITE:
9556 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9557 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9558 tree const type = TREE_TYPE (arg);
9559 REAL_VALUE_TYPE r;
9560 char buf[128];
9562 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9563 real_from_string (&r, buf);
9564 result = build_call_expr (isle_fn, 2,
9565 fold_build1_loc (loc, ABS_EXPR, type, arg),
9566 build_real (type, r));
9567 /*result = fold_build2_loc (loc, UNGT_EXPR,
9568 TREE_TYPE (TREE_TYPE (fndecl)),
9569 fold_build1_loc (loc, ABS_EXPR, type, arg),
9570 build_real (type, r));
9571 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9572 TREE_TYPE (TREE_TYPE (fndecl)),
9573 result);*/
9574 return result;
9576 case BUILT_IN_ISNORMAL:
9578 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9579 islessequal(fabs(x),DBL_MAX). */
9580 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9581 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9582 tree const type = TREE_TYPE (arg);
9583 REAL_VALUE_TYPE rmax, rmin;
9584 char buf[128];
9586 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9587 real_from_string (&rmax, buf);
9588 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9589 real_from_string (&rmin, buf);
9590 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9591 result = build_call_expr (isle_fn, 2, arg,
9592 build_real (type, rmax));
9593 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9594 build_call_expr (isge_fn, 2, arg,
9595 build_real (type, rmin)));
9596 return result;
9598 default:
9599 break;
9602 return NULL_TREE;
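/* The comparison-based classification emitted above, as a standalone
   sketch (illustrative only; my_isfinite is a hypothetical name):

     #include <assert.h>
     #include <float.h>
     #include <math.h>

     static int my_isfinite (double x)
     {
       return islessequal (fabs (x), DBL_MAX);   // false for Inf and NaN
     }

     int main (void)
     {
       assert (my_isfinite (1.0) && !my_isfinite (INFINITY));
       return 0;
     }

   islessequal is quiet on NaN operands, which is why it is used here
   instead of a plain <= comparison.  */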
9605 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9606 ARG is the argument for the call. */
9608 static tree
9609 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9611 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9612 REAL_VALUE_TYPE r;
9614 if (!validate_arg (arg, REAL_TYPE))
9615 return NULL_TREE;
9617 switch (builtin_index)
9619 case BUILT_IN_ISINF:
9620 if (!HONOR_INFINITIES (arg))
9621 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9623 if (TREE_CODE (arg) == REAL_CST)
9625 r = TREE_REAL_CST (arg);
9626 if (real_isinf (&r))
9627 return real_compare (GT_EXPR, &r, &dconst0)
9628 ? integer_one_node : integer_minus_one_node;
9629 else
9630 return integer_zero_node;
9633 return NULL_TREE;
9635 case BUILT_IN_ISINF_SIGN:
9637 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9638 /* In a boolean context, GCC will fold the inner COND_EXPR to
9639 1. So e.g. "if (isinf_sign(x))" would be folded to just
9640 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9641 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9642 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9643 tree tmp = NULL_TREE;
9645 arg = builtin_save_expr (arg);
9647 if (signbit_fn && isinf_fn)
9649 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9650 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9652 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9653 signbit_call, integer_zero_node);
9654 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9655 isinf_call, integer_zero_node);
9657 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9658 integer_minus_one_node, integer_one_node);
9659 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9660 isinf_call, tmp,
9661 integer_zero_node);
9664 return tmp;
9667 case BUILT_IN_ISFINITE:
9668 if (!HONOR_NANS (arg)
9669 && !HONOR_INFINITIES (arg))
9670 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9672 if (TREE_CODE (arg) == REAL_CST)
9674 r = TREE_REAL_CST (arg);
9675 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9678 return NULL_TREE;
9680 case BUILT_IN_ISNAN:
9681 if (!HONOR_NANS (arg))
9682 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9684 if (TREE_CODE (arg) == REAL_CST)
9686 r = TREE_REAL_CST (arg);
9687 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9690 arg = builtin_save_expr (arg);
9691 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9693 default:
9694 gcc_unreachable ();
9698 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9699 This builtin will generate code to return the appropriate floating
9700 point classification depending on the value of the floating point
9701 number passed in. The possible return values must be supplied as
9702 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9703 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9704 one floating point argument which is "type generic". */
9706 static tree
9707 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9709 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9710 arg, type, res, tmp;
9711 machine_mode mode;
9712 REAL_VALUE_TYPE r;
9713 char buf[128];
9715 /* Verify the required arguments in the original call. */
9716 if (nargs != 6
9717 || !validate_arg (args[0], INTEGER_TYPE)
9718 || !validate_arg (args[1], INTEGER_TYPE)
9719 || !validate_arg (args[2], INTEGER_TYPE)
9720 || !validate_arg (args[3], INTEGER_TYPE)
9721 || !validate_arg (args[4], INTEGER_TYPE)
9722 || !validate_arg (args[5], REAL_TYPE))
9723 return NULL_TREE;
9725 fp_nan = args[0];
9726 fp_infinite = args[1];
9727 fp_normal = args[2];
9728 fp_subnormal = args[3];
9729 fp_zero = args[4];
9730 arg = args[5];
9731 type = TREE_TYPE (arg);
9732 mode = TYPE_MODE (type);
9733 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9735 /* fpclassify(x) ->
9736 isnan(x) ? FP_NAN :
9737 (fabs(x) == Inf ? FP_INFINITE :
9738 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9739 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9741 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9742 build_real (type, dconst0));
9743 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9744 tmp, fp_zero, fp_subnormal);
9746 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9747 real_from_string (&r, buf);
9748 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9749 arg, build_real (type, r));
9750 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9752 if (HONOR_INFINITIES (mode))
9754 real_inf (&r);
9755 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9756 build_real (type, r));
9757 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9758 fp_infinite, res);
9761 if (HONOR_NANS (mode))
9763 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9764 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9767 return res;
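/* The nested conditional built above, as a standalone sketch
   (illustrative only; my_fpclassify is a hypothetical name):

     #include <float.h>
     #include <math.h>

     static int my_fpclassify (double x)
     {
       double ax = fabs (x);
       return !(x == x)      ? FP_NAN
            : ax == INFINITY ? FP_INFINITE
            : ax >= DBL_MIN  ? FP_NORMAL
            : ax == 0.0      ? FP_ZERO
            :                  FP_SUBNORMAL;
     }

   The real expansion tests in the reverse order and guards the Inf and
   NaN arms with HONOR_INFINITIES / HONOR_NANS.  */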
9770 /* Fold a call to an unordered comparison function such as
9771 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9772 being called and ARG0 and ARG1 are the arguments for the call.
9773 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9774 the opposite of the desired result. UNORDERED_CODE is used
9775 for modes that can hold NaNs and ORDERED_CODE is used for
9776 the rest. */
9778 static tree
9779 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9780 enum tree_code unordered_code,
9781 enum tree_code ordered_code)
9783 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9784 enum tree_code code;
9785 tree type0, type1;
9786 enum tree_code code0, code1;
9787 tree cmp_type = NULL_TREE;
9789 type0 = TREE_TYPE (arg0);
9790 type1 = TREE_TYPE (arg1);
9792 code0 = TREE_CODE (type0);
9793 code1 = TREE_CODE (type1);
9795 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9796 /* Choose the wider of two real types. */
9797 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9798 ? type0 : type1;
9799 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9800 cmp_type = type0;
9801 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9802 cmp_type = type1;
9804 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9805 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9807 if (unordered_code == UNORDERED_EXPR)
9809 if (!HONOR_NANS (arg0))
9810 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9811 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9814 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9815 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9816 fold_build2_loc (loc, code, type, arg0, arg1));
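/* Why the opposite codes are passed in: isless and friends must be
   quiet and false on NaN operands, and negating the "unordered or >="
   predicate achieves exactly that.  A standalone sketch (illustrative
   only):

     #include <assert.h>
     #include <math.h>

     int main (void)
     {
       double n = nan ("");
       assert (isless (1.0, 2.0));
       assert (!isless (n, 2.0));   // unordered compares are false
       return 0;
     }

   Hence BUILT_IN_ISLESS below is folded to TRUTH_NOT of UNGE_EXPR.  */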
9819 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9820 arithmetic if it can never overflow, or into internal functions that
9821 return both the result of the arithmetic and an overflow boolean flag in
9822 a complex integer result, or some other check for overflow. */
9824 static tree
9825 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9826 tree arg0, tree arg1, tree arg2)
9828 enum internal_fn ifn = IFN_LAST;
9829 tree type = TREE_TYPE (TREE_TYPE (arg2));
9830 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9831 switch (fcode)
9833 case BUILT_IN_ADD_OVERFLOW:
9834 case BUILT_IN_SADD_OVERFLOW:
9835 case BUILT_IN_SADDL_OVERFLOW:
9836 case BUILT_IN_SADDLL_OVERFLOW:
9837 case BUILT_IN_UADD_OVERFLOW:
9838 case BUILT_IN_UADDL_OVERFLOW:
9839 case BUILT_IN_UADDLL_OVERFLOW:
9840 ifn = IFN_ADD_OVERFLOW;
9841 break;
9842 case BUILT_IN_SUB_OVERFLOW:
9843 case BUILT_IN_SSUB_OVERFLOW:
9844 case BUILT_IN_SSUBL_OVERFLOW:
9845 case BUILT_IN_SSUBLL_OVERFLOW:
9846 case BUILT_IN_USUB_OVERFLOW:
9847 case BUILT_IN_USUBL_OVERFLOW:
9848 case BUILT_IN_USUBLL_OVERFLOW:
9849 ifn = IFN_SUB_OVERFLOW;
9850 break;
9851 case BUILT_IN_MUL_OVERFLOW:
9852 case BUILT_IN_SMUL_OVERFLOW:
9853 case BUILT_IN_SMULL_OVERFLOW:
9854 case BUILT_IN_SMULLL_OVERFLOW:
9855 case BUILT_IN_UMUL_OVERFLOW:
9856 case BUILT_IN_UMULL_OVERFLOW:
9857 case BUILT_IN_UMULLL_OVERFLOW:
9858 ifn = IFN_MUL_OVERFLOW;
9859 break;
9860 default:
9861 gcc_unreachable ();
9863 tree ctype = build_complex_type (type);
9864 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9865 2, arg0, arg1);
9866 tree tgt = save_expr (call);
9867 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9868 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9869 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9870 tree store
9871 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9872 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
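/* What callers of these builtins observe, as a standalone sketch
   (illustrative only):

     #include <assert.h>
     #include <limits.h>

     int main (void)
     {
       int res;
       assert (!__builtin_add_overflow (1, 2, &res) && res == 3);
       assert (__builtin_add_overflow (INT_MAX, 1, &res));   // overflowed
       return 0;
     }

   The fold produces the IFN_*_OVERFLOW complex value, stores its real
   part through the third argument and yields the imaginary part as the
   boolean overflow flag.  */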
9875 /* Fold a call to built-in function FNDECL with 0 arguments.
9876 This function returns NULL_TREE if no simplification was possible. */
9878 static tree
9879 fold_builtin_0 (location_t loc, tree fndecl)
9881 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9882 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9883 switch (fcode)
9885 CASE_FLT_FN (BUILT_IN_INF):
9886 case BUILT_IN_INFD32:
9887 case BUILT_IN_INFD64:
9888 case BUILT_IN_INFD128:
9889 return fold_builtin_inf (loc, type, true);
9891 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9892 return fold_builtin_inf (loc, type, false);
9894 case BUILT_IN_CLASSIFY_TYPE:
9895 return fold_builtin_classify_type (NULL_TREE);
9897 default:
9898 break;
9900 return NULL_TREE;
9903 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9904 This function returns NULL_TREE if no simplification was possible. */
9906 static tree
9907 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9909 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9910 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9911 switch (fcode)
9913 case BUILT_IN_CONSTANT_P:
9915 tree val = fold_builtin_constant_p (arg0);
9917 /* Gimplification will pull the CALL_EXPR for the builtin out of
9918 an if condition. When not optimizing, we'll not CSE it back.
9919 To avoid regressions such as link errors, return false now. */
9920 if (!val && !optimize)
9921 val = integer_zero_node;
9923 return val;
9926 case BUILT_IN_CLASSIFY_TYPE:
9927 return fold_builtin_classify_type (arg0);
9929 case BUILT_IN_STRLEN:
9930 return fold_builtin_strlen (loc, type, arg0);
9932 CASE_FLT_FN (BUILT_IN_FABS):
9933 case BUILT_IN_FABSD32:
9934 case BUILT_IN_FABSD64:
9935 case BUILT_IN_FABSD128:
9936 return fold_builtin_fabs (loc, arg0, type);
9938 case BUILT_IN_ABS:
9939 case BUILT_IN_LABS:
9940 case BUILT_IN_LLABS:
9941 case BUILT_IN_IMAXABS:
9942 return fold_builtin_abs (loc, arg0, type);
9944 CASE_FLT_FN (BUILT_IN_CONJ):
9945 if (validate_arg (arg0, COMPLEX_TYPE)
9946 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9947 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9948 break;
9950 CASE_FLT_FN (BUILT_IN_CREAL):
9951 if (validate_arg (arg0, COMPLEX_TYPE)
9952 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9953 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9954 break;
9956 CASE_FLT_FN (BUILT_IN_CIMAG):
9957 if (validate_arg (arg0, COMPLEX_TYPE)
9958 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9959 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9960 break;
9962 CASE_FLT_FN (BUILT_IN_CCOS):
9963 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9965 CASE_FLT_FN (BUILT_IN_CCOSH):
9966 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9968 CASE_FLT_FN (BUILT_IN_CPROJ):
9969 return fold_builtin_cproj (loc, arg0, type);
9971 CASE_FLT_FN (BUILT_IN_CSIN):
9972 if (validate_arg (arg0, COMPLEX_TYPE)
9973 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9974 return do_mpc_arg1 (arg0, type, mpc_sin);
9975 break;
9977 CASE_FLT_FN (BUILT_IN_CSINH):
9978 if (validate_arg (arg0, COMPLEX_TYPE)
9979 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9980 return do_mpc_arg1 (arg0, type, mpc_sinh);
9981 break;
9983 CASE_FLT_FN (BUILT_IN_CTAN):
9984 if (validate_arg (arg0, COMPLEX_TYPE)
9985 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9986 return do_mpc_arg1 (arg0, type, mpc_tan);
9987 break;
9989 CASE_FLT_FN (BUILT_IN_CTANH):
9990 if (validate_arg (arg0, COMPLEX_TYPE)
9991 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9992 return do_mpc_arg1 (arg0, type, mpc_tanh);
9993 break;
9995 CASE_FLT_FN (BUILT_IN_CLOG):
9996 if (validate_arg (arg0, COMPLEX_TYPE)
9997 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9998 return do_mpc_arg1 (arg0, type, mpc_log);
9999 break;
10001 CASE_FLT_FN (BUILT_IN_CSQRT):
10002 if (validate_arg (arg0, COMPLEX_TYPE)
10003 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10004 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10005 break;
10007 CASE_FLT_FN (BUILT_IN_CASIN):
10008 if (validate_arg (arg0, COMPLEX_TYPE)
10009 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10010 return do_mpc_arg1 (arg0, type, mpc_asin);
10011 break;
10013 CASE_FLT_FN (BUILT_IN_CACOS):
10014 if (validate_arg (arg0, COMPLEX_TYPE)
10015 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10016 return do_mpc_arg1 (arg0, type, mpc_acos);
10017 break;
10019 CASE_FLT_FN (BUILT_IN_CATAN):
10020 if (validate_arg (arg0, COMPLEX_TYPE)
10021 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10022 return do_mpc_arg1 (arg0, type, mpc_atan);
10023 break;
10025 CASE_FLT_FN (BUILT_IN_CASINH):
10026 if (validate_arg (arg0, COMPLEX_TYPE)
10027 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10028 return do_mpc_arg1 (arg0, type, mpc_asinh);
10029 break;
10031 CASE_FLT_FN (BUILT_IN_CACOSH):
10032 if (validate_arg (arg0, COMPLEX_TYPE)
10033 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10034 return do_mpc_arg1 (arg0, type, mpc_acosh);
10035 break;
10037 CASE_FLT_FN (BUILT_IN_CATANH):
10038 if (validate_arg (arg0, COMPLEX_TYPE)
10039 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10040 return do_mpc_arg1 (arg0, type, mpc_atanh);
10041 break;
10043 CASE_FLT_FN (BUILT_IN_CABS):
10044 return fold_builtin_cabs (loc, arg0, type, fndecl);
10046 CASE_FLT_FN (BUILT_IN_CARG):
10047 return fold_builtin_carg (loc, arg0, type);
10049 CASE_FLT_FN (BUILT_IN_SQRT):
10050 return fold_builtin_sqrt (loc, arg0, type);
10052 CASE_FLT_FN (BUILT_IN_CBRT):
10053 return fold_builtin_cbrt (loc, arg0, type);
10055 CASE_FLT_FN (BUILT_IN_ASIN):
10056 if (validate_arg (arg0, REAL_TYPE))
10057 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10058 &dconstm1, &dconst1, true);
10059 break;
10061 CASE_FLT_FN (BUILT_IN_ACOS):
10062 if (validate_arg (arg0, REAL_TYPE))
10063 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10064 &dconstm1, &dconst1, true);
10065 break;
10067 CASE_FLT_FN (BUILT_IN_ATAN):
10068 if (validate_arg (arg0, REAL_TYPE))
10069 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10070 break;
10072 CASE_FLT_FN (BUILT_IN_ASINH):
10073 if (validate_arg (arg0, REAL_TYPE))
10074 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10075 break;
10077 CASE_FLT_FN (BUILT_IN_ACOSH):
10078 if (validate_arg (arg0, REAL_TYPE))
10079 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10080 &dconst1, NULL, true);
10081 break;
10083 CASE_FLT_FN (BUILT_IN_ATANH):
10084 if (validate_arg (arg0, REAL_TYPE))
10085 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10086 &dconstm1, &dconst1, false);
10087 break;
10089 CASE_FLT_FN (BUILT_IN_SIN):
10090 if (validate_arg (arg0, REAL_TYPE))
10091 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10092 break;
10094 CASE_FLT_FN (BUILT_IN_COS):
10095 return fold_builtin_cos (loc, arg0, type, fndecl);
10097 CASE_FLT_FN (BUILT_IN_TAN):
10098 return fold_builtin_tan (arg0, type);
10100 CASE_FLT_FN (BUILT_IN_CEXP):
10101 return fold_builtin_cexp (loc, arg0, type);
10103 CASE_FLT_FN (BUILT_IN_CEXPI):
10104 if (validate_arg (arg0, REAL_TYPE))
10105 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10106 break;
10108 CASE_FLT_FN (BUILT_IN_SINH):
10109 if (validate_arg (arg0, REAL_TYPE))
10110 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10111 break;
10113 CASE_FLT_FN (BUILT_IN_COSH):
10114 return fold_builtin_cosh (loc, arg0, type, fndecl);
10116 CASE_FLT_FN (BUILT_IN_TANH):
10117 if (validate_arg (arg0, REAL_TYPE))
10118 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10119 break;
10121 CASE_FLT_FN (BUILT_IN_ERF):
10122 if (validate_arg (arg0, REAL_TYPE))
10123 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10124 break;
10126 CASE_FLT_FN (BUILT_IN_ERFC):
10127 if (validate_arg (arg0, REAL_TYPE))
10128 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10129 break;
10131 CASE_FLT_FN (BUILT_IN_TGAMMA):
10132 if (validate_arg (arg0, REAL_TYPE))
10133 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10134 break;
10136 CASE_FLT_FN (BUILT_IN_EXP):
10137 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10139 CASE_FLT_FN (BUILT_IN_EXP2):
10140 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10142 CASE_FLT_FN (BUILT_IN_EXP10):
10143 CASE_FLT_FN (BUILT_IN_POW10):
10144 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10146 CASE_FLT_FN (BUILT_IN_EXPM1):
10147 if (validate_arg (arg0, REAL_TYPE))
10148 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10149 break;
10151 CASE_FLT_FN (BUILT_IN_LOG):
10152 if (validate_arg (arg0, REAL_TYPE))
10153 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
10154 break;
10156 CASE_FLT_FN (BUILT_IN_LOG2):
10157 if (validate_arg (arg0, REAL_TYPE))
10158 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
10159 break;
10161 CASE_FLT_FN (BUILT_IN_LOG10):
10162 if (validate_arg (arg0, REAL_TYPE))
10163 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
10164 break;
10166 CASE_FLT_FN (BUILT_IN_LOG1P):
10167 if (validate_arg (arg0, REAL_TYPE))
10168 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10169 &dconstm1, NULL, false);
10170 break;
10172 CASE_FLT_FN (BUILT_IN_J0):
10173 if (validate_arg (arg0, REAL_TYPE))
10174 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10175 NULL, NULL, 0);
10176 break;
10178 CASE_FLT_FN (BUILT_IN_J1):
10179 if (validate_arg (arg0, REAL_TYPE))
10180 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10181 NULL, NULL, 0);
10182 break;
10184 CASE_FLT_FN (BUILT_IN_Y0):
10185 if (validate_arg (arg0, REAL_TYPE))
10186 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10187 &dconst0, NULL, false);
10188 break;
10190 CASE_FLT_FN (BUILT_IN_Y1):
10191 if (validate_arg (arg0, REAL_TYPE))
10192 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10193 &dconst0, NULL, false);
10194 break;
10196 CASE_FLT_FN (BUILT_IN_NAN):
10197 case BUILT_IN_NAND32:
10198 case BUILT_IN_NAND64:
10199 case BUILT_IN_NAND128:
10200 return fold_builtin_nan (arg0, type, true);
10202 CASE_FLT_FN (BUILT_IN_NANS):
10203 return fold_builtin_nan (arg0, type, false);
10205 CASE_FLT_FN (BUILT_IN_FLOOR):
10206 return fold_builtin_floor (loc, fndecl, arg0);
10208 CASE_FLT_FN (BUILT_IN_CEIL):
10209 return fold_builtin_ceil (loc, fndecl, arg0);
10211 CASE_FLT_FN (BUILT_IN_TRUNC):
10212 return fold_builtin_trunc (loc, fndecl, arg0);
10214 CASE_FLT_FN (BUILT_IN_ROUND):
10215 return fold_builtin_round (loc, fndecl, arg0);
10217 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10218 CASE_FLT_FN (BUILT_IN_RINT):
10219 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10221 CASE_FLT_FN (BUILT_IN_ICEIL):
10222 CASE_FLT_FN (BUILT_IN_LCEIL):
10223 CASE_FLT_FN (BUILT_IN_LLCEIL):
10224 CASE_FLT_FN (BUILT_IN_LFLOOR):
10225 CASE_FLT_FN (BUILT_IN_IFLOOR):
10226 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10227 CASE_FLT_FN (BUILT_IN_IROUND):
10228 CASE_FLT_FN (BUILT_IN_LROUND):
10229 CASE_FLT_FN (BUILT_IN_LLROUND):
10230 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10232 CASE_FLT_FN (BUILT_IN_IRINT):
10233 CASE_FLT_FN (BUILT_IN_LRINT):
10234 CASE_FLT_FN (BUILT_IN_LLRINT):
10235 return fold_fixed_mathfn (loc, fndecl, arg0);
10237 case BUILT_IN_BSWAP16:
10238 case BUILT_IN_BSWAP32:
10239 case BUILT_IN_BSWAP64:
10240 return fold_builtin_bswap (fndecl, arg0);
10242 CASE_INT_FN (BUILT_IN_FFS):
10243 CASE_INT_FN (BUILT_IN_CLZ):
10244 CASE_INT_FN (BUILT_IN_CTZ):
10245 CASE_INT_FN (BUILT_IN_CLRSB):
10246 CASE_INT_FN (BUILT_IN_POPCOUNT):
10247 CASE_INT_FN (BUILT_IN_PARITY):
10248 return fold_builtin_bitop (fndecl, arg0);
10250 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10251 return fold_builtin_signbit (loc, arg0, type);
10253 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10254 return fold_builtin_significand (loc, arg0, type);
10256 CASE_FLT_FN (BUILT_IN_ILOGB):
10257 CASE_FLT_FN (BUILT_IN_LOGB):
10258 return fold_builtin_logb (loc, arg0, type);
10260 case BUILT_IN_ISASCII:
10261 return fold_builtin_isascii (loc, arg0);
10263 case BUILT_IN_TOASCII:
10264 return fold_builtin_toascii (loc, arg0);
10266 case BUILT_IN_ISDIGIT:
10267 return fold_builtin_isdigit (loc, arg0);
10269 CASE_FLT_FN (BUILT_IN_FINITE):
10270 case BUILT_IN_FINITED32:
10271 case BUILT_IN_FINITED64:
10272 case BUILT_IN_FINITED128:
10273 case BUILT_IN_ISFINITE:
10275 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10276 if (ret)
10277 return ret;
10278 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10281 CASE_FLT_FN (BUILT_IN_ISINF):
10282 case BUILT_IN_ISINFD32:
10283 case BUILT_IN_ISINFD64:
10284 case BUILT_IN_ISINFD128:
10286 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10287 if (ret)
10288 return ret;
10289 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10292 case BUILT_IN_ISNORMAL:
10293 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10295 case BUILT_IN_ISINF_SIGN:
10296 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10298 CASE_FLT_FN (BUILT_IN_ISNAN):
10299 case BUILT_IN_ISNAND32:
10300 case BUILT_IN_ISNAND64:
10301 case BUILT_IN_ISNAND128:
10302 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10304 case BUILT_IN_FREE:
10305 if (integer_zerop (arg0))
10306 return build_empty_stmt (loc);
10307 break;
10309 default:
10310 break;
10313 return NULL_TREE;
10317 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10318 This function returns NULL_TREE if no simplification was possible. */
10320 static tree
10321 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
10323 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10324 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10326 switch (fcode)
10328 CASE_FLT_FN (BUILT_IN_JN):
10329 if (validate_arg (arg0, INTEGER_TYPE)
10330 && validate_arg (arg1, REAL_TYPE))
10331 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10332 break;
10334 CASE_FLT_FN (BUILT_IN_YN):
10335 if (validate_arg (arg0, INTEGER_TYPE)
10336 && validate_arg (arg1, REAL_TYPE))
10337 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10338 &dconst0, false);
10339 break;
10341 CASE_FLT_FN (BUILT_IN_DREM):
10342 CASE_FLT_FN (BUILT_IN_REMAINDER):
10343 if (validate_arg (arg0, REAL_TYPE)
10344 && validate_arg (arg1, REAL_TYPE))
10345 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10346 break;
10348 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10349 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10350 if (validate_arg (arg0, REAL_TYPE)
10351 && validate_arg (arg1, POINTER_TYPE))
10352 return do_mpfr_lgamma_r (arg0, arg1, type);
10353 break;
10355 CASE_FLT_FN (BUILT_IN_ATAN2):
10356 if (validate_arg (arg0, REAL_TYPE)
10357 && validate_arg (arg1, REAL_TYPE))
10358 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10359 break;
10361 CASE_FLT_FN (BUILT_IN_FDIM):
10362 if (validate_arg (arg0, REAL_TYPE)
10363 && validate_arg (arg1, REAL_TYPE))
10364 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10365 break;
10367 CASE_FLT_FN (BUILT_IN_HYPOT):
10368 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10370 CASE_FLT_FN (BUILT_IN_CPOW):
10371 if (validate_arg (arg0, COMPLEX_TYPE)
10372 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10373 && validate_arg (arg1, COMPLEX_TYPE)
10374 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10375 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10376 break;
10378 CASE_FLT_FN (BUILT_IN_LDEXP):
10379 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10380 CASE_FLT_FN (BUILT_IN_SCALBN):
10381 CASE_FLT_FN (BUILT_IN_SCALBLN):
10382 return fold_builtin_load_exponent (loc, arg0, arg1,
10383 type, /*ldexp=*/false);
10385 CASE_FLT_FN (BUILT_IN_FREXP):
10386 return fold_builtin_frexp (loc, arg0, arg1, type);
10388 CASE_FLT_FN (BUILT_IN_MODF):
10389 return fold_builtin_modf (loc, arg0, arg1, type);
10391 case BUILT_IN_STRSTR:
10392 return fold_builtin_strstr (loc, arg0, arg1, type);
10394 case BUILT_IN_STRSPN:
10395 return fold_builtin_strspn (loc, arg0, arg1);
10397 case BUILT_IN_STRCSPN:
10398 return fold_builtin_strcspn (loc, arg0, arg1);
10400 case BUILT_IN_STRCHR:
10401 case BUILT_IN_INDEX:
10402 return fold_builtin_strchr (loc, arg0, arg1, type);
10404 case BUILT_IN_STRRCHR:
10405 case BUILT_IN_RINDEX:
10406 return fold_builtin_strrchr (loc, arg0, arg1, type);
10408 case BUILT_IN_STRCMP:
10409 return fold_builtin_strcmp (loc, arg0, arg1);
10411 case BUILT_IN_STRPBRK:
10412 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10414 case BUILT_IN_EXPECT:
10415 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10417 CASE_FLT_FN (BUILT_IN_POW):
10418 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10420 CASE_FLT_FN (BUILT_IN_POWI):
10421 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10423 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10424 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10426 CASE_FLT_FN (BUILT_IN_FMIN):
10427 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10429 CASE_FLT_FN (BUILT_IN_FMAX):
10430 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10432 case BUILT_IN_ISGREATER:
10433 return fold_builtin_unordered_cmp (loc, fndecl,
10434 arg0, arg1, UNLE_EXPR, LE_EXPR);
10435 case BUILT_IN_ISGREATEREQUAL:
10436 return fold_builtin_unordered_cmp (loc, fndecl,
10437 arg0, arg1, UNLT_EXPR, LT_EXPR);
10438 case BUILT_IN_ISLESS:
10439 return fold_builtin_unordered_cmp (loc, fndecl,
10440 arg0, arg1, UNGE_EXPR, GE_EXPR);
10441 case BUILT_IN_ISLESSEQUAL:
10442 return fold_builtin_unordered_cmp (loc, fndecl,
10443 arg0, arg1, UNGT_EXPR, GT_EXPR);
10444 case BUILT_IN_ISLESSGREATER:
10445 return fold_builtin_unordered_cmp (loc, fndecl,
10446 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10447 case BUILT_IN_ISUNORDERED:
10448 return fold_builtin_unordered_cmp (loc, fndecl,
10449 arg0, arg1, UNORDERED_EXPR,
10450 NOP_EXPR);
10452 /* We do the folding for va_start in the expander. */
10453 case BUILT_IN_VA_START:
10454 break;
10456 case BUILT_IN_OBJECT_SIZE:
10457 return fold_builtin_object_size (arg0, arg1);
10459 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10460 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10462 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10463 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10465 default:
10466 break;
10468 return NULL_TREE;
10471 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10472 and ARG2.
10473 This function returns NULL_TREE if no simplification was possible. */
10475 static tree
10476 fold_builtin_3 (location_t loc, tree fndecl,
10477 tree arg0, tree arg1, tree arg2)
10479 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10480 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10481 switch (fcode)
10484 CASE_FLT_FN (BUILT_IN_SINCOS):
10485 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10487 CASE_FLT_FN (BUILT_IN_FMA):
10488 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10491 CASE_FLT_FN (BUILT_IN_REMQUO):
10492 if (validate_arg (arg0, REAL_TYPE)
10493 && validate_arg (arg1, REAL_TYPE)
10494 && validate_arg (arg2, POINTER_TYPE))
10495 return do_mpfr_remquo (arg0, arg1, arg2);
10496 break;
10498 case BUILT_IN_STRNCMP:
10499 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10501 case BUILT_IN_MEMCHR:
10502 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10504 case BUILT_IN_BCMP:
10505 case BUILT_IN_MEMCMP:
10506 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10508 case BUILT_IN_EXPECT:
10509 return fold_builtin_expect (loc, arg0, arg1, arg2);
10511 case BUILT_IN_ADD_OVERFLOW:
10512 case BUILT_IN_SUB_OVERFLOW:
10513 case BUILT_IN_MUL_OVERFLOW:
10514 case BUILT_IN_SADD_OVERFLOW:
10515 case BUILT_IN_SADDL_OVERFLOW:
10516 case BUILT_IN_SADDLL_OVERFLOW:
10517 case BUILT_IN_SSUB_OVERFLOW:
10518 case BUILT_IN_SSUBL_OVERFLOW:
10519 case BUILT_IN_SSUBLL_OVERFLOW:
10520 case BUILT_IN_SMUL_OVERFLOW:
10521 case BUILT_IN_SMULL_OVERFLOW:
10522 case BUILT_IN_SMULLL_OVERFLOW:
10523 case BUILT_IN_UADD_OVERFLOW:
10524 case BUILT_IN_UADDL_OVERFLOW:
10525 case BUILT_IN_UADDLL_OVERFLOW:
10526 case BUILT_IN_USUB_OVERFLOW:
10527 case BUILT_IN_USUBL_OVERFLOW:
10528 case BUILT_IN_USUBLL_OVERFLOW:
10529 case BUILT_IN_UMUL_OVERFLOW:
10530 case BUILT_IN_UMULL_OVERFLOW:
10531 case BUILT_IN_UMULLL_OVERFLOW:
10532 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10534 default:
10535 break;
10537 return NULL_TREE;
10540 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10541 arguments. The trailing boolean parameter (formerly IGNORE, true if
10542 the result of the call is ignored) is now unused. This function
10543 returns NULL_TREE if no simplification was possible. */
10545 tree
10546 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
10548 tree ret = NULL_TREE;
10550 switch (nargs)
10552 case 0:
10553 ret = fold_builtin_0 (loc, fndecl);
10554 break;
10555 case 1:
10556 ret = fold_builtin_1 (loc, fndecl, args[0]);
10557 break;
10558 case 2:
10559 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
10560 break;
10561 case 3:
10562 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10563 break;
10564 default:
10565 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10566 break;
10568 if (ret)
10570 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10571 SET_EXPR_LOCATION (ret, loc);
10572 TREE_NO_WARNING (ret) = 1;
10573 return ret;
10575 return NULL_TREE;
10578 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10579 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10580 of arguments in ARGS to be omitted. OLDNARGS is the number of
10581 elements in ARGS. */
10583 static tree
10584 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10585 int skip, tree fndecl, int n, va_list newargs)
10587 int nargs = oldnargs - skip + n;
10588 tree *buffer;
10590 if (n > 0)
10592 int i, j;
10594 buffer = XALLOCAVEC (tree, nargs);
10595 for (i = 0; i < n; i++)
10596 buffer[i] = va_arg (newargs, tree);
10597 for (j = skip; j < oldnargs; j++, i++)
10598 buffer[i] = args[j];
10600 else
10601 buffer = args + skip;
10603 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10606 /* Return true if FNDECL shouldn't be folded right now.
10607 If a built-in function has an always_inline wrapper, defer
10608 folding it until after always_inline functions have
10609 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
10610 might not be performed. */
10612 bool
10613 avoid_folding_inline_builtin (tree fndecl)
10615 return (DECL_DECLARED_INLINE_P (fndecl)
10616 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10617 && cfun
10618 && !cfun->always_inline_functions_inlined
10619 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10622 /* A wrapper function for builtin folding that prevents warnings for
10623 "statement without effect" and the like, caused by removing the
10624 call node before the warning is generated. */
10626 tree
10627 fold_call_expr (location_t loc, tree exp, bool ignore)
10629 tree ret = NULL_TREE;
10630 tree fndecl = get_callee_fndecl (exp);
10631 if (fndecl
10632 && TREE_CODE (fndecl) == FUNCTION_DECL
10633 && DECL_BUILT_IN (fndecl)
10634 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10635 yet. Defer folding until we see all the arguments
10636 (after inlining). */
10637 && !CALL_EXPR_VA_ARG_PACK (exp))
10639 int nargs = call_expr_nargs (exp);
10641 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10642 instead the last argument is __builtin_va_arg_pack (). Defer folding
10643 even in that case, until arguments are finalized. */
10644 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10646 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10647 if (fndecl2
10648 && TREE_CODE (fndecl2) == FUNCTION_DECL
10649 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10650 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10651 return NULL_TREE;
10654 if (avoid_folding_inline_builtin (fndecl))
10655 return NULL_TREE;
10657 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10658 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10659 CALL_EXPR_ARGP (exp), ignore);
10660 else
10662 tree *args = CALL_EXPR_ARGP (exp);
10663 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10664 if (ret)
10665 return ret;
10668 return NULL_TREE;
10671 /* Fold a CALL_EXPR with FN as the function expression; the unnamed type
10672 argument is unused. N arguments are passed in the array ARGARRAY.
10673 Return a folded expression or NULL_TREE if no simplification was possible. */
10675 tree
10676 fold_builtin_call_array (location_t loc, tree,
10677 tree fn,
10678 int n,
10679 tree *argarray)
10681 if (TREE_CODE (fn) != ADDR_EXPR)
10682 return NULL_TREE;
10684 tree fndecl = TREE_OPERAND (fn, 0);
10685 if (TREE_CODE (fndecl) == FUNCTION_DECL
10686 && DECL_BUILT_IN (fndecl))
10688 /* If last argument is __builtin_va_arg_pack (), arguments to this
10689 function are not finalized yet. Defer folding until they are. */
10690 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10692 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10693 if (fndecl2
10694 && TREE_CODE (fndecl2) == FUNCTION_DECL
10695 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10696 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10697 return NULL_TREE;
10699 if (avoid_folding_inline_builtin (fndecl))
10700 return NULL_TREE;
10701 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10702 return targetm.fold_builtin (fndecl, n, argarray, false);
10703 else
10704 return fold_builtin_n (loc, fndecl, argarray, n, false);
10707 return NULL_TREE;
10710 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10711 along with N new arguments specified as the "..." parameters. SKIP
10712 is the number of arguments in EXP to be omitted. This function is used
10713 to do varargs-to-varargs transformations. */
10715 static tree
10716 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10718 va_list ap;
10719 tree t;
10721 va_start (ap, n);
10722 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10723 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10724 va_end (ap);
10726 return t;
10729 /* Validate a single argument ARG against a tree code CODE representing
10730 a type. */
10732 static bool
10733 validate_arg (const_tree arg, enum tree_code code)
10735 if (!arg)
10736 return false;
10737 else if (code == POINTER_TYPE)
10738 return POINTER_TYPE_P (TREE_TYPE (arg));
10739 else if (code == INTEGER_TYPE)
10740 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10741 return code == TREE_CODE (TREE_TYPE (arg));
10744 /* This function validates the types of a function call argument list
10745 against a specified list of tree_codes. If the last specifier is a 0,
10746 that represents an ellipsis, otherwise the last specifier must be a
10747 VOID_TYPE.
10749 This is the GIMPLE version of validate_arglist. Eventually we want to
10750 completely convert builtins.c to work from GIMPLEs and the tree based
10751 validate_arglist will then be removed. */
10753 bool
10754 validate_gimple_arglist (const gcall *call, ...)
10756 enum tree_code code;
10757 bool res = false;
10758 va_list ap;
10759 const_tree arg;
10760 size_t i;
10762 va_start (ap, call);
10763 i = 0;
10765 do
10767 code = (enum tree_code) va_arg (ap, int);
10768 switch (code)
10770 case 0:
10771 /* This signifies an ellipsis; any further arguments are all ok. */
10772 res = true;
10773 goto end;
10774 case VOID_TYPE:
10775 /* This signifies an endlink; if no arguments remain, return
10776 true, otherwise return false. */
10777 res = (i == gimple_call_num_args (call));
10778 goto end;
10779 default:
10780 /* If no parameters remain or the parameter's code does not
10781 match the specified code, return false. Otherwise continue
10782 checking any remaining arguments. */
10783 arg = gimple_call_arg (call, i++);
10784 if (!validate_arg (arg, code))
10785 goto end;
10786 break;
10789 while (1);
10791 /* We need gotos here so that va_end is reached through a single
10792 exit point. */
10793 end: ;
10794 va_end (ap);
10796 return res;
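/* Typical use, as a sketch (illustrative only; CALL stands for some
   gcall being folded).  A strstr-like call would be checked with:

     if (!validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
                                   VOID_TYPE))
       return false;

   The trailing VOID_TYPE is the endlink; a trailing 0 would instead
   accept any number of further arguments.  */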
10799 /* Default target-specific builtin expander that does nothing. */
10801 rtx
10802 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10803 rtx target ATTRIBUTE_UNUSED,
10804 rtx subtarget ATTRIBUTE_UNUSED,
10805 machine_mode mode ATTRIBUTE_UNUSED,
10806 int ignore ATTRIBUTE_UNUSED)
10808 return NULL_RTX;
10811 /* Returns true if EXP represents data that would potentially reside
10812 in a readonly section. */
10814 bool
10815 readonly_data_expr (tree exp)
10817 STRIP_NOPS (exp);
10819 if (TREE_CODE (exp) != ADDR_EXPR)
10820 return false;
10822 exp = get_base_address (TREE_OPERAND (exp, 0));
10823 if (!exp)
10824 return false;
10826 /* Make sure we call decl_readonly_section only for trees it
10827 can handle (since it returns true for everything it doesn't
10828 understand). */
10829 if (TREE_CODE (exp) == STRING_CST
10830 || TREE_CODE (exp) == CONSTRUCTOR
10831 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10832 return decl_readonly_section (exp, 0);
10833 else
10834 return false;
10837 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10838 to the call, and TYPE is its return type.
10840 Return NULL_TREE if no simplification was possible, otherwise return the
10841 simplified form of the call as a tree.
10843 The simplified form may be a constant or other expression which
10844 computes the same value, but in a more efficient manner (including
10845 calls to other builtin functions).
10847 The call may contain arguments which need to be evaluated, but
10848 which are not useful to determine the result of the call. In
10849 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10850 COMPOUND_EXPR will be an argument which must be evaluated.
10851 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10852 COMPOUND_EXPR in the chain will contain the tree for the simplified
10853 form of the builtin function call. */
10855 static tree
10856 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10858 if (!validate_arg (s1, POINTER_TYPE)
10859 || !validate_arg (s2, POINTER_TYPE))
10860 return NULL_TREE;
10861 else
10863 tree fn;
10864 const char *p1, *p2;
10866 p2 = c_getstr (s2);
10867 if (p2 == NULL)
10868 return NULL_TREE;
10870 p1 = c_getstr (s1);
10871 if (p1 != NULL)
10873 const char *r = strstr (p1, p2);
10874 tree tem;
10876 if (r == NULL)
10877 return build_int_cst (TREE_TYPE (s1), 0);
10879 /* Return an offset into the constant string argument. */
10880 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10881 return fold_convert_loc (loc, type, tem);
10884 /* The argument is const char *, and the result is char *, so we need
10885 a type conversion here to avoid a warning. */
10886 if (p2[0] == '\0')
10887 return fold_convert_loc (loc, type, s1);
10889 if (p2[1] != '\0')
10890 return NULL_TREE;
10892 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10893 if (!fn)
10894 return NULL_TREE;
10896 /* New argument list transforming strstr(s1, s2) to
10897 strchr(s1, s2[0]). */
10898 return build_call_expr_loc (loc, fn, 2, s1,
10899 build_int_cst (integer_type_node, p2[0]));
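/* The effect of the single-character case, as a standalone sketch
   (illustrative only):

     #include <assert.h>
     #include <string.h>

     int main (void)
     {
       const char *s = "hay";
       assert (strstr (s, "a") == strchr (s, 'a'));
       assert (strstr (s, "") == s);   // empty needle matches at the start
       return 0;
     }

   Only needles of length 0 or 1 are rewritten; longer constant needles
   are left alone unless S1 is also a constant string.  */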
10903 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10904 the call, and TYPE is its return type.
10906 Return NULL_TREE if no simplification was possible, otherwise return the
10907 simplified form of the call as a tree.
10909 The simplified form may be a constant or other expression which
10910 computes the same value, but in a more efficient manner (including
10911 calls to other builtin functions).
10913 The call may contain arguments which need to be evaluated, but
10914 which are not useful to determine the result of the call. In
10915 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10916 COMPOUND_EXPR will be an argument which must be evaluated.
10917 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10918 COMPOUND_EXPR in the chain will contain the tree for the simplified
10919 form of the builtin function call. */
10921 static tree
10922 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10924 if (!validate_arg (s1, POINTER_TYPE)
10925 || !validate_arg (s2, INTEGER_TYPE))
10926 return NULL_TREE;
10927 else
10929 const char *p1;
10931 if (TREE_CODE (s2) != INTEGER_CST)
10932 return NULL_TREE;
10934 p1 = c_getstr (s1);
10935 if (p1 != NULL)
10937 char c;
10938 const char *r;
10939 tree tem;
10941 if (target_char_cast (s2, &c))
10942 return NULL_TREE;
10944 r = strchr (p1, c);
10946 if (r == NULL)
10947 return build_int_cst (TREE_TYPE (s1), 0);
10949 /* Return an offset into the constant string argument. */
10950 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10951 return fold_convert_loc (loc, type, tem);
10953 return NULL_TREE;
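/* For example, strchr ("abc", 'b') folds to "abc" + 1 and
   strchr ("abc", 'z') folds to a null pointer; a non-constant
   haystack is left for the library.  */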
10957 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10958 the call, and TYPE is its return type.
10960 Return NULL_TREE if no simplification was possible, otherwise return the
10961 simplified form of the call as a tree.
10963 The simplified form may be a constant or other expression which
10964 computes the same value, but in a more efficient manner (including
10965 calls to other builtin functions).
10967 The call may contain arguments which need to be evaluated, but
10968 which are not useful to determine the result of the call. In
10969 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10970 COMPOUND_EXPR will be an argument which must be evaluated.
10971 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10972 COMPOUND_EXPR in the chain will contain the tree for the simplified
10973 form of the builtin function call. */
10975 static tree
10976 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10978 if (!validate_arg (s1, POINTER_TYPE)
10979 || !validate_arg (s2, INTEGER_TYPE))
10980 return NULL_TREE;
10981 else
10983 tree fn;
10984 const char *p1;
10986 if (TREE_CODE (s2) != INTEGER_CST)
10987 return NULL_TREE;
10989 p1 = c_getstr (s1);
10990 if (p1 != NULL)
10992 char c;
10993 const char *r;
10994 tree tem;
10996 if (target_char_cast (s2, &c))
10997 return NULL_TREE;
10999 r = strrchr (p1, c);
11001 if (r == NULL)
11002 return build_int_cst (TREE_TYPE (s1), 0);
11004 /* Return an offset into the constant string argument. */
11005 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11006 return fold_convert_loc (loc, type, tem);
11009 if (! integer_zerop (s2))
11010 return NULL_TREE;
11012 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11013 if (!fn)
11014 return NULL_TREE;
11016 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11017 return build_call_expr_loc (loc, fn, 2, s1, s2);
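/* For example, strrchr ("abca", 'a') folds to "abca" + 3, and
   strrchr (s, '\0') becomes strchr (s, '\0'), since the first and
   last occurrence of the terminator coincide.  */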
11021 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11022 to the call, and TYPE is its return type.
11024 Return NULL_TREE if no simplification was possible, otherwise return the
11025 simplified form of the call as a tree.
11027 The simplified form may be a constant or other expression which
11028 computes the same value, but in a more efficient manner (including
11029 calls to other builtin functions).
11031 The call may contain arguments which need to be evaluated, but
11032 which are not useful to determine the result of the call. In
11033 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11034 COMPOUND_EXPR will be an argument which must be evaluated.
11035 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11036 COMPOUND_EXPR in the chain will contain the tree for the simplified
11037 form of the builtin function call. */
11039 static tree
11040 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11042 if (!validate_arg (s1, POINTER_TYPE)
11043 || !validate_arg (s2, POINTER_TYPE))
11044 return NULL_TREE;
11045 else
11047 tree fn;
11048 const char *p1, *p2;
11050 p2 = c_getstr (s2);
11051 if (p2 == NULL)
11052 return NULL_TREE;
11054 p1 = c_getstr (s1);
11055 if (p1 != NULL)
11057 const char *r = strpbrk (p1, p2);
11058 tree tem;
11060 if (r == NULL)
11061 return build_int_cst (TREE_TYPE (s1), 0);
11063 /* Return an offset into the constant string argument. */
11064 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11065 return fold_convert_loc (loc, type, tem);
11068 if (p2[0] == '\0')
11069 /* strpbrk(x, "") == NULL.
11070 Evaluate and ignore s1 in case it has side-effects. */
11071 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11073 if (p2[1] != '\0')
11074 return NULL_TREE; /* Really call strpbrk. */
11076 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11077 if (!fn)
11078 return NULL_TREE;
11080 /* New argument list transforming strpbrk(s1, s2) to
11081 strchr(s1, s2[0]). */
11082 return build_call_expr_loc (loc, fn, 2, s1,
11083 build_int_cst (integer_type_node, p2[0]));
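/* For example, strpbrk ("abc", "xb") folds to "abc" + 1,
   strpbrk (s, "") folds to a null pointer (still evaluating S1 for
   its side effects), and strpbrk (s, "a") becomes strchr (s, 'a').  */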
11087 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11088 to the call.
11090 Return NULL_TREE if no simplification was possible, otherwise return the
11091 simplified form of the call as a tree.
11093 The simplified form may be a constant or other expression which
11094 computes the same value, but in a more efficient manner (including
11095 calls to other builtin functions).
11097 The call may contain arguments which need to be evaluated, but
11098 which are not useful to determine the result of the call. In
11099 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11100 COMPOUND_EXPR will be an argument which must be evaluated.
11101 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11102 COMPOUND_EXPR in the chain will contain the tree for the simplified
11103 form of the builtin function call. */
11105 static tree
11106 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11108 if (!validate_arg (s1, POINTER_TYPE)
11109 || !validate_arg (s2, POINTER_TYPE))
11110 return NULL_TREE;
11111 else
11113 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11115 /* If both arguments are constants, evaluate at compile-time. */
11116 if (p1 && p2)
11118 const size_t r = strspn (p1, p2);
11119 return build_int_cst (size_type_node, r);
11122 /* If either argument is "", return NULL_TREE. */
11123 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11124 /* Evaluate and ignore both arguments in case either one has
11125 side-effects. */
11126 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11127 s1, s2);
11128 return NULL_TREE;
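/* For example, strspn ("aab", "a") folds to 2, while strspn (s, "")
   and strspn ("", s) fold to 0, keeping the arguments only for their
   side effects.  */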
11132 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11133 to the call.
11135 Return NULL_TREE if no simplification was possible, otherwise return the
11136 simplified form of the call as a tree.
11138 The simplified form may be a constant or other expression which
11139 computes the same value, but in a more efficient manner (including
11140 calls to other builtin functions).
11142 The call may contain arguments which need to be evaluated, but
11143 which are not useful to determine the result of the call. In
11144 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11145 COMPOUND_EXPR will be an argument which must be evaluated.
11146 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11147 COMPOUND_EXPR in the chain will contain the tree for the simplified
11148 form of the builtin function call. */
11150 static tree
11151 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11153 if (!validate_arg (s1, POINTER_TYPE)
11154 || !validate_arg (s2, POINTER_TYPE))
11155 return NULL_TREE;
11156 else
11158 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11160 /* If both arguments are constants, evaluate at compile-time. */
11161 if (p1 && p2)
11163 const size_t r = strcspn (p1, p2);
11164 return build_int_cst (size_type_node, r);
11167 /* If the first argument is "", return NULL_TREE. */
11168 if (p1 && *p1 == '\0')
11170 /* Evaluate and ignore argument s2 in case it has
11171 side-effects. */
11172 return omit_one_operand_loc (loc, size_type_node,
11173 size_zero_node, s2);
11176 /* If the second argument is "", return __builtin_strlen(s1). */
11177 if (p2 && *p2 == '\0')
11179 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11181 /* If the replacement _DECL isn't initialized, don't do the
11182 transformation. */
11183 if (!fn)
11184 return NULL_TREE;
11186 return build_call_expr_loc (loc, fn, 1, s1);
11188 return NULL_TREE;
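/* For example, strcspn ("abc", "c") folds to 2, strcspn ("", s)
   folds to 0 (evaluating S2 for side effects), and strcspn (s, "")
   becomes __builtin_strlen (s).  */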
11192 /* Fold the next_arg or va_start call EXP. Returns true if an error
11193 was produced, false otherwise. This is done so that we don't output
11194 the error or warning more than once. */
11196 bool
11197 fold_builtin_next_arg (tree exp, bool va_start_p)
11199 tree fntype = TREE_TYPE (current_function_decl);
11200 int nargs = call_expr_nargs (exp);
11201 tree arg;
11202 /* There is a good chance the current input_location points inside
11203 the definition of the va_start macro (perhaps on the token for
11204 the builtin) in a system header, so warnings would not be emitted.
11205 Use the location in real source code instead. */
11206 source_location current_location =
11207 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11208 NULL);
11210 if (!stdarg_p (fntype))
11212 error ("%<va_start%> used in function with fixed args");
11213 return true;
11216 if (va_start_p)
11218 if (nargs != 2)
11220 error ("wrong number of arguments to function %<va_start%>");
11221 return true;
11223 arg = CALL_EXPR_ARG (exp, 1);
11225 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11226 when we checked the arguments and if needed issued a warning. */
11227 else
11229 if (nargs == 0)
11231 /* Evidently an out of date version of <stdarg.h>; can't validate
11232 va_start's second argument, but can still work as intended. */
11233 warning_at (current_location,
11234 OPT_Wvarargs,
11235 "%<__builtin_next_arg%> called without an argument");
11236 return true;
11238 else if (nargs > 1)
11240 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11241 return true;
11243 arg = CALL_EXPR_ARG (exp, 0);
11246 if (TREE_CODE (arg) == SSA_NAME)
11247 arg = SSA_NAME_VAR (arg);
11249 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11250 or __builtin_next_arg (0) the first time we see it, after checking
11251 the arguments and if needed issuing a warning. */
11252 if (!integer_zerop (arg))
11254 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11256 /* Strip off all nops for the sake of the comparison. This
11257 is not quite the same as STRIP_NOPS. It does more.
11258 We must also strip off INDIRECT_EXPR for C++ reference
11259 parameters. */
11260 while (CONVERT_EXPR_P (arg)
11261 || TREE_CODE (arg) == INDIRECT_REF)
11262 arg = TREE_OPERAND (arg, 0);
11263 if (arg != last_parm)
11265 /* FIXME: Sometimes with the tree optimizers we can get an
11266 argument that is not the last one even though the user used
11267 the last argument. We just warn and treat the arg as the
11268 last argument, so we may still get wrong code because of
11269 it. */
11270 warning_at (current_location,
11271 OPT_Wvarargs,
11272 "second parameter of %<va_start%> not last named argument");
11275 /* Undefined by C99 7.15.1.4p4 (va_start):
11276 "If the parameter parmN is declared with the register storage
11277 class, with a function or array type, or with a type that is
11278 not compatible with the type that results after application of
11279 the default argument promotions, the behavior is undefined."
11281 else if (DECL_REGISTER (arg))
11283 warning_at (current_location,
11284 OPT_Wvarargs,
11285 "undefined behaviour when second parameter of "
11286 "%<va_start%> is declared with %<register%> storage");
11289 /* We want to verify the second parameter just once before the tree
11290 optimizers are run and then avoid keeping it in the tree,
11291 as otherwise we could warn even for correct code like:
11292 void foo (int i, ...)
11293 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11294 if (va_start_p)
11295 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11296 else
11297 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11299 return false;
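/* Sketch of the diagnosed cases, using hypothetical user code:

     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);  // warning: second parameter of 'va_start'
                          // not last named argument
       va_end (ap);
     }

   After the check the second argument is replaced with 0, so later
   passes never re-examine it and the warning is not repeated.  */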
11303 /* Expand a call EXP to __builtin_object_size. */
11305 static rtx
11306 expand_builtin_object_size (tree exp)
11308 tree ost;
11309 int object_size_type;
11310 tree fndecl = get_callee_fndecl (exp);
11312 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11314 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11315 exp, fndecl);
11316 expand_builtin_trap ();
11317 return const0_rtx;
11320 ost = CALL_EXPR_ARG (exp, 1);
11321 STRIP_NOPS (ost);
11323 if (TREE_CODE (ost) != INTEGER_CST
11324 || tree_int_cst_sgn (ost) < 0
11325 || compare_tree_int (ost, 3) > 0)
11327 error ("%Klast argument of %D is not integer constant between 0 and 3",
11328 exp, fndecl);
11329 expand_builtin_trap ();
11330 return const0_rtx;
11333 object_size_type = tree_to_shwi (ost);
11335 return object_size_type < 2 ? constm1_rtx : const0_rtx;
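/* Hypothetical usage showing the fallback above: if the object size
   could not be folded earlier,

     __builtin_object_size (p, 0)   =>  (size_t) -1
     __builtin_object_size (p, 2)   =>  (size_t) 0

   i.e. "unknown" means the maximum for types 0 and 1 and the minimum
   for types 2 and 3.  */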
11338 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11339 FCODE is the BUILT_IN_* to use.
11340 Return NULL_RTX if we failed; the caller should emit a normal call,
11341 otherwise try to get the result in TARGET, if convenient (and in
11342 mode MODE if that's convenient). */
11344 static rtx
11345 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11346 enum built_in_function fcode)
11348 tree dest, src, len, size;
11350 if (!validate_arglist (exp,
11351 POINTER_TYPE,
11352 fcode == BUILT_IN_MEMSET_CHK
11353 ? INTEGER_TYPE : POINTER_TYPE,
11354 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11355 return NULL_RTX;
11357 dest = CALL_EXPR_ARG (exp, 0);
11358 src = CALL_EXPR_ARG (exp, 1);
11359 len = CALL_EXPR_ARG (exp, 2);
11360 size = CALL_EXPR_ARG (exp, 3);
11362 if (! tree_fits_uhwi_p (size))
11363 return NULL_RTX;
11365 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11367 tree fn;
11369 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11371 warning_at (tree_nonartificial_location (exp),
11372 0, "%Kcall to %D will always overflow destination buffer",
11373 exp, get_callee_fndecl (exp));
11374 return NULL_RTX;
11377 fn = NULL_TREE;
11378 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11379 mem{cpy,pcpy,move,set} is available. */
11380 switch (fcode)
11382 case BUILT_IN_MEMCPY_CHK:
11383 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11384 break;
11385 case BUILT_IN_MEMPCPY_CHK:
11386 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11387 break;
11388 case BUILT_IN_MEMMOVE_CHK:
11389 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11390 break;
11391 case BUILT_IN_MEMSET_CHK:
11392 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11393 break;
11394 default:
11395 break;
11398 if (! fn)
11399 return NULL_RTX;
11401 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11402 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11403 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11404 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11406 else if (fcode == BUILT_IN_MEMSET_CHK)
11407 return NULL_RTX;
11408 else
11410 unsigned int dest_align = get_pointer_alignment (dest);
11412 /* If DEST is not a pointer type, call the normal function. */
11413 if (dest_align == 0)
11414 return NULL_RTX;
11416 /* If SRC and DEST are the same (and not volatile), do nothing. */
11417 if (operand_equal_p (src, dest, 0))
11419 tree expr;
11421 if (fcode != BUILT_IN_MEMPCPY_CHK)
11423 /* Evaluate and ignore LEN in case it has side-effects. */
11424 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11425 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11428 expr = fold_build_pointer_plus (dest, len);
11429 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11432 /* __memmove_chk special case. */
11433 if (fcode == BUILT_IN_MEMMOVE_CHK)
11435 unsigned int src_align = get_pointer_alignment (src);
11437 if (src_align == 0)
11438 return NULL_RTX;
11440 /* If src is categorized for a readonly section we can use
11441 normal __memcpy_chk. */
11442 if (readonly_data_expr (src))
11444 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11445 if (!fn)
11446 return NULL_RTX;
11447 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11448 dest, src, len, size);
11449 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11450 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11451 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11454 return NULL_RTX;
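/* For example, a hypothetical fortified call whose length is a known
   constant that fits in the destination is rewritten as

     __builtin___memcpy_chk (d, s, 16, 32)  =>  memcpy (d, s, 16)

   whereas a constant length of 64 against a size of 32 draws the
   "will always overflow" warning above and keeps the checked call.  */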
11458 /* Emit warning if a buffer overflow is detected at compile time. */
11460 static void
11461 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11463 int is_strlen = 0;
11464 tree len, size;
11465 location_t loc = tree_nonartificial_location (exp);
11467 switch (fcode)
11469 case BUILT_IN_STRCPY_CHK:
11470 case BUILT_IN_STPCPY_CHK:
11471 /* For __strcat_chk the warning will be emitted only if overflowing
11472 by at least strlen (dest) + 1 bytes. */
11473 case BUILT_IN_STRCAT_CHK:
11474 len = CALL_EXPR_ARG (exp, 1);
11475 size = CALL_EXPR_ARG (exp, 2);
11476 is_strlen = 1;
11477 break;
11478 case BUILT_IN_STRNCAT_CHK:
11479 case BUILT_IN_STRNCPY_CHK:
11480 case BUILT_IN_STPNCPY_CHK:
11481 len = CALL_EXPR_ARG (exp, 2);
11482 size = CALL_EXPR_ARG (exp, 3);
11483 break;
11484 case BUILT_IN_SNPRINTF_CHK:
11485 case BUILT_IN_VSNPRINTF_CHK:
11486 len = CALL_EXPR_ARG (exp, 1);
11487 size = CALL_EXPR_ARG (exp, 3);
11488 break;
11489 default:
11490 gcc_unreachable ();
11493 if (!len || !size)
11494 return;
11496 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11497 return;
11499 if (is_strlen)
11501 len = c_strlen (len, 1);
11502 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11503 return;
11505 else if (fcode == BUILT_IN_STRNCAT_CHK)
11507 tree src = CALL_EXPR_ARG (exp, 1);
11508 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11509 return;
11510 src = c_strlen (src, 1);
11511 if (! src || ! tree_fits_uhwi_p (src))
11513 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11514 exp, get_callee_fndecl (exp));
11515 return;
11517 else if (tree_int_cst_lt (src, size))
11518 return;
11520 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11521 return;
11523 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11524 exp, get_callee_fndecl (exp));
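/* For example, the hypothetical call

     char buf[4];
     __builtin___strcpy_chk (buf, "toolong", sizeof buf);

   has LEN == strlen ("toolong") == 7 >= SIZE == 4, so the warning
   above fires at compile time.  */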
11527 /* Emit warning if a buffer overflow is detected at compile time
11528 in __sprintf_chk/__vsprintf_chk calls. */
11530 static void
11531 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11533 tree size, len, fmt;
11534 const char *fmt_str;
11535 int nargs = call_expr_nargs (exp);
11537 /* Verify the required arguments in the original call. */
11539 if (nargs < 4)
11540 return;
11541 size = CALL_EXPR_ARG (exp, 2);
11542 fmt = CALL_EXPR_ARG (exp, 3);
11544 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11545 return;
11547 /* Check whether the format is a literal string constant. */
11548 fmt_str = c_getstr (fmt);
11549 if (fmt_str == NULL)
11550 return;
11552 if (!init_target_chars ())
11553 return;
11555 /* If the format doesn't contain % args or %%, we know its size. */
11556 if (strchr (fmt_str, target_percent) == 0)
11557 len = build_int_cstu (size_type_node, strlen (fmt_str));
11558 /* If the format is "%s" and the first ... argument is a string
11559 literal, we know its size too. */
11560 else if (fcode == BUILT_IN_SPRINTF_CHK
11561 && strcmp (fmt_str, target_percent_s) == 0)
11563 tree arg;
11565 if (nargs < 5)
11566 return;
11567 arg = CALL_EXPR_ARG (exp, 4);
11568 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11569 return;
11571 len = c_strlen (arg, 1);
11572 if (!len || ! tree_fits_uhwi_p (len))
11573 return;
11575 else
11576 return;
11578 if (! tree_int_cst_lt (len, size))
11579 warning_at (tree_nonartificial_location (exp),
11580 0, "%Kcall to %D will always overflow destination buffer",
11581 exp, get_callee_fndecl (exp));
11584 /* Emit a warning if free is called with the address of a variable. */
11586 static void
11587 maybe_emit_free_warning (tree exp)
11589 tree arg = CALL_EXPR_ARG (exp, 0);
11591 STRIP_NOPS (arg);
11592 if (TREE_CODE (arg) != ADDR_EXPR)
11593 return;
11595 arg = get_base_address (TREE_OPERAND (arg, 0));
11596 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11597 return;
11599 if (SSA_VAR_P (arg))
11600 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11601 "%Kattempt to free a non-heap object %qD", exp, arg);
11602 else
11603 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11604 "%Kattempt to free a non-heap object", exp);
11607 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11608 if possible. */
11610 static tree
11611 fold_builtin_object_size (tree ptr, tree ost)
11613 unsigned HOST_WIDE_INT bytes;
11614 int object_size_type;
11616 if (!validate_arg (ptr, POINTER_TYPE)
11617 || !validate_arg (ost, INTEGER_TYPE))
11618 return NULL_TREE;
11620 STRIP_NOPS (ost);
11622 if (TREE_CODE (ost) != INTEGER_CST
11623 || tree_int_cst_sgn (ost) < 0
11624 || compare_tree_int (ost, 3) > 0)
11625 return NULL_TREE;
11627 object_size_type = tree_to_shwi (ost);
11629 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11630 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11631 and (size_t) 0 for types 2 and 3. */
11632 if (TREE_SIDE_EFFECTS (ptr))
11633 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11635 if (TREE_CODE (ptr) == ADDR_EXPR)
11637 bytes = compute_builtin_object_size (ptr, object_size_type);
11638 if (wi::fits_to_tree_p (bytes, size_type_node))
11639 return build_int_cstu (size_type_node, bytes);
11641 else if (TREE_CODE (ptr) == SSA_NAME)
11643 /* If the object size is not known yet, delay folding until
11644 later. Maybe subsequent passes will help determine
11645 it. */
11646 bytes = compute_builtin_object_size (ptr, object_size_type);
11647 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11648 && wi::fits_to_tree_p (bytes, size_type_node))
11649 return build_int_cstu (size_type_node, bytes);
11652 return NULL_TREE;
11655 /* Builtins with folding operations that operate on "..." arguments
11656 need special handling; we need to store the arguments in a convenient
11657 data structure before attempting any folding. Fortunately there are
11658 only a few builtins that fall into this category. FNDECL is the
11659 function, EXP is the CALL_EXPR for the call. */
11661 static tree
11662 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11664 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11665 tree ret = NULL_TREE;
11667 switch (fcode)
11669 case BUILT_IN_FPCLASSIFY:
11670 ret = fold_builtin_fpclassify (loc, args, nargs);
11671 break;
11673 default:
11674 break;
11676 if (ret)
11678 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11679 SET_EXPR_LOCATION (ret, loc);
11680 TREE_NO_WARNING (ret) = 1;
11681 return ret;
11683 return NULL_TREE;
11686 /* Initialize format string characters in the target charset. */
11688 bool
11689 init_target_chars (void)
11691 static bool init;
11692 if (!init)
11694 target_newline = lang_hooks.to_target_charset ('\n');
11695 target_percent = lang_hooks.to_target_charset ('%');
11696 target_c = lang_hooks.to_target_charset ('c');
11697 target_s = lang_hooks.to_target_charset ('s');
11698 if (target_newline == 0 || target_percent == 0 || target_c == 0
11699 || target_s == 0)
11700 return false;
11702 target_percent_c[0] = target_percent;
11703 target_percent_c[1] = target_c;
11704 target_percent_c[2] = '\0';
11706 target_percent_s[0] = target_percent;
11707 target_percent_s[1] = target_s;
11708 target_percent_s[2] = '\0';
11710 target_percent_s_newline[0] = target_percent;
11711 target_percent_s_newline[1] = target_s;
11712 target_percent_s_newline[2] = target_newline;
11713 target_percent_s_newline[3] = '\0';
11715 init = true;
11717 return true;
11720 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11721 and no overflow/underflow occurred. INEXACT is true if M was not
11722 exactly calculated. TYPE is the tree type for the result. This
11723 function assumes that you cleared the MPFR flags before
11724 calculating M, so it can check whether anything set a flag
11725 prior to entering this function. Return NULL_TREE if any checks fail. */
11727 static tree
11728 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11730 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11731 overflow/underflow occurred. If -frounding-math, proceed iff the
11732 result of calling FUNC was exact. */
11733 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11734 && (!flag_rounding_math || !inexact))
11736 REAL_VALUE_TYPE rr;
11738 real_from_mpfr (&rr, m, type, GMP_RNDN);
11739 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11740 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11741 but the mpfr_t is not, then we underflowed in the
11742 conversion. */
11743 if (real_isfinite (&rr)
11744 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11746 REAL_VALUE_TYPE rmode;
11748 real_convert (&rmode, TYPE_MODE (type), &rr);
11749 /* Proceed iff the specified mode can hold the value. */
11750 if (real_identical (&rmode, &rr))
11751 return build_real (type, rmode);
11754 return NULL_TREE;
11757 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11758 number and no overflow/underflow occurred. INEXACT is true if M
11759 was not exactly calculated. TYPE is the tree type for the result.
11760 This function assumes that you cleared the MPFR flags before
11761 calculating M, so it can check whether anything set a flag
11762 prior to entering this function. Return NULL_TREE if any checks
11763 fail; if FORCE_CONVERT is true, bypass the checks. */
11765 static tree
11766 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11768 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11769 overflow/underflow occurred. If -frounding-math, proceed iff the
11770 result of calling FUNC was exact. */
11771 if (force_convert
11772 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11773 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11774 && (!flag_rounding_math || !inexact)))
11776 REAL_VALUE_TYPE re, im;
11778 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11779 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11780 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11781 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11782 but the mpfr_t is not, then we underflowed in the
11783 conversion. */
11784 if (force_convert
11785 || (real_isfinite (&re) && real_isfinite (&im)
11786 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11787 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11789 REAL_VALUE_TYPE re_mode, im_mode;
11791 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11792 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11793 /* Proceed iff the specified mode can hold the value. */
11794 if (force_convert
11795 || (real_identical (&re_mode, &re)
11796 && real_identical (&im_mode, &im)))
11797 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11798 build_real (TREE_TYPE (type), im_mode));
11801 return NULL_TREE;
11804 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11805 FUNC on it and return the resulting value as a tree with type TYPE.
11806 If MIN and/or MAX are not NULL, then the supplied ARG must be
11807 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11808 acceptable values, otherwise they are not. The mpfr precision is
11809 set to the precision of TYPE. We assume that function FUNC returns
11810 zero if the result could be calculated exactly within the requested
11811 precision. */
11813 static tree
11814 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11815 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11816 bool inclusive)
11818 tree result = NULL_TREE;
11820 STRIP_NOPS (arg);
11822 /* To proceed, MPFR must exactly represent the target floating point
11823 format, which only happens when the target base equals two. */
11824 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11825 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
11827 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11829 if (real_isfinite (ra)
11830 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11831 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
11833 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11834 const int prec = fmt->p;
11835 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11836 int inexact;
11837 mpfr_t m;
11839 mpfr_init2 (m, prec);
11840 mpfr_from_real (m, ra, GMP_RNDN);
11841 mpfr_clear_flags ();
11842 inexact = func (m, m, rnd);
11843 result = do_mpfr_ckconv (m, type, inexact);
11844 mpfr_clear (m);
11848 return result;
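/* A caller's view (hypothetical, matching how the math folders use
   this helper): folding sin (1.0) amounts roughly to

     do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, false);

   MPFR computes the value at TYPE's precision, and do_mpfr_ckconv
   keeps the result only if it converts back exactly.  */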
11851 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11852 FUNC on it and return the resulting value as a tree with type TYPE.
11853 The mpfr precision is set to the precision of TYPE. We assume that
11854 function FUNC returns zero if the result could be calculated
11855 exactly within the requested precision. */
11857 static tree
11858 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11859 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11861 tree result = NULL_TREE;
11863 STRIP_NOPS (arg1);
11864 STRIP_NOPS (arg2);
11866 /* To proceed, MPFR must exactly represent the target floating point
11867 format, which only happens when the target base equals two. */
11868 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11869 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11870 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11872 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11873 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11875 if (real_isfinite (ra1) && real_isfinite (ra2))
11877 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11878 const int prec = fmt->p;
11879 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11880 int inexact;
11881 mpfr_t m1, m2;
11883 mpfr_inits2 (prec, m1, m2, NULL);
11884 mpfr_from_real (m1, ra1, GMP_RNDN);
11885 mpfr_from_real (m2, ra2, GMP_RNDN);
11886 mpfr_clear_flags ();
11887 inexact = func (m1, m1, m2, rnd);
11888 result = do_mpfr_ckconv (m1, type, inexact);
11889 mpfr_clears (m1, m2, NULL);
11893 return result;
11896 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
11897 FUNC on it and return the resulting value as a tree with type TYPE.
11898 The mpfr precision is set to the precision of TYPE. We assume that
11899 function FUNC returns zero if the result could be calculated
11900 exactly within the requested precision. */
11902 static tree
11903 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11904 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11906 tree result = NULL_TREE;
11908 STRIP_NOPS (arg1);
11909 STRIP_NOPS (arg2);
11910 STRIP_NOPS (arg3);
11912 /* To proceed, MPFR must exactly represent the target floating point
11913 format, which only happens when the target base equals two. */
11914 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11915 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11916 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11917 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
11919 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11920 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11921 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11923 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
11925 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11926 const int prec = fmt->p;
11927 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11928 int inexact;
11929 mpfr_t m1, m2, m3;
11931 mpfr_inits2 (prec, m1, m2, m3, NULL);
11932 mpfr_from_real (m1, ra1, GMP_RNDN);
11933 mpfr_from_real (m2, ra2, GMP_RNDN);
11934 mpfr_from_real (m3, ra3, GMP_RNDN);
11935 mpfr_clear_flags ();
11936 inexact = func (m1, m1, m2, m3, rnd);
11937 result = do_mpfr_ckconv (m1, type, inexact);
11938 mpfr_clears (m1, m2, m3, NULL);
11942 return result;
11945 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11946 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
11947 If ARG_SINP and ARG_COSP are NULL then the result is returned
11948 as a complex value.
11949 The type is taken from the type of ARG and is used for setting the
11950 precision of the calculation and results. */
11952 static tree
11953 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11955 tree const type = TREE_TYPE (arg);
11956 tree result = NULL_TREE;
11958 STRIP_NOPS (arg);
11960 /* To proceed, MPFR must exactly represent the target floating point
11961 format, which only happens when the target base equals two. */
11962 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11963 && TREE_CODE (arg) == REAL_CST
11964 && !TREE_OVERFLOW (arg))
11966 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11968 if (real_isfinite (ra))
11970 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11971 const int prec = fmt->p;
11972 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11973 tree result_s, result_c;
11974 int inexact;
11975 mpfr_t m, ms, mc;
11977 mpfr_inits2 (prec, m, ms, mc, NULL);
11978 mpfr_from_real (m, ra, GMP_RNDN);
11979 mpfr_clear_flags ();
11980 inexact = mpfr_sin_cos (ms, mc, m, rnd);
11981 result_s = do_mpfr_ckconv (ms, type, inexact);
11982 result_c = do_mpfr_ckconv (mc, type, inexact);
11983 mpfr_clears (m, ms, mc, NULL);
11984 if (result_s && result_c)
11986 /* If we are to return the result as a complex value, do so. */
11987 if (!arg_sinp && !arg_cosp)
11988 return build_complex (build_complex_type (type),
11989 result_c, result_s);
11991 /* Dereference the sin/cos pointer arguments. */
11992 arg_sinp = build_fold_indirect_ref (arg_sinp);
11993 arg_cosp = build_fold_indirect_ref (arg_cosp);
11994 /* Proceed iff valid pointer types were passed in. */
11995 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
11996 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
11998 /* Set the values. */
11999 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12000 result_s);
12001 TREE_SIDE_EFFECTS (result_s) = 1;
12002 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12003 result_c);
12004 TREE_SIDE_EFFECTS (result_c) = 1;
12005 /* Combine the assignments into a compound expr. */
12006 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12007 result_s, result_c));
12012 return result;
12015 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12016 two-argument mpfr order N Bessel function FUNC on them and return
12017 the resulting value as a tree with type TYPE. The mpfr precision
12018 is set to the precision of TYPE. We assume that function FUNC
12019 returns zero if the result could be calculated exactly within the
12020 requested precision. */
12021 static tree
12022 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12023 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12024 const REAL_VALUE_TYPE *min, bool inclusive)
12026 tree result = NULL_TREE;
12028 STRIP_NOPS (arg1);
12029 STRIP_NOPS (arg2);
12031 /* To proceed, MPFR must exactly represent the target floating point
12032 format, which only happens when the target base equals two. */
12033 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12034 && tree_fits_shwi_p (arg1)
12035 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12037 const HOST_WIDE_INT n = tree_to_shwi (arg1);
12038 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12040 if (n == (long)n
12041 && real_isfinite (ra)
12042 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12044 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12045 const int prec = fmt->p;
12046 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12047 int inexact;
12048 mpfr_t m;
12050 mpfr_init2 (m, prec);
12051 mpfr_from_real (m, ra, GMP_RNDN);
12052 mpfr_clear_flags ();
12053 inexact = func (m, n, m, rnd);
12054 result = do_mpfr_ckconv (m, type, inexact);
12055 mpfr_clear (m);
12059 return result;
12062 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12063 the pointer *(ARG_QUO) and return the result. The type is taken
12064 from the type of ARG0 and is used for setting the precision of the
12065 calculation and results. */
12067 static tree
12068 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12070 tree const type = TREE_TYPE (arg0);
12071 tree result = NULL_TREE;
12073 STRIP_NOPS (arg0);
12074 STRIP_NOPS (arg1);
12076 /* To proceed, MPFR must exactly represent the target floating point
12077 format, which only happens when the target base equals two. */
12078 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12079 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12080 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12082 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12083 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12085 if (real_isfinite (ra0) && real_isfinite (ra1))
12087 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12088 const int prec = fmt->p;
12089 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12090 tree result_rem;
12091 long integer_quo;
12092 mpfr_t m0, m1;
12094 mpfr_inits2 (prec, m0, m1, NULL);
12095 mpfr_from_real (m0, ra0, GMP_RNDN);
12096 mpfr_from_real (m1, ra1, GMP_RNDN);
12097 mpfr_clear_flags ();
12098 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12099 /* Remquo is independent of the rounding mode, so pass
12100 inexact=0 to do_mpfr_ckconv(). */
12101 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12102 mpfr_clears (m0, m1, NULL);
12103 if (result_rem)
12105 /* MPFR calculates quo in the host's long so it may
12106 return more bits in quo than the target int can hold
12107 if sizeof(host long) > sizeof(target int). This can
12108 happen even for native compilers in LP64 mode. In
12109 these cases, reduce the quo value modulo the largest
12110 number that the target int can hold while leaving one
12111 bit for the sign. */
12112 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12113 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12115 /* Dereference the quo pointer argument. */
12116 arg_quo = build_fold_indirect_ref (arg_quo);
12117 /* Proceed iff a valid pointer type was passed in. */
12118 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12120 /* Set the value. */
12121 tree result_quo
12122 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12123 build_int_cst (TREE_TYPE (arg_quo),
12124 integer_quo));
12125 TREE_SIDE_EFFECTS (result_quo) = 1;
12126 /* Combine the quo assignment with the rem. */
12127 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12128 result_quo, result_rem));
12133 return result;
12136 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12137 resulting value as a tree with type TYPE. The mpfr precision is
12138 set to the precision of TYPE. We assume that this mpfr function
12139 returns zero if the result could be calculated exactly within the
12140 requested precision. In addition, the integer pointer represented
12141 by ARG_SG will be dereferenced and set to the appropriate signgam
12142 (-1,1) value. */
12144 static tree
12145 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12147 tree result = NULL_TREE;
12149 STRIP_NOPS (arg);
12151 /* To proceed, MPFR must exactly represent the target floating point
12152 format, which only happens when the target base equals two. Also
12153 verify ARG is a constant and that ARG_SG is an int pointer. */
12154 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12155 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12156 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12157 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12159 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12161 /* In addition to NaN and Inf, the argument cannot be zero or a
12162 negative integer. */
12163 if (real_isfinite (ra)
12164 && ra->cl != rvc_zero
12165 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12167 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12168 const int prec = fmt->p;
12169 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12170 int inexact, sg;
12171 mpfr_t m;
12172 tree result_lg;
12174 mpfr_init2 (m, prec);
12175 mpfr_from_real (m, ra, GMP_RNDN);
12176 mpfr_clear_flags ();
12177 inexact = mpfr_lgamma (m, &sg, m, rnd);
12178 result_lg = do_mpfr_ckconv (m, type, inexact);
12179 mpfr_clear (m);
12180 if (result_lg)
12182 tree result_sg;
12184 /* Dereference the arg_sg pointer argument. */
12185 arg_sg = build_fold_indirect_ref (arg_sg);
12186 /* Assign the signgam value into *arg_sg. */
12187 result_sg = fold_build2 (MODIFY_EXPR,
12188 TREE_TYPE (arg_sg), arg_sg,
12189 build_int_cst (TREE_TYPE (arg_sg), sg));
12190 TREE_SIDE_EFFECTS (result_sg) = 1;
12191 /* Combine the signgam assignment with the lgamma result. */
12192 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12193 result_sg, result_lg));
12198 return result;
12201 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
12202 function FUNC on it and return the resulting value as a tree with
12203 type TYPE. The mpfr precision is set to the precision of TYPE. We
12204 assume that function FUNC returns zero if the result could be
12205 calculated exactly within the requested precision. */
12207 static tree
12208 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
12210 tree result = NULL_TREE;
12212 STRIP_NOPS (arg);
12214 /* To proceed, MPFR must exactly represent the target floating point
12215 format, which only happens when the target base equals two. */
12216 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
12217 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
12218 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
12220 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
12221 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
12223 if (real_isfinite (re) && real_isfinite (im))
12225 const struct real_format *const fmt =
12226 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12227 const int prec = fmt->p;
12228 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12229 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12230 int inexact;
12231 mpc_t m;
12233 mpc_init2 (m, prec);
12234 mpfr_from_real (mpc_realref (m), re, rnd);
12235 mpfr_from_real (mpc_imagref (m), im, rnd);
12236 mpfr_clear_flags ();
12237 inexact = func (m, m, crnd);
12238 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
12239 mpc_clear (m);
12243 return result;
12246 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
12247 mpc function FUNC on it and return the resulting value as a tree
12248 with type TYPE. The mpfr precision is set to the precision of
12249 TYPE. We assume that function FUNC returns zero if the result
12250 could be calculated exactly within the requested precision. If
12251 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12252 in the arguments and/or results. */
12254 tree
12255 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
12256 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12258 tree result = NULL_TREE;
12260 STRIP_NOPS (arg0);
12261 STRIP_NOPS (arg1);
12263 /* To proceed, MPFR must exactly represent the target floating point
12264 format, which only happens when the target base equals two. */
12265 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12266 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12267 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12268 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12269 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12271 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12272 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12273 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12274 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12276 if (do_nonfinite
12277 || (real_isfinite (re0) && real_isfinite (im0)
12278 && real_isfinite (re1) && real_isfinite (im1)))
12280 const struct real_format *const fmt =
12281 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12282 const int prec = fmt->p;
12283 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12284 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12285 int inexact;
12286 mpc_t m0, m1;
12288 mpc_init2 (m0, prec);
12289 mpc_init2 (m1, prec);
12290 mpfr_from_real (mpc_realref (m0), re0, rnd);
12291 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12292 mpfr_from_real (mpc_realref (m1), re1, rnd);
12293 mpfr_from_real (mpc_imagref (m1), im1, rnd);
12294 mpfr_clear_flags ();
12295 inexact = func (m0, m0, m1, crnd);
12296 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12297 mpc_clear (m0);
12298 mpc_clear (m1);
12302 return result;
12305 /* A wrapper function for builtin folding that prevents warnings for
12306 "statement without effect" and the like, caused by removing the
12307 call node before the warning is generated. */
12309 tree
12310 fold_call_stmt (gcall *stmt, bool ignore)
12312 tree ret = NULL_TREE;
12313 tree fndecl = gimple_call_fndecl (stmt);
12314 location_t loc = gimple_location (stmt);
12315 if (fndecl
12316 && TREE_CODE (fndecl) == FUNCTION_DECL
12317 && DECL_BUILT_IN (fndecl)
12318 && !gimple_call_va_arg_pack_p (stmt))
12320 int nargs = gimple_call_num_args (stmt);
12321 tree *args = (nargs > 0
12322 ? gimple_call_arg_ptr (stmt, 0)
12323 : &error_mark_node);
12325 if (avoid_folding_inline_builtin (fndecl))
12326 return NULL_TREE;
12327 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12329 return targetm.fold_builtin (fndecl, nargs, args, ignore);
12331 else
12333 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
12334 if (ret)
12336 /* Propagate location information from original call to
12337 expansion of builtin. Otherwise things like
12338 maybe_emit_chk_warning, that operate on the expansion
12339 of a builtin, will use the wrong location information. */
12340 if (gimple_has_location (stmt))
12342 tree realret = ret;
12343 if (TREE_CODE (ret) == NOP_EXPR)
12344 realret = TREE_OPERAND (ret, 0);
12345 if (CAN_HAVE_LOCATION_P (realret)
12346 && !EXPR_HAS_LOCATION (realret))
12347 SET_EXPR_LOCATION (realret, loc);
12348 return realret;
12350 return ret;
12354 return NULL_TREE;
12357 /* Look up the function in builtin_decl that corresponds to DECL
12358 and set ASMSPEC as its user assembler name. DECL must be a
12359 function decl that declares a builtin. */
12361 void
12362 set_builtin_user_assembler_name (tree decl, const char *asmspec)
12364 tree builtin;
12365 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
12366 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
12367 && asmspec != 0);
12369 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12370 set_user_assembler_name (builtin, asmspec);
12371 switch (DECL_FUNCTION_CODE (decl))
12373 case BUILT_IN_MEMCPY:
12374 init_block_move_fn (asmspec);
12375 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
12376 break;
12377 case BUILT_IN_MEMSET:
12378 init_block_clear_fn (asmspec);
12379 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
12380 break;
12381 case BUILT_IN_MEMMOVE:
12382 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
12383 break;
12384 case BUILT_IN_MEMCMP:
12385 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
12386 break;
12387 case BUILT_IN_ABORT:
12388 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
12389 break;
12390 case BUILT_IN_FFS:
12391 if (INT_TYPE_SIZE < BITS_PER_WORD)
12393 set_user_assembler_libfunc ("ffs", asmspec);
12394 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
12395 MODE_INT, 0), "ffs");
12397 break;
12398 default:
12399 break;
12403 /* Return true if DECL is a builtin that expands to a constant or similarly
12404 simple code. */
12405 bool
12406 is_simple_builtin (tree decl)
12408 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12409 switch (DECL_FUNCTION_CODE (decl))
12411 /* Builtins that expand to constants. */
12412 case BUILT_IN_CONSTANT_P:
12413 case BUILT_IN_EXPECT:
12414 case BUILT_IN_OBJECT_SIZE:
12415 case BUILT_IN_UNREACHABLE:
12416 /* Simple register moves or loads from stack. */
12417 case BUILT_IN_ASSUME_ALIGNED:
12418 case BUILT_IN_RETURN_ADDRESS:
12419 case BUILT_IN_EXTRACT_RETURN_ADDR:
12420 case BUILT_IN_FROB_RETURN_ADDR:
12421 case BUILT_IN_RETURN:
12422 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12423 case BUILT_IN_FRAME_ADDRESS:
12424 case BUILT_IN_VA_END:
12425 case BUILT_IN_STACK_SAVE:
12426 case BUILT_IN_STACK_RESTORE:
12427 /* Exception state returns or moves registers around. */
12428 case BUILT_IN_EH_FILTER:
12429 case BUILT_IN_EH_POINTER:
12430 case BUILT_IN_EH_COPY_VALUES:
12431 return true;
12433 default:
12434 return false;
12437 return false;
12440 /* Return true if DECL is a builtin that is not expensive, i.e., one that
12441 will most probably be expanded inline into reasonably simple code. This is a
12442 superset of is_simple_builtin. */
12443 bool
12444 is_inexpensive_builtin (tree decl)
12446 if (!decl)
12447 return false;
12448 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12449 return true;
12450 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12451 switch (DECL_FUNCTION_CODE (decl))
12453 case BUILT_IN_ABS:
12454 case BUILT_IN_ALLOCA:
12455 case BUILT_IN_ALLOCA_WITH_ALIGN:
12456 case BUILT_IN_BSWAP16:
12457 case BUILT_IN_BSWAP32:
12458 case BUILT_IN_BSWAP64:
12459 case BUILT_IN_CLZ:
12460 case BUILT_IN_CLZIMAX:
12461 case BUILT_IN_CLZL:
12462 case BUILT_IN_CLZLL:
12463 case BUILT_IN_CTZ:
12464 case BUILT_IN_CTZIMAX:
12465 case BUILT_IN_CTZL:
12466 case BUILT_IN_CTZLL:
12467 case BUILT_IN_FFS:
12468 case BUILT_IN_FFSIMAX:
12469 case BUILT_IN_FFSL:
12470 case BUILT_IN_FFSLL:
12471 case BUILT_IN_IMAXABS:
12472 case BUILT_IN_FINITE:
12473 case BUILT_IN_FINITEF:
12474 case BUILT_IN_FINITEL:
12475 case BUILT_IN_FINITED32:
12476 case BUILT_IN_FINITED64:
12477 case BUILT_IN_FINITED128:
12478 case BUILT_IN_FPCLASSIFY:
12479 case BUILT_IN_ISFINITE:
12480 case BUILT_IN_ISINF_SIGN:
12481 case BUILT_IN_ISINF:
12482 case BUILT_IN_ISINFF:
12483 case BUILT_IN_ISINFL:
12484 case BUILT_IN_ISINFD32:
12485 case BUILT_IN_ISINFD64:
12486 case BUILT_IN_ISINFD128:
12487 case BUILT_IN_ISNAN:
12488 case BUILT_IN_ISNANF:
12489 case BUILT_IN_ISNANL:
12490 case BUILT_IN_ISNAND32:
12491 case BUILT_IN_ISNAND64:
12492 case BUILT_IN_ISNAND128:
12493 case BUILT_IN_ISNORMAL:
12494 case BUILT_IN_ISGREATER:
12495 case BUILT_IN_ISGREATEREQUAL:
12496 case BUILT_IN_ISLESS:
12497 case BUILT_IN_ISLESSEQUAL:
12498 case BUILT_IN_ISLESSGREATER:
12499 case BUILT_IN_ISUNORDERED:
12500 case BUILT_IN_VA_ARG_PACK:
12501 case BUILT_IN_VA_ARG_PACK_LEN:
12502 case BUILT_IN_VA_COPY:
12503 case BUILT_IN_TRAP:
12504 case BUILT_IN_SAVEREGS:
12505 case BUILT_IN_POPCOUNTL:
12506 case BUILT_IN_POPCOUNTLL:
12507 case BUILT_IN_POPCOUNTIMAX:
12508 case BUILT_IN_POPCOUNT:
12509 case BUILT_IN_PARITYL:
12510 case BUILT_IN_PARITYLL:
12511 case BUILT_IN_PARITYIMAX:
12512 case BUILT_IN_PARITY:
12513 case BUILT_IN_LABS:
12514 case BUILT_IN_LLABS:
12515 case BUILT_IN_PREFETCH:
12516 case BUILT_IN_ACC_ON_DEVICE:
12517 return true;
12519 default:
12520 return is_simple_builtin (decl);
12523 return false;