* gcc.dg/vect/slp-perm-1.c (main): Make sure loops aren't vectorized.
[official-gcc.git] / gcc / builtins.c
blob40327e0e858cfc25911d23e64d60e0dd5a041959
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "realmpfr.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic-core.h"
53 #include "builtins.h"
55 #ifndef SLOW_UNALIGNED_ACCESS
56 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
57 #endif
59 #ifndef PAD_VARARGS_DOWN
60 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
61 #endif
62 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
64 struct target_builtins default_target_builtins;
65 #if SWITCHABLE_TARGET
66 struct target_builtins *this_target_builtins = &default_target_builtins;
67 #endif
69 /* Define the names of the builtin function types and codes. */
70 const char *const built_in_class_names[4]
71 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
73 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
74 const char * built_in_names[(int) END_BUILTINS] =
76 #include "builtins.def"
78 #undef DEF_BUILTIN
80 /* Setup an array of _DECL trees, make sure each element is
81 initialized to NULL_TREE. */
82 tree built_in_decls[(int) END_BUILTINS];
83 /* Declarations used when constructing the builtin implicitly in the compiler.
84 It may be NULL_TREE when this is invalid (for instance runtime is not
85 required to implement the function call in all cases). */
86 tree implicit_built_in_decls[(int) END_BUILTINS];
88 static const char *c_getstr (tree);
89 static rtx c_readstr (const char *, enum machine_mode);
90 static int target_char_cast (tree, char *);
91 static rtx get_memory_rtx (tree, tree);
92 static int apply_args_size (void);
93 static int apply_result_size (void);
94 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
95 static rtx result_vector (int, rtx);
96 #endif
97 static void expand_builtin_update_setjmp_buf (rtx);
98 static void expand_builtin_prefetch (tree);
99 static rtx expand_builtin_apply_args (void);
100 static rtx expand_builtin_apply_args_1 (void);
101 static rtx expand_builtin_apply (rtx, rtx, rtx);
102 static void expand_builtin_return (rtx);
103 static enum type_class type_to_class (tree);
104 static rtx expand_builtin_classify_type (tree);
105 static void expand_errno_check (tree, rtx);
106 static rtx expand_builtin_mathfn (tree, rtx, rtx);
107 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
109 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
110 static rtx expand_builtin_sincos (tree);
111 static rtx expand_builtin_cexpi (tree, rtx, rtx);
112 static rtx expand_builtin_int_roundingfn (tree, rtx);
113 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
114 static rtx expand_builtin_args_info (tree);
115 static rtx expand_builtin_next_arg (void);
116 static rtx expand_builtin_va_start (tree);
117 static rtx expand_builtin_va_end (tree);
118 static rtx expand_builtin_va_copy (tree);
119 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_strcmp (tree, rtx);
121 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
122 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
123 static rtx expand_builtin_memcpy (tree, rtx);
124 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
126 enum machine_mode, int);
127 static rtx expand_builtin_strcpy (tree, rtx);
128 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
129 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_strncpy (tree, rtx);
131 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
132 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
133 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
134 static rtx expand_builtin_bzero (tree);
135 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_alloca (tree, rtx);
137 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
138 static rtx expand_builtin_frame_address (tree, tree);
139 static tree stabilize_va_list_loc (location_t, tree, int);
140 static rtx expand_builtin_expect (tree, rtx);
141 static tree fold_builtin_constant_p (tree);
142 static tree fold_builtin_expect (location_t, tree, tree);
143 static tree fold_builtin_classify_type (tree);
144 static tree fold_builtin_strlen (location_t, tree, tree);
145 static tree fold_builtin_inf (location_t, tree, int);
146 static tree fold_builtin_nan (tree, tree, int);
147 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
148 static bool validate_arg (const_tree, enum tree_code code);
149 static bool integer_valued_real_p (tree);
150 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
151 static bool readonly_data_expr (tree);
152 static rtx expand_builtin_fabs (tree, rtx, rtx);
153 static rtx expand_builtin_signbit (tree, rtx);
154 static tree fold_builtin_sqrt (location_t, tree, tree);
155 static tree fold_builtin_cbrt (location_t, tree, tree);
156 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
157 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
158 static tree fold_builtin_cos (location_t, tree, tree, tree);
159 static tree fold_builtin_cosh (location_t, tree, tree, tree);
160 static tree fold_builtin_tan (tree, tree);
161 static tree fold_builtin_trunc (location_t, tree, tree);
162 static tree fold_builtin_floor (location_t, tree, tree);
163 static tree fold_builtin_ceil (location_t, tree, tree);
164 static tree fold_builtin_round (location_t, tree, tree);
165 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
166 static tree fold_builtin_bitop (tree, tree);
167 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
168 static tree fold_builtin_strchr (location_t, tree, tree, tree);
169 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
170 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
171 static tree fold_builtin_strcmp (location_t, tree, tree);
172 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
173 static tree fold_builtin_signbit (location_t, tree, tree);
174 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
175 static tree fold_builtin_isascii (location_t, tree);
176 static tree fold_builtin_toascii (location_t, tree);
177 static tree fold_builtin_isdigit (location_t, tree);
178 static tree fold_builtin_fabs (location_t, tree, tree);
179 static tree fold_builtin_abs (location_t, tree, tree);
180 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
181 enum tree_code);
182 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
183 static tree fold_builtin_0 (location_t, tree, bool);
184 static tree fold_builtin_1 (location_t, tree, tree, bool);
185 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
186 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
187 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
188 static tree fold_builtin_varargs (location_t, tree, tree, bool);
190 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
191 static tree fold_builtin_strstr (location_t, tree, tree, tree);
192 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
193 static tree fold_builtin_strcat (location_t, tree, tree);
194 static tree fold_builtin_strncat (location_t, tree, tree, tree);
195 static tree fold_builtin_strspn (location_t, tree, tree);
196 static tree fold_builtin_strcspn (location_t, tree, tree);
197 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
199 static rtx expand_builtin_object_size (tree);
200 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
201 enum built_in_function);
202 static void maybe_emit_chk_warning (tree, enum built_in_function);
203 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
204 static void maybe_emit_free_warning (tree);
205 static tree fold_builtin_object_size (tree, tree);
206 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
207 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
208 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
209 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
210 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
211 enum built_in_function);
212 static bool init_target_chars (void);
214 static unsigned HOST_WIDE_INT target_newline;
215 static unsigned HOST_WIDE_INT target_percent;
216 static unsigned HOST_WIDE_INT target_c;
217 static unsigned HOST_WIDE_INT target_s;
218 static char target_percent_c[3];
219 static char target_percent_s[3];
220 static char target_percent_s_newline[4];
221 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
222 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
223 static tree do_mpfr_arg2 (tree, tree, tree,
224 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
225 static tree do_mpfr_arg3 (tree, tree, tree, tree,
226 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
227 static tree do_mpfr_sincos (tree, tree, tree);
228 static tree do_mpfr_bessel_n (tree, tree, tree,
229 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
230 const REAL_VALUE_TYPE *, bool);
231 static tree do_mpfr_remquo (tree, tree, tree);
232 static tree do_mpfr_lgamma_r (tree, tree, tree);
234 /* Return true if NAME starts with __builtin_ or __sync_. */
236 bool
237 is_builtin_name (const char *name)
239 if (strncmp (name, "__builtin_", 10) == 0)
240 return true;
241 if (strncmp (name, "__sync_", 7) == 0)
242 return true;
243 return false;
247 /* Return true if DECL is a function symbol representing a built-in. */
249 bool
250 is_builtin_fn (tree decl)
252 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
256 /* Return true if NODE should be considered for inline expansion regardless
257 of the optimization level. This means whenever a function is invoked with
258 its "internal" name, which normally contains the prefix "__builtin". */
260 static bool
261 called_as_built_in (tree node)
263 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
264 we want the name used to call the function, not the name it
265 will have. */
266 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
267 return is_builtin_name (name);
270 /* Return the alignment in bits of EXP, an object.
271 Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
272 guessed alignment e.g. from type alignment. */
275 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
277 unsigned int inner;
279 inner = max_align;
280 if (handled_component_p (exp))
282 HOST_WIDE_INT bitsize, bitpos;
283 tree offset;
284 enum machine_mode mode;
285 int unsignedp, volatilep;
287 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
288 &mode, &unsignedp, &volatilep, true);
289 if (bitpos)
290 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
291 while (offset)
293 tree next_offset;
295 if (TREE_CODE (offset) == PLUS_EXPR)
297 next_offset = TREE_OPERAND (offset, 0);
298 offset = TREE_OPERAND (offset, 1);
300 else
301 next_offset = NULL;
302 if (host_integerp (offset, 1))
304 /* Any overflow in calculating offset_bits won't change
305 the alignment. */
306 unsigned offset_bits
307 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
309 if (offset_bits)
310 inner = MIN (inner, (offset_bits & -offset_bits));
312 else if (TREE_CODE (offset) == MULT_EXPR
313 && host_integerp (TREE_OPERAND (offset, 1), 1))
315 /* Any overflow in calculating offset_factor won't change
316 the alignment. */
317 unsigned offset_factor
318 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
319 * BITS_PER_UNIT);
321 if (offset_factor)
322 inner = MIN (inner, (offset_factor & -offset_factor));
324 else
326 inner = MIN (inner, BITS_PER_UNIT);
327 break;
329 offset = next_offset;
332 if (TREE_CODE (exp) == CONST_DECL)
333 exp = DECL_INITIAL (exp);
334 if (DECL_P (exp)
335 && TREE_CODE (exp) != LABEL_DECL)
336 align = MIN (inner, DECL_ALIGN (exp));
337 #ifdef CONSTANT_ALIGNMENT
338 else if (CONSTANT_CLASS_P (exp))
339 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
340 #endif
341 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
342 || TREE_CODE (exp) == INDIRECT_REF)
343 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
344 else
345 align = MIN (align, inner);
346 return MIN (align, max_align);
349 /* Returns true iff we can trust that alignment information has been
350 calculated properly. */
352 bool
353 can_trust_pointer_alignment (void)
355 /* We rely on TER to compute accurate alignment information. */
356 return (optimize && flag_tree_ter);
359 /* Return the alignment in bits of EXP, a pointer valued expression.
360 But don't return more than MAX_ALIGN no matter what.
361 The alignment returned is, by default, the alignment of the thing that
362 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
364 Otherwise, look at the expression to see if we can do better, i.e., if the
365 expression is actually pointing at an object whose alignment is tighter. */
368 get_pointer_alignment (tree exp, unsigned int max_align)
370 unsigned int align, inner;
372 if (!can_trust_pointer_alignment ())
373 return 0;
375 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
376 return 0;
378 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
379 align = MIN (align, max_align);
381 while (1)
383 switch (TREE_CODE (exp))
385 CASE_CONVERT:
386 exp = TREE_OPERAND (exp, 0);
387 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
388 return align;
390 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
391 align = MIN (inner, max_align);
392 break;
394 case POINTER_PLUS_EXPR:
395 /* If sum of pointer + int, restrict our maximum alignment to that
396 imposed by the integer. If not, we can't do any better than
397 ALIGN. */
398 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
399 return align;
401 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
402 & (max_align / BITS_PER_UNIT - 1))
403 != 0)
404 max_align >>= 1;
406 exp = TREE_OPERAND (exp, 0);
407 break;
409 case ADDR_EXPR:
410 /* See what we are pointing at and look at its alignment. */
411 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
413 default:
414 return align;
419 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
420 way, because it could contain a zero byte in the middle.
421 TREE_STRING_LENGTH is the size of the character array, not the string.
423 ONLY_VALUE should be nonzero if the result is not going to be emitted
424 into the instruction stream and zero if it is going to be expanded.
425 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
426 is returned, otherwise NULL, since
427 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
428 evaluate the side-effects.
430 The value returned is of type `ssizetype'.
432 Unfortunately, string_constant can't access the values of const char
433 arrays with initializers, so neither can we do so here. */
435 tree
436 c_strlen (tree src, int only_value)
438 tree offset_node;
439 HOST_WIDE_INT offset;
440 int max;
441 const char *ptr;
442 location_t loc;
444 STRIP_NOPS (src);
445 if (TREE_CODE (src) == COND_EXPR
446 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
448 tree len1, len2;
450 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
451 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
452 if (tree_int_cst_equal (len1, len2))
453 return len1;
456 if (TREE_CODE (src) == COMPOUND_EXPR
457 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
458 return c_strlen (TREE_OPERAND (src, 1), only_value);
460 if (EXPR_HAS_LOCATION (src))
461 loc = EXPR_LOCATION (src);
462 else
463 loc = input_location;
465 src = string_constant (src, &offset_node);
466 if (src == 0)
467 return NULL_TREE;
469 max = TREE_STRING_LENGTH (src) - 1;
470 ptr = TREE_STRING_POINTER (src);
472 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
474 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
475 compute the offset to the following null if we don't know where to
476 start searching for it. */
477 int i;
479 for (i = 0; i < max; i++)
480 if (ptr[i] == 0)
481 return NULL_TREE;
483 /* We don't know the starting offset, but we do know that the string
484 has no internal zero bytes. We can assume that the offset falls
485 within the bounds of the string; otherwise, the programmer deserves
486 what he gets. Subtract the offset from the length of the string,
487 and return that. This would perhaps not be valid if we were dealing
488 with named arrays in addition to literal string constants. */
490 return size_diffop_loc (loc, size_int (max), offset_node);
493 /* We have a known offset into the string. Start searching there for
494 a null character if we can represent it as a single HOST_WIDE_INT. */
495 if (offset_node == 0)
496 offset = 0;
497 else if (! host_integerp (offset_node, 0))
498 offset = -1;
499 else
500 offset = tree_low_cst (offset_node, 0);
502 /* If the offset is known to be out of bounds, warn, and call strlen at
503 runtime. */
504 if (offset < 0 || offset > max)
506 /* Suppress multiple warnings for propagated constant strings. */
507 if (! TREE_NO_WARNING (src))
509 warning_at (loc, 0, "offset outside bounds of constant string");
510 TREE_NO_WARNING (src) = 1;
512 return NULL_TREE;
515 /* Use strlen to search for the first zero byte. Since any strings
516 constructed with build_string will have nulls appended, we win even
517 if we get handed something like (char[4])"abcd".
519 Since OFFSET is our starting index into the string, no further
520 calculation is needed. */
521 return ssize_int (strlen (ptr + offset));
524 /* Return a char pointer for a C string if it is a string constant
525 or sum of string constant and integer constant. */
527 static const char *
528 c_getstr (tree src)
530 tree offset_node;
532 src = string_constant (src, &offset_node);
533 if (src == 0)
534 return 0;
536 if (offset_node == 0)
537 return TREE_STRING_POINTER (src);
538 else if (!host_integerp (offset_node, 1)
539 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
540 return 0;
542 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
545 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
546 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
548 static rtx
549 c_readstr (const char *str, enum machine_mode mode)
551 HOST_WIDE_INT c[2];
552 HOST_WIDE_INT ch;
553 unsigned int i, j;
555 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
557 c[0] = 0;
558 c[1] = 0;
559 ch = 1;
560 for (i = 0; i < GET_MODE_SIZE (mode); i++)
562 j = i;
563 if (WORDS_BIG_ENDIAN)
564 j = GET_MODE_SIZE (mode) - i - 1;
565 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
566 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
567 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
568 j *= BITS_PER_UNIT;
569 gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);
571 if (ch)
572 ch = (unsigned char) str[i];
573 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
575 return immed_double_const (c[0], c[1], mode);
578 /* Cast a target constant CST to target CHAR and if that value fits into
579 host char type, return zero and put that value into variable pointed to by
580 P. */
582 static int
583 target_char_cast (tree cst, char *p)
585 unsigned HOST_WIDE_INT val, hostval;
587 if (!host_integerp (cst, 1)
588 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
589 return 1;
591 val = tree_low_cst (cst, 1);
592 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
593 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
595 hostval = val;
596 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
597 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
599 if (val != hostval)
600 return 1;
602 *p = hostval;
603 return 0;
606 /* Similar to save_expr, but assumes that arbitrary code is not executed
607 in between the multiple evaluations. In particular, we assume that a
608 non-addressable local variable will not be modified. */
610 static tree
611 builtin_save_expr (tree exp)
613 if (TREE_ADDRESSABLE (exp) == 0
614 && (TREE_CODE (exp) == PARM_DECL
615 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
616 return exp;
618 return save_expr (exp);
621 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
622 times to get the address of either a higher stack frame, or a return
623 address located within it (depending on FNDECL_CODE). */
625 static rtx
626 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
628 int i;
630 #ifdef INITIAL_FRAME_ADDRESS_RTX
631 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
632 #else
633 rtx tem;
635 /* For a zero count with __builtin_return_address, we don't care what
636 frame address we return, because target-specific definitions will
637 override us. Therefore frame pointer elimination is OK, and using
638 the soft frame pointer is OK.
640 For a nonzero count, or a zero count with __builtin_frame_address,
641 we require a stable offset from the current frame pointer to the
642 previous one, so we must use the hard frame pointer, and
643 we must disable frame pointer elimination. */
644 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
645 tem = frame_pointer_rtx;
646 else
648 tem = hard_frame_pointer_rtx;
650 /* Tell reload not to eliminate the frame pointer. */
651 crtl->accesses_prior_frames = 1;
653 #endif
655 /* Some machines need special handling before we can access
656 arbitrary frames. For example, on the SPARC, we must first flush
657 all register windows to the stack. */
658 #ifdef SETUP_FRAME_ADDRESSES
659 if (count > 0)
660 SETUP_FRAME_ADDRESSES ();
661 #endif
663 /* On the SPARC, the return address is not in the frame, it is in a
664 register. There is no way to access it off of the current frame
665 pointer, but it can be accessed off the previous frame pointer by
666 reading the value from the register window save area. */
667 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
668 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
669 count--;
670 #endif
672 /* Scan back COUNT frames to the specified frame. */
673 for (i = 0; i < count; i++)
675 /* Assume the dynamic chain pointer is in the word that the
676 frame address points to, unless otherwise specified. */
677 #ifdef DYNAMIC_CHAIN_ADDRESS
678 tem = DYNAMIC_CHAIN_ADDRESS (tem);
679 #endif
680 tem = memory_address (Pmode, tem);
681 tem = gen_frame_mem (Pmode, tem);
682 tem = copy_to_reg (tem);
685 /* For __builtin_frame_address, return what we've got. But, on
686 the SPARC for example, we may have to add a bias. */
687 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
688 #ifdef FRAME_ADDR_RTX
689 return FRAME_ADDR_RTX (tem);
690 #else
691 return tem;
692 #endif
694 /* For __builtin_return_address, get the return address from that frame. */
695 #ifdef RETURN_ADDR_RTX
696 tem = RETURN_ADDR_RTX (count, tem);
697 #else
698 tem = memory_address (Pmode,
699 plus_constant (tem, GET_MODE_SIZE (Pmode)));
700 tem = gen_frame_mem (Pmode, tem);
701 #endif
702 return tem;
705 /* Alias set used for setjmp buffer. */
706 static alias_set_type setjmp_alias_set = -1;
708 /* Construct the leading half of a __builtin_setjmp call. Control will
709 return to RECEIVER_LABEL. This is also called directly by the SJLJ
710 exception handling code. */
712 void
713 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
715 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
716 rtx stack_save;
717 rtx mem;
719 if (setjmp_alias_set == -1)
720 setjmp_alias_set = new_alias_set ();
722 buf_addr = convert_memory_address (Pmode, buf_addr);
724 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
726 /* We store the frame pointer and the address of receiver_label in
727 the buffer and use the rest of it for the stack save area, which
728 is machine-dependent. */
730 mem = gen_rtx_MEM (Pmode, buf_addr);
731 set_mem_alias_set (mem, setjmp_alias_set);
732 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
734 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
735 set_mem_alias_set (mem, setjmp_alias_set);
737 emit_move_insn (validize_mem (mem),
738 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
740 stack_save = gen_rtx_MEM (sa_mode,
741 plus_constant (buf_addr,
742 2 * GET_MODE_SIZE (Pmode)));
743 set_mem_alias_set (stack_save, setjmp_alias_set);
744 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
746 /* If there is further processing to do, do it. */
747 #ifdef HAVE_builtin_setjmp_setup
748 if (HAVE_builtin_setjmp_setup)
749 emit_insn (gen_builtin_setjmp_setup (buf_addr));
750 #endif
752 /* Tell optimize_save_area_alloca that extra work is going to
753 need to go on during alloca. */
754 cfun->calls_setjmp = 1;
756 /* We have a nonlocal label. */
757 cfun->has_nonlocal_label = 1;
760 /* Construct the trailing part of a __builtin_setjmp call. This is
761 also called directly by the SJLJ exception handling code. */
763 void
764 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
766 rtx chain;
768 /* Clobber the FP when we get here, so we have to make sure it's
769 marked as used by this function. */
770 emit_use (hard_frame_pointer_rtx);
772 /* Mark the static chain as clobbered here so life information
773 doesn't get messed up for it. */
774 chain = targetm.calls.static_chain (current_function_decl, true);
775 if (chain && REG_P (chain))
776 emit_clobber (chain);
778 /* Now put in the code to restore the frame pointer, and argument
779 pointer, if needed. */
780 #ifdef HAVE_nonlocal_goto
781 if (! HAVE_nonlocal_goto)
782 #endif
784 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
785 /* This might change the hard frame pointer in ways that aren't
786 apparent to early optimization passes, so force a clobber. */
787 emit_clobber (hard_frame_pointer_rtx);
790 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
791 if (fixed_regs[ARG_POINTER_REGNUM])
793 #ifdef ELIMINABLE_REGS
794 size_t i;
795 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
797 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
798 if (elim_regs[i].from == ARG_POINTER_REGNUM
799 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
800 break;
802 if (i == ARRAY_SIZE (elim_regs))
803 #endif
805 /* Now restore our arg pointer from the address at which it
806 was saved in our stack frame. */
807 emit_move_insn (crtl->args.internal_arg_pointer,
808 copy_to_reg (get_arg_pointer_save_area ()));
811 #endif
813 #ifdef HAVE_builtin_setjmp_receiver
814 if (HAVE_builtin_setjmp_receiver)
815 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
816 else
817 #endif
818 #ifdef HAVE_nonlocal_goto_receiver
819 if (HAVE_nonlocal_goto_receiver)
820 emit_insn (gen_nonlocal_goto_receiver ());
821 else
822 #endif
823 { /* Nothing */ }
825 /* We must not allow the code we just generated to be reordered by
826 scheduling. Specifically, the update of the frame pointer must
827 happen immediately, not later. */
828 emit_insn (gen_blockage ());
831 /* __builtin_longjmp is passed a pointer to an array of five words (not
832 all will be used on all machines). It operates similarly to the C
833 library function of the same name, but is more efficient. Much of
834 the code below is copied from the handling of non-local gotos. */
836 static void
837 expand_builtin_longjmp (rtx buf_addr, rtx value)
839 rtx fp, lab, stack, insn, last;
840 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
842 /* DRAP is needed for stack realign if longjmp is expanded to current
843 function */
844 if (SUPPORTS_STACK_ALIGNMENT)
845 crtl->need_drap = true;
847 if (setjmp_alias_set == -1)
848 setjmp_alias_set = new_alias_set ();
850 buf_addr = convert_memory_address (Pmode, buf_addr);
852 buf_addr = force_reg (Pmode, buf_addr);
854 /* We require that the user must pass a second argument of 1, because
855 that is what builtin_setjmp will return. */
856 gcc_assert (value == const1_rtx);
858 last = get_last_insn ();
859 #ifdef HAVE_builtin_longjmp
860 if (HAVE_builtin_longjmp)
861 emit_insn (gen_builtin_longjmp (buf_addr));
862 else
863 #endif
865 fp = gen_rtx_MEM (Pmode, buf_addr);
866 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
867 GET_MODE_SIZE (Pmode)));
869 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
870 2 * GET_MODE_SIZE (Pmode)));
871 set_mem_alias_set (fp, setjmp_alias_set);
872 set_mem_alias_set (lab, setjmp_alias_set);
873 set_mem_alias_set (stack, setjmp_alias_set);
875 /* Pick up FP, label, and SP from the block and jump. This code is
876 from expand_goto in stmt.c; see there for detailed comments. */
877 #ifdef HAVE_nonlocal_goto
878 if (HAVE_nonlocal_goto)
879 /* We have to pass a value to the nonlocal_goto pattern that will
880 get copied into the static_chain pointer, but it does not matter
881 what that value is, because builtin_setjmp does not use it. */
882 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
883 else
884 #endif
886 lab = copy_to_reg (lab);
888 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
889 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
891 emit_move_insn (hard_frame_pointer_rtx, fp);
892 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
894 emit_use (hard_frame_pointer_rtx);
895 emit_use (stack_pointer_rtx);
896 emit_indirect_jump (lab);
900 /* Search backwards and mark the jump insn as a non-local goto.
901 Note that this precludes the use of __builtin_longjmp to a
902 __builtin_setjmp target in the same function. However, we've
903 already cautioned the user that these functions are for
904 internal exception handling use only. */
905 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
907 gcc_assert (insn != last);
909 if (JUMP_P (insn))
911 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
912 break;
914 else if (CALL_P (insn))
915 break;
919 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
920 and the address of the save area. */
922 static rtx
923 expand_builtin_nonlocal_goto (tree exp)
925 tree t_label, t_save_area;
926 rtx r_label, r_save_area, r_fp, r_sp, insn;
928 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
929 return NULL_RTX;
931 t_label = CALL_EXPR_ARG (exp, 0);
932 t_save_area = CALL_EXPR_ARG (exp, 1);
934 r_label = expand_normal (t_label);
935 r_label = convert_memory_address (Pmode, r_label);
936 r_save_area = expand_normal (t_save_area);
937 r_save_area = convert_memory_address (Pmode, r_save_area);
938 /* Copy the address of the save location to a register just in case it was based
939 on the frame pointer. */
940 r_save_area = copy_to_reg (r_save_area);
941 r_fp = gen_rtx_MEM (Pmode, r_save_area);
942 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
943 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
945 crtl->has_nonlocal_goto = 1;
947 #ifdef HAVE_nonlocal_goto
948 /* ??? We no longer need to pass the static chain value, afaik. */
949 if (HAVE_nonlocal_goto)
950 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
951 else
952 #endif
954 r_label = copy_to_reg (r_label);
956 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
957 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
959 /* Restore frame pointer for containing function.
960 This sets the actual hard register used for the frame pointer
961 to the location of the function's incoming static chain info.
962 The non-local goto handler will then adjust it to contain the
963 proper value and reload the argument pointer, if needed. */
964 emit_move_insn (hard_frame_pointer_rtx, r_fp);
965 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
967 /* USE of hard_frame_pointer_rtx added for consistency;
968 not clear if really needed. */
969 emit_use (hard_frame_pointer_rtx);
970 emit_use (stack_pointer_rtx);
972 /* If the architecture is using a GP register, we must
973 conservatively assume that the target function makes use of it.
974 The prologue of functions with nonlocal gotos must therefore
975 initialize the GP register to the appropriate value, and we
976 must then make sure that this value is live at the point
977 of the jump. (Note that this doesn't necessarily apply
978 to targets with a nonlocal_goto pattern; they are free
979 to implement it in their own way. Note also that this is
980 a no-op if the GP register is a global invariant.) */
981 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
982 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
983 emit_use (pic_offset_table_rtx);
985 emit_indirect_jump (r_label);
988 /* Search backwards to the jump insn and mark it as a
989 non-local goto. */
990 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
992 if (JUMP_P (insn))
994 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
995 break;
997 else if (CALL_P (insn))
998 break;
1001 return const0_rtx;
1004 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1005 (not all will be used on all machines) that was passed to __builtin_setjmp.
1006 It updates the stack pointer in that block to correspond to the current
1007 stack pointer. */
1009 static void
1010 expand_builtin_update_setjmp_buf (rtx buf_addr)
1012 enum machine_mode sa_mode = Pmode;
1013 rtx stack_save;
1016 #ifdef HAVE_save_stack_nonlocal
1017 if (HAVE_save_stack_nonlocal)
1018 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1019 #endif
1020 #ifdef STACK_SAVEAREA_MODE
1021 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1022 #endif
1024 stack_save
1025 = gen_rtx_MEM (sa_mode,
1026 memory_address
1027 (sa_mode,
1028 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1030 #ifdef HAVE_setjmp
1031 if (HAVE_setjmp)
1032 emit_insn (gen_setjmp ());
1033 #endif
1035 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1038 /* Expand a call to __builtin_prefetch. For a target that does not support
1039 data prefetch, evaluate the memory address argument in case it has side
1040 effects. */
1042 static void
1043 expand_builtin_prefetch (tree exp)
1045 tree arg0, arg1, arg2;
1046 int nargs;
1047 rtx op0, op1, op2;
1049 if (!validate_arglist (exp, POINTER_TYPE, 0))
1050 return;
1052 arg0 = CALL_EXPR_ARG (exp, 0);
1054 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1055 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1056 locality). */
1057 nargs = call_expr_nargs (exp);
1058 if (nargs > 1)
1059 arg1 = CALL_EXPR_ARG (exp, 1);
1060 else
1061 arg1 = integer_zero_node;
1062 if (nargs > 2)
1063 arg2 = CALL_EXPR_ARG (exp, 2);
1064 else
1065 arg2 = integer_three_node;
1067 /* Argument 0 is an address. */
1068 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1070 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1071 if (TREE_CODE (arg1) != INTEGER_CST)
1073 error ("second argument to %<__builtin_prefetch%> must be a constant");
1074 arg1 = integer_zero_node;
1076 op1 = expand_normal (arg1);
1077 /* Argument 1 must be either zero or one. */
1078 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1080 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1081 " using zero");
1082 op1 = const0_rtx;
1085 /* Argument 2 (locality) must be a compile-time constant int. */
1086 if (TREE_CODE (arg2) != INTEGER_CST)
1088 error ("third argument to %<__builtin_prefetch%> must be a constant");
1089 arg2 = integer_zero_node;
1091 op2 = expand_normal (arg2);
1092 /* Argument 2 must be 0, 1, 2, or 3. */
1093 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1095 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1096 op2 = const0_rtx;
1099 #ifdef HAVE_prefetch
1100 if (HAVE_prefetch)
1102 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1103 (op0,
1104 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1105 || (GET_MODE (op0) != Pmode))
1107 op0 = convert_memory_address (Pmode, op0);
1108 op0 = force_reg (Pmode, op0);
1110 emit_insn (gen_prefetch (op0, op1, op2));
1112 #endif
1114 /* Don't do anything with direct references to volatile memory, but
1115 generate code to handle other side effects. */
1116 if (!MEM_P (op0) && side_effects_p (op0))
1117 emit_insn (op0);
1120 /* Get a MEM rtx for expression EXP which is the address of an operand
1121 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1122 the maximum length of the block of memory that might be accessed or
1123 NULL if unknown. */
1125 static rtx
1126 get_memory_rtx (tree exp, tree len)
1128 tree orig_exp = exp;
1129 rtx addr, mem;
1130 HOST_WIDE_INT off;
1132 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1133 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1134 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1135 exp = TREE_OPERAND (exp, 0);
1137 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1138 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1140 /* Get an expression we can use to find the attributes to assign to MEM.
1141 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1142 we can. First remove any nops. */
1143 while (CONVERT_EXPR_P (exp)
1144 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1145 exp = TREE_OPERAND (exp, 0);
1147 off = 0;
1148 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1149 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1150 && host_integerp (TREE_OPERAND (exp, 1), 0)
1151 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1152 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1153 else if (TREE_CODE (exp) == ADDR_EXPR)
1154 exp = TREE_OPERAND (exp, 0);
1155 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1156 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1157 else
1158 exp = NULL;
1160 /* Honor attributes derived from exp, except for the alias set
1161 (as builtin stringops may alias with anything) and the size
1162 (as stringops may access multiple array elements). */
1163 if (exp)
1165 set_mem_attributes (mem, exp, 0);
1167 if (off)
1168 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1170 /* Allow the string and memory builtins to overflow from one
1171 field into another, see http://gcc.gnu.org/PR23561.
1172 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1173 memory accessed by the string or memory builtin will fit
1174 within the field. */
1175 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1177 tree mem_expr = MEM_EXPR (mem);
1178 HOST_WIDE_INT offset = -1, length = -1;
1179 tree inner = exp;
1181 while (TREE_CODE (inner) == ARRAY_REF
1182 || CONVERT_EXPR_P (inner)
1183 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1184 || TREE_CODE (inner) == SAVE_EXPR)
1185 inner = TREE_OPERAND (inner, 0);
1187 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1189 if (MEM_OFFSET (mem)
1190 && CONST_INT_P (MEM_OFFSET (mem)))
1191 offset = INTVAL (MEM_OFFSET (mem));
1193 if (offset >= 0 && len && host_integerp (len, 0))
1194 length = tree_low_cst (len, 0);
1196 while (TREE_CODE (inner) == COMPONENT_REF)
1198 tree field = TREE_OPERAND (inner, 1);
1199 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1200 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1202 /* Bitfields are generally not byte-addressable. */
1203 gcc_assert (!DECL_BIT_FIELD (field)
1204 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1205 % BITS_PER_UNIT) == 0
1206 && host_integerp (DECL_SIZE (field), 0)
1207 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1208 % BITS_PER_UNIT) == 0));
1210 /* If we can prove that the memory starting at XEXP (mem, 0) and
1211 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1212 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1213 fields without DECL_SIZE_UNIT like flexible array members. */
1214 if (length >= 0
1215 && DECL_SIZE_UNIT (field)
1216 && host_integerp (DECL_SIZE_UNIT (field), 0))
1218 HOST_WIDE_INT size
1219 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1220 if (offset <= size
1221 && length <= size
1222 && offset + length <= size)
1223 break;
1226 if (offset >= 0
1227 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1228 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1229 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1230 / BITS_PER_UNIT;
1231 else
1233 offset = -1;
1234 length = -1;
1237 mem_expr = TREE_OPERAND (mem_expr, 0);
1238 inner = TREE_OPERAND (inner, 0);
1241 if (mem_expr == NULL)
1242 offset = -1;
1243 if (mem_expr != MEM_EXPR (mem))
1245 set_mem_expr (mem, mem_expr);
1246 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1249 set_mem_alias_set (mem, 0);
1250 set_mem_size (mem, NULL_RTX);
1253 return mem;
1256 /* Built-in functions to perform an untyped call and return. */
1258 #define apply_args_mode \
1259 (this_target_builtins->x_apply_args_mode)
1260 #define apply_result_mode \
1261 (this_target_builtins->x_apply_result_mode)
1263 /* Return the size required for the block returned by __builtin_apply_args,
1264 and initialize apply_args_mode. */
1266 static int
1267 apply_args_size (void)
1269 static int size = -1;
1270 int align;
1271 unsigned int regno;
1272 enum machine_mode mode;
1274 /* The values computed by this function never change. */
1275 if (size < 0)
1277 /* The first value is the incoming arg-pointer. */
1278 size = GET_MODE_SIZE (Pmode);
1280 /* The second value is the structure value address unless this is
1281 passed as an "invisible" first argument. */
1282 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1283 size += GET_MODE_SIZE (Pmode);
1285 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1286 if (FUNCTION_ARG_REGNO_P (regno))
1288 mode = reg_raw_mode[regno];
1290 gcc_assert (mode != VOIDmode);
1292 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1293 if (size % align != 0)
1294 size = CEIL (size, align) * align;
1295 size += GET_MODE_SIZE (mode);
1296 apply_args_mode[regno] = mode;
1298 else
1300 apply_args_mode[regno] = VOIDmode;
1303 return size;
1306 /* Return the size required for the block returned by __builtin_apply,
1307 and initialize apply_result_mode. */
1309 static int
1310 apply_result_size (void)
1312 static int size = -1;
1313 int align, regno;
1314 enum machine_mode mode;
1316 /* The values computed by this function never change. */
1317 if (size < 0)
1319 size = 0;
1321 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1322 if (targetm.calls.function_value_regno_p (regno))
1324 mode = reg_raw_mode[regno];
1326 gcc_assert (mode != VOIDmode);
1328 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1329 if (size % align != 0)
1330 size = CEIL (size, align) * align;
1331 size += GET_MODE_SIZE (mode);
1332 apply_result_mode[regno] = mode;
1334 else
1335 apply_result_mode[regno] = VOIDmode;
1337 /* Allow targets that use untyped_call and untyped_return to override
1338 the size so that machine-specific information can be stored here. */
1339 #ifdef APPLY_RESULT_SIZE
1340 size = APPLY_RESULT_SIZE;
1341 #endif
1343 return size;
1346 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1347 /* Create a vector describing the result block RESULT. If SAVEP is true,
1348 the result block is used to save the values; otherwise it is used to
1349 restore the values. */
1351 static rtx
1352 result_vector (int savep, rtx result)
1354 int regno, size, align, nelts;
1355 enum machine_mode mode;
1356 rtx reg, mem;
1357 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1359 size = nelts = 0;
1360 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1361 if ((mode = apply_result_mode[regno]) != VOIDmode)
1363 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1364 if (size % align != 0)
1365 size = CEIL (size, align) * align;
1366 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1367 mem = adjust_address (result, mode, size);
1368 savevec[nelts++] = (savep
1369 ? gen_rtx_SET (VOIDmode, mem, reg)
1370 : gen_rtx_SET (VOIDmode, reg, mem));
1371 size += GET_MODE_SIZE (mode);
1373 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1375 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1377 /* Save the state required to perform an untyped call with the same
1378 arguments as were passed to the current function. */
1380 static rtx
1381 expand_builtin_apply_args_1 (void)
1383 rtx registers, tem;
1384 int size, align, regno;
1385 enum machine_mode mode;
1386 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1388 /* Create a block where the arg-pointer, structure value address,
1389 and argument registers can be saved. */
1390 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1392 /* Walk past the arg-pointer and structure value address. */
1393 size = GET_MODE_SIZE (Pmode);
1394 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1395 size += GET_MODE_SIZE (Pmode);
1397 /* Save each register used in calling a function to the block. */
1398 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1399 if ((mode = apply_args_mode[regno]) != VOIDmode)
1401 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1402 if (size % align != 0)
1403 size = CEIL (size, align) * align;
1405 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1407 emit_move_insn (adjust_address (registers, mode, size), tem);
1408 size += GET_MODE_SIZE (mode);
1411 /* Save the arg pointer to the block. */
1412 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1413 #ifdef STACK_GROWS_DOWNWARD
1414 /* We need the pointer as the caller actually passed them to us, not
1415 as we might have pretended they were passed. Make sure it's a valid
1416 operand, as emit_move_insn isn't expected to handle a PLUS. */
1418 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1419 NULL_RTX);
1420 #endif
1421 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1423 size = GET_MODE_SIZE (Pmode);
1425 /* Save the structure value address unless this is passed as an
1426 "invisible" first argument. */
1427 if (struct_incoming_value)
1429 emit_move_insn (adjust_address (registers, Pmode, size),
1430 copy_to_reg (struct_incoming_value));
1431 size += GET_MODE_SIZE (Pmode);
1434 /* Return the address of the block. */
1435 return copy_addr_to_reg (XEXP (registers, 0));
1438 /* __builtin_apply_args returns block of memory allocated on
1439 the stack into which is stored the arg pointer, structure
1440 value address, static chain, and all the registers that might
1441 possibly be used in performing a function call. The code is
1442 moved to the start of the function so the incoming values are
1443 saved. */
1445 static rtx
1446 expand_builtin_apply_args (void)
1448 /* Don't do __builtin_apply_args more than once in a function.
1449 Save the result of the first call and reuse it. */
1450 if (apply_args_value != 0)
1451 return apply_args_value;
1453 /* When this function is called, it means that registers must be
1454 saved on entry to this function. So we migrate the
1455 call to the first insn of this function. */
1456 rtx temp;
1457 rtx seq;
1459 start_sequence ();
1460 temp = expand_builtin_apply_args_1 ();
1461 seq = get_insns ();
1462 end_sequence ();
1464 apply_args_value = temp;
1466 /* Put the insns after the NOTE that starts the function.
1467 If this is inside a start_sequence, make the outer-level insn
1468 chain current, so the code is placed at the start of the
1469 function. If internal_arg_pointer is a non-virtual pseudo,
1470 it needs to be placed after the function that initializes
1471 that pseudo. */
1472 push_topmost_sequence ();
1473 if (REG_P (crtl->args.internal_arg_pointer)
1474 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1475 emit_insn_before (seq, parm_birth_insn);
1476 else
1477 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1478 pop_topmost_sequence ();
1479 return temp;
1483 /* Perform an untyped call and save the state required to perform an
1484 untyped return of whatever value was returned by the given function. */
1486 static rtx
1487 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1489 int size, align, regno;
1490 enum machine_mode mode;
1491 rtx incoming_args, result, reg, dest, src, call_insn;
1492 rtx old_stack_level = 0;
1493 rtx call_fusage = 0;
1494 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1496 arguments = convert_memory_address (Pmode, arguments);
1498 /* Create a block where the return registers can be saved. */
1499 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1501 /* Fetch the arg pointer from the ARGUMENTS block. */
1502 incoming_args = gen_reg_rtx (Pmode);
1503 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1504 #ifndef STACK_GROWS_DOWNWARD
1505 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1506 incoming_args, 0, OPTAB_LIB_WIDEN);
1507 #endif
1509 /* Push a new argument block and copy the arguments. Do not allow
1510 the (potential) memcpy call below to interfere with our stack
1511 manipulations. */
1512 do_pending_stack_adjust ();
1513 NO_DEFER_POP;
1515 /* Save the stack with nonlocal if available. */
1516 #ifdef HAVE_save_stack_nonlocal
1517 if (HAVE_save_stack_nonlocal)
1518 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1519 else
1520 #endif
1521 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1523 /* Allocate a block of memory onto the stack and copy the memory
1524 arguments to the outgoing arguments address. */
1525 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1527 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1528 may have already set current_function_calls_alloca to true.
1529 current_function_calls_alloca won't be set if argsize is zero,
1530 so we have to guarantee need_drap is true here. */
1531 if (SUPPORTS_STACK_ALIGNMENT)
1532 crtl->need_drap = true;
1534 dest = virtual_outgoing_args_rtx;
1535 #ifndef STACK_GROWS_DOWNWARD
1536 if (CONST_INT_P (argsize))
1537 dest = plus_constant (dest, -INTVAL (argsize));
1538 else
1539 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1540 #endif
1541 dest = gen_rtx_MEM (BLKmode, dest);
1542 set_mem_align (dest, PARM_BOUNDARY);
1543 src = gen_rtx_MEM (BLKmode, incoming_args);
1544 set_mem_align (src, PARM_BOUNDARY);
1545 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1547 /* Refer to the argument block. */
1548 apply_args_size ();
1549 arguments = gen_rtx_MEM (BLKmode, arguments);
1550 set_mem_align (arguments, PARM_BOUNDARY);
1552 /* Walk past the arg-pointer and structure value address. */
1553 size = GET_MODE_SIZE (Pmode);
1554 if (struct_value)
1555 size += GET_MODE_SIZE (Pmode);
1557 /* Restore each of the registers previously saved. Make USE insns
1558 for each of these registers for use in making the call. */
1559 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1560 if ((mode = apply_args_mode[regno]) != VOIDmode)
1562 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1563 if (size % align != 0)
1564 size = CEIL (size, align) * align;
1565 reg = gen_rtx_REG (mode, regno);
1566 emit_move_insn (reg, adjust_address (arguments, mode, size));
1567 use_reg (&call_fusage, reg);
1568 size += GET_MODE_SIZE (mode);
1571 /* Restore the structure value address unless this is passed as an
1572 "invisible" first argument. */
1573 size = GET_MODE_SIZE (Pmode);
1574 if (struct_value)
1576 rtx value = gen_reg_rtx (Pmode);
1577 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1578 emit_move_insn (struct_value, value);
1579 if (REG_P (struct_value))
1580 use_reg (&call_fusage, struct_value);
1581 size += GET_MODE_SIZE (Pmode);
1584 /* All arguments and registers used for the call are set up by now! */
1585 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1587 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1588 and we don't want to load it into a register as an optimization,
1589 because prepare_call_address already did it if it should be done. */
1590 if (GET_CODE (function) != SYMBOL_REF)
1591 function = memory_address (FUNCTION_MODE, function);
1593 /* Generate the actual call instruction and save the return value. */
1594 #ifdef HAVE_untyped_call
1595 if (HAVE_untyped_call)
1596 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1597 result, result_vector (1, result)));
1598 else
1599 #endif
1600 #ifdef HAVE_call_value
1601 if (HAVE_call_value)
1603 rtx valreg = 0;
1605 /* Locate the unique return register. It is not possible to
1606 express a call that sets more than one return register using
1607 call_value; use untyped_call for that. In fact, untyped_call
1608 only needs to save the return registers in the given block. */
1609 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1610 if ((mode = apply_result_mode[regno]) != VOIDmode)
1612 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1614 valreg = gen_rtx_REG (mode, regno);
1617 emit_call_insn (GEN_CALL_VALUE (valreg,
1618 gen_rtx_MEM (FUNCTION_MODE, function),
1619 const0_rtx, NULL_RTX, const0_rtx));
1621 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1623 else
1624 #endif
1625 gcc_unreachable ();
1627 /* Find the CALL insn we just emitted, and attach the register usage
1628 information. */
1629 call_insn = last_call_insn ();
1630 add_function_usage_to (call_insn, call_fusage);
1632 /* Restore the stack. */
1633 #ifdef HAVE_save_stack_nonlocal
1634 if (HAVE_save_stack_nonlocal)
1635 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1636 else
1637 #endif
1638 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1640 OK_DEFER_POP;
1642 /* Return the address of the result block. */
1643 result = copy_addr_to_reg (XEXP (result, 0));
1644 return convert_memory_address (ptr_mode, result);
1647 /* Perform an untyped return. */
1649 static void
1650 expand_builtin_return (rtx result)
1652 int size, align, regno;
1653 enum machine_mode mode;
1654 rtx reg;
1655 rtx call_fusage = 0;
1657 result = convert_memory_address (Pmode, result);
1659 apply_result_size ();
1660 result = gen_rtx_MEM (BLKmode, result);
1662 #ifdef HAVE_untyped_return
1663 if (HAVE_untyped_return)
1665 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1666 emit_barrier ();
1667 return;
1669 #endif
1671 /* Restore the return value and note that each value is used. */
1672 size = 0;
1673 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1674 if ((mode = apply_result_mode[regno]) != VOIDmode)
1676 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1677 if (size % align != 0)
1678 size = CEIL (size, align) * align;
1679 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1680 emit_move_insn (reg, adjust_address (result, mode, size));
1682 push_to_sequence (call_fusage);
1683 emit_use (reg);
1684 call_fusage = get_insns ();
1685 end_sequence ();
1686 size += GET_MODE_SIZE (mode);
1689 /* Put the USE insns before the return. */
1690 emit_insn (call_fusage);
1692 /* Return whatever values was restored by jumping directly to the end
1693 of the function. */
1694 expand_naked_return ();
1697 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1699 static enum type_class
1700 type_to_class (tree type)
1702 switch (TREE_CODE (type))
1704 case VOID_TYPE: return void_type_class;
1705 case INTEGER_TYPE: return integer_type_class;
1706 case ENUMERAL_TYPE: return enumeral_type_class;
1707 case BOOLEAN_TYPE: return boolean_type_class;
1708 case POINTER_TYPE: return pointer_type_class;
1709 case REFERENCE_TYPE: return reference_type_class;
1710 case OFFSET_TYPE: return offset_type_class;
1711 case REAL_TYPE: return real_type_class;
1712 case COMPLEX_TYPE: return complex_type_class;
1713 case FUNCTION_TYPE: return function_type_class;
1714 case METHOD_TYPE: return method_type_class;
1715 case RECORD_TYPE: return record_type_class;
1716 case UNION_TYPE:
1717 case QUAL_UNION_TYPE: return union_type_class;
1718 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1719 ? string_type_class : array_type_class);
1720 case LANG_TYPE: return lang_type_class;
1721 default: return no_type_class;
1725 /* Expand a call EXP to __builtin_classify_type. */
1727 static rtx
1728 expand_builtin_classify_type (tree exp)
1730 if (call_expr_nargs (exp))
1731 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1732 return GEN_INT (no_type_class);
1735 /* This helper macro, meant to be used in mathfn_built_in below,
1736 determines which among a set of three builtin math functions is
1737 appropriate for a given type mode. The `F' and `L' cases are
1738 automatically generated from the `double' case. */
1739 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1740 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1741 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1742 fcodel = BUILT_IN_MATHFN##L ; break;
1743 /* Similar to above, but appends _R after any F/L suffix. */
1744 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1745 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1746 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1747 fcodel = BUILT_IN_MATHFN##L_R ; break;
1749 /* Return mathematic function equivalent to FN but operating directly
1750 on TYPE, if available. If IMPLICIT is true find the function in
1751 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1752 can't do the conversion, return zero. */
1754 static tree
1755 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1757 tree const *const fn_arr
1758 = implicit ? implicit_built_in_decls : built_in_decls;
1759 enum built_in_function fcode, fcodef, fcodel;
1761 switch (fn)
1763 CASE_MATHFN (BUILT_IN_ACOS)
1764 CASE_MATHFN (BUILT_IN_ACOSH)
1765 CASE_MATHFN (BUILT_IN_ASIN)
1766 CASE_MATHFN (BUILT_IN_ASINH)
1767 CASE_MATHFN (BUILT_IN_ATAN)
1768 CASE_MATHFN (BUILT_IN_ATAN2)
1769 CASE_MATHFN (BUILT_IN_ATANH)
1770 CASE_MATHFN (BUILT_IN_CBRT)
1771 CASE_MATHFN (BUILT_IN_CEIL)
1772 CASE_MATHFN (BUILT_IN_CEXPI)
1773 CASE_MATHFN (BUILT_IN_COPYSIGN)
1774 CASE_MATHFN (BUILT_IN_COS)
1775 CASE_MATHFN (BUILT_IN_COSH)
1776 CASE_MATHFN (BUILT_IN_DREM)
1777 CASE_MATHFN (BUILT_IN_ERF)
1778 CASE_MATHFN (BUILT_IN_ERFC)
1779 CASE_MATHFN (BUILT_IN_EXP)
1780 CASE_MATHFN (BUILT_IN_EXP10)
1781 CASE_MATHFN (BUILT_IN_EXP2)
1782 CASE_MATHFN (BUILT_IN_EXPM1)
1783 CASE_MATHFN (BUILT_IN_FABS)
1784 CASE_MATHFN (BUILT_IN_FDIM)
1785 CASE_MATHFN (BUILT_IN_FLOOR)
1786 CASE_MATHFN (BUILT_IN_FMA)
1787 CASE_MATHFN (BUILT_IN_FMAX)
1788 CASE_MATHFN (BUILT_IN_FMIN)
1789 CASE_MATHFN (BUILT_IN_FMOD)
1790 CASE_MATHFN (BUILT_IN_FREXP)
1791 CASE_MATHFN (BUILT_IN_GAMMA)
1792 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1793 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1794 CASE_MATHFN (BUILT_IN_HYPOT)
1795 CASE_MATHFN (BUILT_IN_ILOGB)
1796 CASE_MATHFN (BUILT_IN_INF)
1797 CASE_MATHFN (BUILT_IN_ISINF)
1798 CASE_MATHFN (BUILT_IN_J0)
1799 CASE_MATHFN (BUILT_IN_J1)
1800 CASE_MATHFN (BUILT_IN_JN)
1801 CASE_MATHFN (BUILT_IN_LCEIL)
1802 CASE_MATHFN (BUILT_IN_LDEXP)
1803 CASE_MATHFN (BUILT_IN_LFLOOR)
1804 CASE_MATHFN (BUILT_IN_LGAMMA)
1805 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1806 CASE_MATHFN (BUILT_IN_LLCEIL)
1807 CASE_MATHFN (BUILT_IN_LLFLOOR)
1808 CASE_MATHFN (BUILT_IN_LLRINT)
1809 CASE_MATHFN (BUILT_IN_LLROUND)
1810 CASE_MATHFN (BUILT_IN_LOG)
1811 CASE_MATHFN (BUILT_IN_LOG10)
1812 CASE_MATHFN (BUILT_IN_LOG1P)
1813 CASE_MATHFN (BUILT_IN_LOG2)
1814 CASE_MATHFN (BUILT_IN_LOGB)
1815 CASE_MATHFN (BUILT_IN_LRINT)
1816 CASE_MATHFN (BUILT_IN_LROUND)
1817 CASE_MATHFN (BUILT_IN_MODF)
1818 CASE_MATHFN (BUILT_IN_NAN)
1819 CASE_MATHFN (BUILT_IN_NANS)
1820 CASE_MATHFN (BUILT_IN_NEARBYINT)
1821 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1822 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1823 CASE_MATHFN (BUILT_IN_POW)
1824 CASE_MATHFN (BUILT_IN_POWI)
1825 CASE_MATHFN (BUILT_IN_POW10)
1826 CASE_MATHFN (BUILT_IN_REMAINDER)
1827 CASE_MATHFN (BUILT_IN_REMQUO)
1828 CASE_MATHFN (BUILT_IN_RINT)
1829 CASE_MATHFN (BUILT_IN_ROUND)
1830 CASE_MATHFN (BUILT_IN_SCALB)
1831 CASE_MATHFN (BUILT_IN_SCALBLN)
1832 CASE_MATHFN (BUILT_IN_SCALBN)
1833 CASE_MATHFN (BUILT_IN_SIGNBIT)
1834 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1835 CASE_MATHFN (BUILT_IN_SIN)
1836 CASE_MATHFN (BUILT_IN_SINCOS)
1837 CASE_MATHFN (BUILT_IN_SINH)
1838 CASE_MATHFN (BUILT_IN_SQRT)
1839 CASE_MATHFN (BUILT_IN_TAN)
1840 CASE_MATHFN (BUILT_IN_TANH)
1841 CASE_MATHFN (BUILT_IN_TGAMMA)
1842 CASE_MATHFN (BUILT_IN_TRUNC)
1843 CASE_MATHFN (BUILT_IN_Y0)
1844 CASE_MATHFN (BUILT_IN_Y1)
1845 CASE_MATHFN (BUILT_IN_YN)
1847 default:
1848 return NULL_TREE;
1851 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1852 return fn_arr[fcode];
1853 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1854 return fn_arr[fcodef];
1855 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1856 return fn_arr[fcodel];
1857 else
1858 return NULL_TREE;
1861 /* Like mathfn_built_in_1(), but always use the implicit array. */
1863 tree
1864 mathfn_built_in (tree type, enum built_in_function fn)
1866 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1869 /* If errno must be maintained, expand the RTL to check if the result,
1870 TARGET, of a built-in function call, EXP, is NaN, and if so set
1871 errno to EDOM. */
1873 static void
1874 expand_errno_check (tree exp, rtx target)
1876 rtx lab = gen_label_rtx ();
1878 /* Test the result; if it is NaN, set errno=EDOM because
1879 the argument was not in the domain. */
1880 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1881 NULL_RTX, NULL_RTX, lab,
1882 /* The jump is very likely. */
1883 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1885 #ifdef TARGET_EDOM
1886 /* If this built-in doesn't throw an exception, set errno directly. */
1887 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1889 #ifdef GEN_ERRNO_RTX
1890 rtx errno_rtx = GEN_ERRNO_RTX;
1891 #else
1892 rtx errno_rtx
1893 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1894 #endif
1895 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1896 emit_label (lab);
1897 return;
1899 #endif
1901 /* Make sure the library call isn't expanded as a tail call. */
1902 CALL_EXPR_TAILCALL (exp) = 0;
1904 /* We can't set errno=EDOM directly; let the library call do it.
1905 Pop the arguments right away in case the call gets deleted. */
1906 NO_DEFER_POP;
1907 expand_call (exp, target, 0);
1908 OK_DEFER_POP;
1909 emit_label (lab);
1912 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1913 Return NULL_RTX if a normal call should be emitted rather than expanding
1914 the function in-line. EXP is the expression that is a call to the builtin
1915 function; if convenient, the result should be placed in TARGET.
1916 SUBTARGET may be used as the target for computing one of EXP's operands. */
1918 static rtx
1919 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1921 optab builtin_optab;
1922 rtx op0, insns;
1923 tree fndecl = get_callee_fndecl (exp);
1924 enum machine_mode mode;
1925 bool errno_set = false;
1926 tree arg;
1928 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1929 return NULL_RTX;
1931 arg = CALL_EXPR_ARG (exp, 0);
1933 switch (DECL_FUNCTION_CODE (fndecl))
1935 CASE_FLT_FN (BUILT_IN_SQRT):
1936 errno_set = ! tree_expr_nonnegative_p (arg);
1937 builtin_optab = sqrt_optab;
1938 break;
1939 CASE_FLT_FN (BUILT_IN_EXP):
1940 errno_set = true; builtin_optab = exp_optab; break;
1941 CASE_FLT_FN (BUILT_IN_EXP10):
1942 CASE_FLT_FN (BUILT_IN_POW10):
1943 errno_set = true; builtin_optab = exp10_optab; break;
1944 CASE_FLT_FN (BUILT_IN_EXP2):
1945 errno_set = true; builtin_optab = exp2_optab; break;
1946 CASE_FLT_FN (BUILT_IN_EXPM1):
1947 errno_set = true; builtin_optab = expm1_optab; break;
1948 CASE_FLT_FN (BUILT_IN_LOGB):
1949 errno_set = true; builtin_optab = logb_optab; break;
1950 CASE_FLT_FN (BUILT_IN_LOG):
1951 errno_set = true; builtin_optab = log_optab; break;
1952 CASE_FLT_FN (BUILT_IN_LOG10):
1953 errno_set = true; builtin_optab = log10_optab; break;
1954 CASE_FLT_FN (BUILT_IN_LOG2):
1955 errno_set = true; builtin_optab = log2_optab; break;
1956 CASE_FLT_FN (BUILT_IN_LOG1P):
1957 errno_set = true; builtin_optab = log1p_optab; break;
1958 CASE_FLT_FN (BUILT_IN_ASIN):
1959 builtin_optab = asin_optab; break;
1960 CASE_FLT_FN (BUILT_IN_ACOS):
1961 builtin_optab = acos_optab; break;
1962 CASE_FLT_FN (BUILT_IN_TAN):
1963 builtin_optab = tan_optab; break;
1964 CASE_FLT_FN (BUILT_IN_ATAN):
1965 builtin_optab = atan_optab; break;
1966 CASE_FLT_FN (BUILT_IN_FLOOR):
1967 builtin_optab = floor_optab; break;
1968 CASE_FLT_FN (BUILT_IN_CEIL):
1969 builtin_optab = ceil_optab; break;
1970 CASE_FLT_FN (BUILT_IN_TRUNC):
1971 builtin_optab = btrunc_optab; break;
1972 CASE_FLT_FN (BUILT_IN_ROUND):
1973 builtin_optab = round_optab; break;
1974 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1975 builtin_optab = nearbyint_optab;
1976 if (flag_trapping_math)
1977 break;
1978 /* Else fallthrough and expand as rint. */
1979 CASE_FLT_FN (BUILT_IN_RINT):
1980 builtin_optab = rint_optab; break;
1981 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1982 builtin_optab = significand_optab; break;
1983 default:
1984 gcc_unreachable ();
1987 /* Make a suitable register to place result in. */
1988 mode = TYPE_MODE (TREE_TYPE (exp));
1990 if (! flag_errno_math || ! HONOR_NANS (mode))
1991 errno_set = false;
1993 /* Before working hard, check whether the instruction is available. */
1994 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
1996 target = gen_reg_rtx (mode);
1998 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1999 need to expand the argument again. This way, we will not perform
2000 side-effects more the once. */
2001 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2003 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2005 start_sequence ();
2007 /* Compute into TARGET.
2008 Set TARGET to wherever the result comes back. */
2009 target = expand_unop (mode, builtin_optab, op0, target, 0);
2011 if (target != 0)
2013 if (errno_set)
2014 expand_errno_check (exp, target);
2016 /* Output the entire sequence. */
2017 insns = get_insns ();
2018 end_sequence ();
2019 emit_insn (insns);
2020 return target;
2023 /* If we were unable to expand via the builtin, stop the sequence
2024 (without outputting the insns) and call to the library function
2025 with the stabilized argument list. */
2026 end_sequence ();
2029 return expand_call (exp, target, target == const0_rtx);
2032 /* Expand a call to the builtin binary math functions (pow and atan2).
2033 Return NULL_RTX if a normal call should be emitted rather than expanding the
2034 function in-line. EXP is the expression that is a call to the builtin
2035 function; if convenient, the result should be placed in TARGET.
2036 SUBTARGET may be used as the target for computing one of EXP's
2037 operands. */
2039 static rtx
2040 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2042 optab builtin_optab;
2043 rtx op0, op1, insns;
2044 int op1_type = REAL_TYPE;
2045 tree fndecl = get_callee_fndecl (exp);
2046 tree arg0, arg1;
2047 enum machine_mode mode;
2048 bool errno_set = true;
2050 switch (DECL_FUNCTION_CODE (fndecl))
2052 CASE_FLT_FN (BUILT_IN_SCALBN):
2053 CASE_FLT_FN (BUILT_IN_SCALBLN):
2054 CASE_FLT_FN (BUILT_IN_LDEXP):
2055 op1_type = INTEGER_TYPE;
2056 default:
2057 break;
2060 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2061 return NULL_RTX;
2063 arg0 = CALL_EXPR_ARG (exp, 0);
2064 arg1 = CALL_EXPR_ARG (exp, 1);
2066 switch (DECL_FUNCTION_CODE (fndecl))
2068 CASE_FLT_FN (BUILT_IN_POW):
2069 builtin_optab = pow_optab; break;
2070 CASE_FLT_FN (BUILT_IN_ATAN2):
2071 builtin_optab = atan2_optab; break;
2072 CASE_FLT_FN (BUILT_IN_SCALB):
2073 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2074 return 0;
2075 builtin_optab = scalb_optab; break;
2076 CASE_FLT_FN (BUILT_IN_SCALBN):
2077 CASE_FLT_FN (BUILT_IN_SCALBLN):
2078 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2079 return 0;
2080 /* Fall through... */
2081 CASE_FLT_FN (BUILT_IN_LDEXP):
2082 builtin_optab = ldexp_optab; break;
2083 CASE_FLT_FN (BUILT_IN_FMOD):
2084 builtin_optab = fmod_optab; break;
2085 CASE_FLT_FN (BUILT_IN_REMAINDER):
2086 CASE_FLT_FN (BUILT_IN_DREM):
2087 builtin_optab = remainder_optab; break;
2088 default:
2089 gcc_unreachable ();
2092 /* Make a suitable register to place result in. */
2093 mode = TYPE_MODE (TREE_TYPE (exp));
2095 /* Before working hard, check whether the instruction is available. */
2096 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2097 return NULL_RTX;
2099 target = gen_reg_rtx (mode);
2101 if (! flag_errno_math || ! HONOR_NANS (mode))
2102 errno_set = false;
2104 /* Always stabilize the argument list. */
2105 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2106 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2108 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2109 op1 = expand_normal (arg1);
2111 start_sequence ();
2113 /* Compute into TARGET.
2114 Set TARGET to wherever the result comes back. */
2115 target = expand_binop (mode, builtin_optab, op0, op1,
2116 target, 0, OPTAB_DIRECT);
2118 /* If we were unable to expand via the builtin, stop the sequence
2119 (without outputting the insns) and call to the library function
2120 with the stabilized argument list. */
2121 if (target == 0)
2123 end_sequence ();
2124 return expand_call (exp, target, target == const0_rtx);
2127 if (errno_set)
2128 expand_errno_check (exp, target);
2130 /* Output the entire sequence. */
2131 insns = get_insns ();
2132 end_sequence ();
2133 emit_insn (insns);
2135 return target;
2138 /* Expand a call to the builtin sin and cos math functions.
2139 Return NULL_RTX if a normal call should be emitted rather than expanding the
2140 function in-line. EXP is the expression that is a call to the builtin
2141 function; if convenient, the result should be placed in TARGET.
2142 SUBTARGET may be used as the target for computing one of EXP's
2143 operands. */
2145 static rtx
2146 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2148 optab builtin_optab;
2149 rtx op0, insns;
2150 tree fndecl = get_callee_fndecl (exp);
2151 enum machine_mode mode;
2152 tree arg;
2154 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2155 return NULL_RTX;
2157 arg = CALL_EXPR_ARG (exp, 0);
2159 switch (DECL_FUNCTION_CODE (fndecl))
2161 CASE_FLT_FN (BUILT_IN_SIN):
2162 CASE_FLT_FN (BUILT_IN_COS):
2163 builtin_optab = sincos_optab; break;
2164 default:
2165 gcc_unreachable ();
2168 /* Make a suitable register to place result in. */
2169 mode = TYPE_MODE (TREE_TYPE (exp));
2171 /* Check if sincos insn is available, otherwise fallback
2172 to sin or cos insn. */
2173 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2174 switch (DECL_FUNCTION_CODE (fndecl))
2176 CASE_FLT_FN (BUILT_IN_SIN):
2177 builtin_optab = sin_optab; break;
2178 CASE_FLT_FN (BUILT_IN_COS):
2179 builtin_optab = cos_optab; break;
2180 default:
2181 gcc_unreachable ();
2184 /* Before working hard, check whether the instruction is available. */
2185 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2187 target = gen_reg_rtx (mode);
2189 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2190 need to expand the argument again. This way, we will not perform
2191 side-effects more the once. */
2192 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2194 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2196 start_sequence ();
2198 /* Compute into TARGET.
2199 Set TARGET to wherever the result comes back. */
2200 if (builtin_optab == sincos_optab)
2202 int result;
2204 switch (DECL_FUNCTION_CODE (fndecl))
2206 CASE_FLT_FN (BUILT_IN_SIN):
2207 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2208 break;
2209 CASE_FLT_FN (BUILT_IN_COS):
2210 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2211 break;
2212 default:
2213 gcc_unreachable ();
2215 gcc_assert (result);
2217 else
2219 target = expand_unop (mode, builtin_optab, op0, target, 0);
2222 if (target != 0)
2224 /* Output the entire sequence. */
2225 insns = get_insns ();
2226 end_sequence ();
2227 emit_insn (insns);
2228 return target;
2231 /* If we were unable to expand via the builtin, stop the sequence
2232 (without outputting the insns) and call to the library function
2233 with the stabilized argument list. */
2234 end_sequence ();
2237 target = expand_call (exp, target, target == const0_rtx);
2239 return target;
2242 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2243 return an RTL instruction code that implements the functionality.
2244 If that isn't possible or available return CODE_FOR_nothing. */
2246 static enum insn_code
2247 interclass_mathfn_icode (tree arg, tree fndecl)
2249 bool errno_set = false;
2250 optab builtin_optab = 0;
2251 enum machine_mode mode;
2253 switch (DECL_FUNCTION_CODE (fndecl))
2255 CASE_FLT_FN (BUILT_IN_ILOGB):
2256 errno_set = true; builtin_optab = ilogb_optab; break;
2257 CASE_FLT_FN (BUILT_IN_ISINF):
2258 builtin_optab = isinf_optab; break;
2259 case BUILT_IN_ISNORMAL:
2260 case BUILT_IN_ISFINITE:
2261 CASE_FLT_FN (BUILT_IN_FINITE):
2262 case BUILT_IN_FINITED32:
2263 case BUILT_IN_FINITED64:
2264 case BUILT_IN_FINITED128:
2265 case BUILT_IN_ISINFD32:
2266 case BUILT_IN_ISINFD64:
2267 case BUILT_IN_ISINFD128:
2268 /* These builtins have no optabs (yet). */
2269 break;
2270 default:
2271 gcc_unreachable ();
2274 /* There's no easy way to detect the case we need to set EDOM. */
2275 if (flag_errno_math && errno_set)
2276 return CODE_FOR_nothing;
2278 /* Optab mode depends on the mode of the input argument. */
2279 mode = TYPE_MODE (TREE_TYPE (arg));
2281 if (builtin_optab)
2282 return optab_handler (builtin_optab, mode);
2283 return CODE_FOR_nothing;
2286 /* Expand a call to one of the builtin math functions that operate on
2287 floating point argument and output an integer result (ilogb, isinf,
2288 isnan, etc).
2289 Return 0 if a normal call should be emitted rather than expanding the
2290 function in-line. EXP is the expression that is a call to the builtin
2291 function; if convenient, the result should be placed in TARGET.
2292 SUBTARGET may be used as the target for computing one of EXP's operands. */
2294 static rtx
2295 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2297 enum insn_code icode = CODE_FOR_nothing;
2298 rtx op0;
2299 tree fndecl = get_callee_fndecl (exp);
2300 enum machine_mode mode;
2301 tree arg;
2303 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2304 return NULL_RTX;
2306 arg = CALL_EXPR_ARG (exp, 0);
2307 icode = interclass_mathfn_icode (arg, fndecl);
2308 mode = TYPE_MODE (TREE_TYPE (arg));
2310 if (icode != CODE_FOR_nothing)
2312 rtx last = get_last_insn ();
2313 tree orig_arg = arg;
2314 /* Make a suitable register to place result in. */
2315 if (!target
2316 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
2317 || !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
2318 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2320 gcc_assert (insn_data[icode].operand[0].predicate
2321 (target, GET_MODE (target)));
2323 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2324 need to expand the argument again. This way, we will not perform
2325 side-effects more the once. */
2326 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2328 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2330 if (mode != GET_MODE (op0))
2331 op0 = convert_to_mode (mode, op0, 0);
2333 /* Compute into TARGET.
2334 Set TARGET to wherever the result comes back. */
2335 if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
2336 return target;
2337 delete_insns_since (last);
2338 CALL_EXPR_ARG (exp, 0) = orig_arg;
2341 return NULL_RTX;
2344 /* Expand a call to the builtin sincos math function.
2345 Return NULL_RTX if a normal call should be emitted rather than expanding the
2346 function in-line. EXP is the expression that is a call to the builtin
2347 function. */
2349 static rtx
2350 expand_builtin_sincos (tree exp)
2352 rtx op0, op1, op2, target1, target2;
2353 enum machine_mode mode;
2354 tree arg, sinp, cosp;
2355 int result;
2356 location_t loc = EXPR_LOCATION (exp);
2358 if (!validate_arglist (exp, REAL_TYPE,
2359 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2360 return NULL_RTX;
2362 arg = CALL_EXPR_ARG (exp, 0);
2363 sinp = CALL_EXPR_ARG (exp, 1);
2364 cosp = CALL_EXPR_ARG (exp, 2);
2366 /* Make a suitable register to place result in. */
2367 mode = TYPE_MODE (TREE_TYPE (arg));
2369 /* Check if sincos insn is available, otherwise emit the call. */
2370 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2371 return NULL_RTX;
2373 target1 = gen_reg_rtx (mode);
2374 target2 = gen_reg_rtx (mode);
2376 op0 = expand_normal (arg);
2377 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2378 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2380 /* Compute into target1 and target2.
2381 Set TARGET to wherever the result comes back. */
2382 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2383 gcc_assert (result);
2385 /* Move target1 and target2 to the memory locations indicated
2386 by op1 and op2. */
2387 emit_move_insn (op1, target1);
2388 emit_move_insn (op2, target2);
2390 return const0_rtx;
2393 /* Expand a call to the internal cexpi builtin to the sincos math function.
2394 EXP is the expression that is a call to the builtin function; if convenient,
2395 the result should be placed in TARGET. SUBTARGET may be used as the target
2396 for computing one of EXP's operands. */
2398 static rtx
2399 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2401 tree fndecl = get_callee_fndecl (exp);
2402 tree arg, type;
2403 enum machine_mode mode;
2404 rtx op0, op1, op2;
2405 location_t loc = EXPR_LOCATION (exp);
2407 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2408 return NULL_RTX;
2410 arg = CALL_EXPR_ARG (exp, 0);
2411 type = TREE_TYPE (arg);
2412 mode = TYPE_MODE (TREE_TYPE (arg));
2414 /* Try expanding via a sincos optab, fall back to emitting a libcall
2415 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2416 is only generated from sincos, cexp or if we have either of them. */
2417 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2419 op1 = gen_reg_rtx (mode);
2420 op2 = gen_reg_rtx (mode);
2422 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2424 /* Compute into op1 and op2. */
2425 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2427 else if (TARGET_HAS_SINCOS)
2429 tree call, fn = NULL_TREE;
2430 tree top1, top2;
2431 rtx op1a, op2a;
2433 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2434 fn = built_in_decls[BUILT_IN_SINCOSF];
2435 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2436 fn = built_in_decls[BUILT_IN_SINCOS];
2437 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2438 fn = built_in_decls[BUILT_IN_SINCOSL];
2439 else
2440 gcc_unreachable ();
2442 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2443 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2444 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2445 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2446 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2447 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2449 /* Make sure not to fold the sincos call again. */
2450 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2451 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2452 call, 3, arg, top1, top2));
2454 else
2456 tree call, fn = NULL_TREE, narg;
2457 tree ctype = build_complex_type (type);
2459 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2460 fn = built_in_decls[BUILT_IN_CEXPF];
2461 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2462 fn = built_in_decls[BUILT_IN_CEXP];
2463 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2464 fn = built_in_decls[BUILT_IN_CEXPL];
2465 else
2466 gcc_unreachable ();
2468 /* If we don't have a decl for cexp create one. This is the
2469 friendliest fallback if the user calls __builtin_cexpi
2470 without full target C99 function support. */
2471 if (fn == NULL_TREE)
2473 tree fntype;
2474 const char *name = NULL;
2476 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2477 name = "cexpf";
2478 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2479 name = "cexp";
2480 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2481 name = "cexpl";
2483 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2484 fn = build_fn_decl (name, fntype);
2487 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2488 build_real (type, dconst0), arg);
2490 /* Make sure not to fold the cexp call again. */
2491 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2492 return expand_expr (build_call_nary (ctype, call, 1, narg),
2493 target, VOIDmode, EXPAND_NORMAL);
2496 /* Now build the proper return type. */
2497 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2498 make_tree (TREE_TYPE (arg), op2),
2499 make_tree (TREE_TYPE (arg), op1)),
2500 target, VOIDmode, EXPAND_NORMAL);
2503 /* Conveniently construct a function call expression. FNDECL names the
2504 function to be called, N is the number of arguments, and the "..."
2505 parameters are the argument expressions. Unlike build_call_exr
2506 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2508 static tree
2509 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2511 va_list ap;
2512 tree fntype = TREE_TYPE (fndecl);
2513 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2515 va_start (ap, n);
2516 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2517 va_end (ap);
2518 SET_EXPR_LOCATION (fn, loc);
2519 return fn;
2522 /* Expand a call to one of the builtin rounding functions gcc defines
2523 as an extension (lfloor and lceil). As these are gcc extensions we
2524 do not need to worry about setting errno to EDOM.
2525 If expanding via optab fails, lower expression to (int)(floor(x)).
2526 EXP is the expression that is a call to the builtin function;
2527 if convenient, the result should be placed in TARGET. */
2529 static rtx
2530 expand_builtin_int_roundingfn (tree exp, rtx target)
2532 convert_optab builtin_optab;
2533 rtx op0, insns, tmp;
2534 tree fndecl = get_callee_fndecl (exp);
2535 enum built_in_function fallback_fn;
2536 tree fallback_fndecl;
2537 enum machine_mode mode;
2538 tree arg;
2540 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2541 gcc_unreachable ();
2543 arg = CALL_EXPR_ARG (exp, 0);
2545 switch (DECL_FUNCTION_CODE (fndecl))
2547 CASE_FLT_FN (BUILT_IN_LCEIL):
2548 CASE_FLT_FN (BUILT_IN_LLCEIL):
2549 builtin_optab = lceil_optab;
2550 fallback_fn = BUILT_IN_CEIL;
2551 break;
2553 CASE_FLT_FN (BUILT_IN_LFLOOR):
2554 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2555 builtin_optab = lfloor_optab;
2556 fallback_fn = BUILT_IN_FLOOR;
2557 break;
2559 default:
2560 gcc_unreachable ();
2563 /* Make a suitable register to place result in. */
2564 mode = TYPE_MODE (TREE_TYPE (exp));
2566 target = gen_reg_rtx (mode);
2568 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2569 need to expand the argument again. This way, we will not perform
2570 side-effects more the once. */
2571 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2573 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2575 start_sequence ();
2577 /* Compute into TARGET. */
2578 if (expand_sfix_optab (target, op0, builtin_optab))
2580 /* Output the entire sequence. */
2581 insns = get_insns ();
2582 end_sequence ();
2583 emit_insn (insns);
2584 return target;
2587 /* If we were unable to expand via the builtin, stop the sequence
2588 (without outputting the insns). */
2589 end_sequence ();
2591 /* Fall back to floating point rounding optab. */
2592 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2594 /* For non-C99 targets we may end up without a fallback fndecl here
2595 if the user called __builtin_lfloor directly. In this case emit
2596 a call to the floor/ceil variants nevertheless. This should result
2597 in the best user experience for not full C99 targets. */
2598 if (fallback_fndecl == NULL_TREE)
2600 tree fntype;
2601 const char *name = NULL;
2603 switch (DECL_FUNCTION_CODE (fndecl))
2605 case BUILT_IN_LCEIL:
2606 case BUILT_IN_LLCEIL:
2607 name = "ceil";
2608 break;
2609 case BUILT_IN_LCEILF:
2610 case BUILT_IN_LLCEILF:
2611 name = "ceilf";
2612 break;
2613 case BUILT_IN_LCEILL:
2614 case BUILT_IN_LLCEILL:
2615 name = "ceill";
2616 break;
2617 case BUILT_IN_LFLOOR:
2618 case BUILT_IN_LLFLOOR:
2619 name = "floor";
2620 break;
2621 case BUILT_IN_LFLOORF:
2622 case BUILT_IN_LLFLOORF:
2623 name = "floorf";
2624 break;
2625 case BUILT_IN_LFLOORL:
2626 case BUILT_IN_LLFLOORL:
2627 name = "floorl";
2628 break;
2629 default:
2630 gcc_unreachable ();
2633 fntype = build_function_type_list (TREE_TYPE (arg),
2634 TREE_TYPE (arg), NULL_TREE);
2635 fallback_fndecl = build_fn_decl (name, fntype);
2638 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2640 tmp = expand_normal (exp);
2642 /* Truncate the result of floating point optab to integer
2643 via expand_fix (). */
2644 target = gen_reg_rtx (mode);
2645 expand_fix (target, tmp, 0);
2647 return target;
2650 /* Expand a call to one of the builtin math functions doing integer
2651 conversion (lrint).
2652 Return 0 if a normal call should be emitted rather than expanding the
2653 function in-line. EXP is the expression that is a call to the builtin
2654 function; if convenient, the result should be placed in TARGET. */
2656 static rtx
2657 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2659 convert_optab builtin_optab;
2660 rtx op0, insns;
2661 tree fndecl = get_callee_fndecl (exp);
2662 tree arg;
2663 enum machine_mode mode;
2665 /* There's no easy way to detect the case we need to set EDOM. */
2666 if (flag_errno_math)
2667 return NULL_RTX;
2669 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2670 gcc_unreachable ();
2672 arg = CALL_EXPR_ARG (exp, 0);
2674 switch (DECL_FUNCTION_CODE (fndecl))
2676 CASE_FLT_FN (BUILT_IN_LRINT):
2677 CASE_FLT_FN (BUILT_IN_LLRINT):
2678 builtin_optab = lrint_optab; break;
2679 CASE_FLT_FN (BUILT_IN_LROUND):
2680 CASE_FLT_FN (BUILT_IN_LLROUND):
2681 builtin_optab = lround_optab; break;
2682 default:
2683 gcc_unreachable ();
2686 /* Make a suitable register to place result in. */
2687 mode = TYPE_MODE (TREE_TYPE (exp));
2689 target = gen_reg_rtx (mode);
2691 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2692 need to expand the argument again. This way, we will not perform
2693 side-effects more the once. */
2694 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2696 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2698 start_sequence ();
2700 if (expand_sfix_optab (target, op0, builtin_optab))
2702 /* Output the entire sequence. */
2703 insns = get_insns ();
2704 end_sequence ();
2705 emit_insn (insns);
2706 return target;
2709 /* If we were unable to expand via the builtin, stop the sequence
2710 (without outputting the insns) and call to the library function
2711 with the stabilized argument list. */
2712 end_sequence ();
2714 target = expand_call (exp, target, target == const0_rtx);
2716 return target;
2719 /* To evaluate powi(x,n), the floating point value x raised to the
2720 constant integer exponent n, we use a hybrid algorithm that
2721 combines the "window method" with look-up tables. For an
2722 introduction to exponentiation algorithms and "addition chains",
2723 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2724 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2725 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2726 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2728 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2729 multiplications to inline before calling the system library's pow
2730 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2731 so this default never requires calling pow, powf or powl. */
2733 #ifndef POWI_MAX_MULTS
2734 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2735 #endif
2737 /* The size of the "optimal power tree" lookup table. All
2738 exponents less than this value are simply looked up in the
2739 powi_table below. This threshold is also used to size the
2740 cache of pseudo registers that hold intermediate results. */
2741 #define POWI_TABLE_SIZE 256
2743 /* The size, in bits of the window, used in the "window method"
2744 exponentiation algorithm. This is equivalent to a radix of
2745 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2746 #define POWI_WINDOW_SIZE 3
2748 /* The following table is an efficient representation of an
2749 "optimal power tree". For each value, i, the corresponding
2750 value, j, in the table states than an optimal evaluation
2751 sequence for calculating pow(x,i) can be found by evaluating
2752 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2753 100 integers is given in Knuth's "Seminumerical algorithms". */
2755 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2757 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2758 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2759 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2760 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2761 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2762 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2763 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2764 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2765 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2766 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2767 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2768 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2769 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2770 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2771 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2772 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2773 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2774 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2775 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2776 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2777 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2778 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2779 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2780 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2781 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2782 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2783 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2784 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2785 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2786 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2787 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2788 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2792 /* Return the number of multiplications required to calculate
2793 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2794 subroutine of powi_cost. CACHE is an array indicating
2795 which exponents have already been calculated. */
2797 static int
2798 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2800 /* If we've already calculated this exponent, then this evaluation
2801 doesn't require any additional multiplications. */
2802 if (cache[n])
2803 return 0;
2805 cache[n] = true;
2806 return powi_lookup_cost (n - powi_table[n], cache)
2807 + powi_lookup_cost (powi_table[n], cache) + 1;
2810 /* Return the number of multiplications required to calculate
2811 powi(x,n) for an arbitrary x, given the exponent N. This
2812 function needs to be kept in sync with expand_powi below. */
2814 static int
2815 powi_cost (HOST_WIDE_INT n)
2817 bool cache[POWI_TABLE_SIZE];
2818 unsigned HOST_WIDE_INT digit;
2819 unsigned HOST_WIDE_INT val;
2820 int result;
2822 if (n == 0)
2823 return 0;
2825 /* Ignore the reciprocal when calculating the cost. */
2826 val = (n < 0) ? -n : n;
2828 /* Initialize the exponent cache. */
2829 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2830 cache[1] = true;
2832 result = 0;
2834 while (val >= POWI_TABLE_SIZE)
2836 if (val & 1)
2838 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2839 result += powi_lookup_cost (digit, cache)
2840 + POWI_WINDOW_SIZE + 1;
2841 val >>= POWI_WINDOW_SIZE;
2843 else
2845 val >>= 1;
2846 result++;
2850 return result + powi_lookup_cost (val, cache);
2853 /* Recursive subroutine of expand_powi. This function takes the array,
2854 CACHE, of already calculated exponents and an exponent N and returns
2855 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2857 static rtx
2858 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2860 unsigned HOST_WIDE_INT digit;
2861 rtx target, result;
2862 rtx op0, op1;
2864 if (n < POWI_TABLE_SIZE)
2866 if (cache[n])
2867 return cache[n];
2869 target = gen_reg_rtx (mode);
2870 cache[n] = target;
2872 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2873 op1 = expand_powi_1 (mode, powi_table[n], cache);
2875 else if (n & 1)
2877 target = gen_reg_rtx (mode);
2878 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2879 op0 = expand_powi_1 (mode, n - digit, cache);
2880 op1 = expand_powi_1 (mode, digit, cache);
2882 else
2884 target = gen_reg_rtx (mode);
2885 op0 = expand_powi_1 (mode, n >> 1, cache);
2886 op1 = op0;
2889 result = expand_mult (mode, op0, op1, target, 0);
2890 if (result != target)
2891 emit_move_insn (target, result);
2892 return target;
2895 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2896 floating point operand in mode MODE, and N is the exponent. This
2897 function needs to be kept in sync with powi_cost above. */
2899 static rtx
2900 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2902 rtx cache[POWI_TABLE_SIZE];
2903 rtx result;
2905 if (n == 0)
2906 return CONST1_RTX (mode);
2908 memset (cache, 0, sizeof (cache));
2909 cache[1] = x;
2911 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2913 /* If the original exponent was negative, reciprocate the result. */
2914 if (n < 0)
2915 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2916 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2918 return result;
2921 /* Fold a builtin function call to pow, powf, or powl into a series of sqrts or
2922 cbrts. Return NULL_RTX if no simplification can be made or expand the tree
2923 if we can simplify it. */
2924 static rtx
2925 expand_builtin_pow_root (location_t loc, tree arg0, tree arg1, tree type,
2926 rtx subtarget)
2928 if (TREE_CODE (arg1) == REAL_CST
2929 && !TREE_OVERFLOW (arg1)
2930 && flag_unsafe_math_optimizations)
2932 enum machine_mode mode = TYPE_MODE (type);
2933 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
2934 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
2935 REAL_VALUE_TYPE c = TREE_REAL_CST (arg1);
2936 tree op = NULL_TREE;
2938 if (sqrtfn)
2940 /* Optimize pow (x, 0.5) into sqrt. */
2941 if (REAL_VALUES_EQUAL (c, dconsthalf))
2942 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
2944 else
2946 REAL_VALUE_TYPE dconst1_4 = dconst1;
2947 REAL_VALUE_TYPE dconst3_4;
2948 SET_REAL_EXP (&dconst1_4, REAL_EXP (&dconst1_4) - 2);
2950 real_from_integer (&dconst3_4, VOIDmode, 3, 0, 0);
2951 SET_REAL_EXP (&dconst3_4, REAL_EXP (&dconst3_4) - 2);
2953 /* Optimize pow (x, 0.25) into sqrt (sqrt (x)). Assume on most
2954 machines that a builtin sqrt instruction is smaller than a
2955 call to pow with 0.25, so do this optimization even if
2956 -Os. */
2957 if (REAL_VALUES_EQUAL (c, dconst1_4))
2959 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
2960 op = build_call_nofold_loc (loc, sqrtfn, 1, op);
2963 /* Optimize pow (x, 0.75) = sqrt (x) * sqrt (sqrt (x)) unless we
2964 are optimizing for space. */
2965 else if (optimize_insn_for_speed_p ()
2966 && !TREE_SIDE_EFFECTS (arg0)
2967 && REAL_VALUES_EQUAL (c, dconst3_4))
2969 tree sqrt1 = build_call_expr_loc (loc, sqrtfn, 1, arg0);
2970 tree sqrt2 = builtin_save_expr (sqrt1);
2971 tree sqrt3 = build_call_expr_loc (loc, sqrtfn, 1, sqrt1);
2972 op = fold_build2_loc (loc, MULT_EXPR, type, sqrt2, sqrt3);
2977 /* Check whether we can do cbrt insstead of pow (x, 1./3.) and
2978 cbrt/sqrts instead of pow (x, 1./6.). */
2979 if (cbrtfn && ! op
2980 && (tree_expr_nonnegative_p (arg0) || !HONOR_NANS (mode)))
2982 /* First try 1/3. */
2983 REAL_VALUE_TYPE dconst1_3
2984 = real_value_truncate (mode, dconst_third ());
2986 if (REAL_VALUES_EQUAL (c, dconst1_3))
2987 op = build_call_nofold_loc (loc, cbrtfn, 1, arg0);
2989 /* Now try 1/6. */
2990 else if (optimize_insn_for_speed_p ())
2992 REAL_VALUE_TYPE dconst1_6 = dconst1_3;
2993 SET_REAL_EXP (&dconst1_6, REAL_EXP (&dconst1_6) - 1);
2995 if (REAL_VALUES_EQUAL (c, dconst1_6))
2997 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
2998 op = build_call_nofold_loc (loc, cbrtfn, 1, op);
3003 if (op)
3004 return expand_expr (op, subtarget, mode, EXPAND_NORMAL);
3007 return NULL_RTX;
3010 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
3011 a normal call should be emitted rather than expanding the function
3012 in-line. EXP is the expression that is a call to the builtin
3013 function; if convenient, the result should be placed in TARGET. */
3015 static rtx
3016 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
3018 tree arg0, arg1;
3019 tree fn, narg0;
3020 tree type = TREE_TYPE (exp);
3021 REAL_VALUE_TYPE cint, c, c2;
3022 HOST_WIDE_INT n;
3023 rtx op, op2;
3024 enum machine_mode mode = TYPE_MODE (type);
3026 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
3027 return NULL_RTX;
3029 arg0 = CALL_EXPR_ARG (exp, 0);
3030 arg1 = CALL_EXPR_ARG (exp, 1);
3032 if (TREE_CODE (arg1) != REAL_CST
3033 || TREE_OVERFLOW (arg1))
3034 return expand_builtin_mathfn_2 (exp, target, subtarget);
3036 /* Handle constant exponents. */
3038 /* For integer valued exponents we can expand to an optimal multiplication
3039 sequence using expand_powi. */
3040 c = TREE_REAL_CST (arg1);
3041 n = real_to_integer (&c);
3042 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3043 if (real_identical (&c, &cint)
3044 && ((n >= -1 && n <= 2)
3045 || (flag_unsafe_math_optimizations
3046 && optimize_insn_for_speed_p ()
3047 && powi_cost (n) <= POWI_MAX_MULTS)))
3049 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3050 if (n != 1)
3052 op = force_reg (mode, op);
3053 op = expand_powi (op, mode, n);
3055 return op;
3058 narg0 = builtin_save_expr (arg0);
3060 /* If the exponent is not integer valued, check if it is half of an integer.
3061 In this case we can expand to sqrt (x) * x**(n/2). */
3062 fn = mathfn_built_in (type, BUILT_IN_SQRT);
3063 if (fn != NULL_TREE)
3065 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
3066 n = real_to_integer (&c2);
3067 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3068 if (real_identical (&c2, &cint)
3069 && ((flag_unsafe_math_optimizations
3070 && optimize_insn_for_speed_p ()
3071 && powi_cost (n/2) <= POWI_MAX_MULTS)
3072 /* Even the c == 0.5 case cannot be done unconditionally
3073 when we need to preserve signed zeros, as
3074 pow (-0, 0.5) is +0, while sqrt(-0) is -0. */
3075 || (!HONOR_SIGNED_ZEROS (mode) && n == 1)
3076 /* For c == 1.5 we can assume that x * sqrt (x) is always
3077 smaller than pow (x, 1.5) if sqrt will not be expanded
3078 as a call. */
3079 || (n == 3
3080 && optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)))
3082 tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
3083 narg0);
3084 /* Use expand_expr in case the newly built call expression
3085 was folded to a non-call. */
3086 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3087 if (n != 1)
3089 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3090 op2 = force_reg (mode, op2);
3091 op2 = expand_powi (op2, mode, abs (n / 2));
3092 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3093 0, OPTAB_LIB_WIDEN);
3094 /* If the original exponent was negative, reciprocate the
3095 result. */
3096 if (n < 0)
3097 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3098 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3100 return op;
3104 /* Check whether we can do a series of sqrt or cbrt's instead of the pow
3105 call. */
3106 op = expand_builtin_pow_root (EXPR_LOCATION (exp), arg0, arg1, type,
3107 subtarget);
3108 if (op)
3109 return op;
3111 /* Try if the exponent is a third of an integer. In this case
3112 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3113 different from pow (x, 1./3.) due to rounding and behavior
3114 with negative x we need to constrain this transformation to
3115 unsafe math and positive x or finite math. */
3116 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3117 if (fn != NULL_TREE
3118 && flag_unsafe_math_optimizations
3119 && (tree_expr_nonnegative_p (arg0)
3120 || !HONOR_NANS (mode)))
3122 REAL_VALUE_TYPE dconst3;
3123 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3124 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3125 real_round (&c2, mode, &c2);
3126 n = real_to_integer (&c2);
3127 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3128 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3129 real_convert (&c2, mode, &c2);
3130 if (real_identical (&c2, &c)
3131 && ((optimize_insn_for_speed_p ()
3132 && powi_cost (n/3) <= POWI_MAX_MULTS)
3133 || n == 1))
3135 tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
3136 narg0);
3137 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3138 if (abs (n) % 3 == 2)
3139 op = expand_simple_binop (mode, MULT, op, op, op,
3140 0, OPTAB_LIB_WIDEN);
3141 if (n != 1)
3143 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3144 op2 = force_reg (mode, op2);
3145 op2 = expand_powi (op2, mode, abs (n / 3));
3146 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3147 0, OPTAB_LIB_WIDEN);
3148 /* If the original exponent was negative, reciprocate the
3149 result. */
3150 if (n < 0)
3151 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3152 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3154 return op;
3158 /* Fall back to optab expansion. */
3159 return expand_builtin_mathfn_2 (exp, target, subtarget);
3162 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3163 a normal call should be emitted rather than expanding the function
3164 in-line. EXP is the expression that is a call to the builtin
3165 function; if convenient, the result should be placed in TARGET. */
3167 static rtx
3168 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3170 tree arg0, arg1;
3171 rtx op0, op1;
3172 enum machine_mode mode;
3173 enum machine_mode mode2;
3175 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3176 return NULL_RTX;
3178 arg0 = CALL_EXPR_ARG (exp, 0);
3179 arg1 = CALL_EXPR_ARG (exp, 1);
3180 mode = TYPE_MODE (TREE_TYPE (exp));
3182 /* Handle constant power. */
3184 if (TREE_CODE (arg1) == INTEGER_CST
3185 && !TREE_OVERFLOW (arg1))
3187 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3189 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3190 Otherwise, check the number of multiplications required. */
3191 if ((TREE_INT_CST_HIGH (arg1) == 0
3192 || TREE_INT_CST_HIGH (arg1) == -1)
3193 && ((n >= -1 && n <= 2)
3194 || (optimize_insn_for_speed_p ()
3195 && powi_cost (n) <= POWI_MAX_MULTS)))
3197 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3198 op0 = force_reg (mode, op0);
3199 return expand_powi (op0, mode, n);
3203 /* Emit a libcall to libgcc. */
3205 /* Mode of the 2nd argument must match that of an int. */
3206 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3208 if (target == NULL_RTX)
3209 target = gen_reg_rtx (mode);
3211 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3212 if (GET_MODE (op0) != mode)
3213 op0 = convert_to_mode (mode, op0, 0);
3214 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3215 if (GET_MODE (op1) != mode2)
3216 op1 = convert_to_mode (mode2, op1, 0);
3218 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3219 target, LCT_CONST, mode, 2,
3220 op0, mode, op1, mode2);
3222 return target;
3225 /* Expand expression EXP which is a call to the strlen builtin. Return
3226 NULL_RTX if we failed the caller should emit a normal call, otherwise
3227 try to get the result in TARGET, if convenient. */
3229 static rtx
3230 expand_builtin_strlen (tree exp, rtx target,
3231 enum machine_mode target_mode)
3233 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3234 return NULL_RTX;
3235 else
3237 rtx pat;
3238 tree len;
3239 tree src = CALL_EXPR_ARG (exp, 0);
3240 rtx result, src_reg, char_rtx, before_strlen;
3241 enum machine_mode insn_mode = target_mode, char_mode;
3242 enum insn_code icode = CODE_FOR_nothing;
3243 int align;
3245 /* If the length can be computed at compile-time, return it. */
3246 len = c_strlen (src, 0);
3247 if (len)
3248 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3250 /* If the length can be computed at compile-time and is constant
3251 integer, but there are side-effects in src, evaluate
3252 src for side-effects, then return len.
3253 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3254 can be optimized into: i++; x = 3; */
3255 len = c_strlen (src, 1);
3256 if (len && TREE_CODE (len) == INTEGER_CST)
3258 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3259 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3262 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3264 /* If SRC is not a pointer type, don't do this operation inline. */
3265 if (align == 0)
3266 return NULL_RTX;
3268 /* Bail out if we can't compute strlen in the right mode. */
3269 while (insn_mode != VOIDmode)
3271 icode = optab_handler (strlen_optab, insn_mode);
3272 if (icode != CODE_FOR_nothing)
3273 break;
3275 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3277 if (insn_mode == VOIDmode)
3278 return NULL_RTX;
3280 /* Make a place to write the result of the instruction. */
3281 result = target;
3282 if (! (result != 0
3283 && REG_P (result)
3284 && GET_MODE (result) == insn_mode
3285 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3286 result = gen_reg_rtx (insn_mode);
3288 /* Make a place to hold the source address. We will not expand
3289 the actual source until we are sure that the expansion will
3290 not fail -- there are trees that cannot be expanded twice. */
3291 src_reg = gen_reg_rtx (Pmode);
3293 /* Mark the beginning of the strlen sequence so we can emit the
3294 source operand later. */
3295 before_strlen = get_last_insn ();
3297 char_rtx = const0_rtx;
3298 char_mode = insn_data[(int) icode].operand[2].mode;
3299 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3300 char_mode))
3301 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3303 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3304 char_rtx, GEN_INT (align));
3305 if (! pat)
3306 return NULL_RTX;
3307 emit_insn (pat);
3309 /* Now that we are assured of success, expand the source. */
3310 start_sequence ();
3311 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3312 if (pat != src_reg)
3313 emit_move_insn (src_reg, pat);
3314 pat = get_insns ();
3315 end_sequence ();
3317 if (before_strlen)
3318 emit_insn_after (pat, before_strlen);
3319 else
3320 emit_insn_before (pat, get_insns ());
3322 /* Return the value in the proper mode for this function. */
3323 if (GET_MODE (result) == target_mode)
3324 target = result;
3325 else if (target != 0)
3326 convert_move (target, result, 0);
3327 else
3328 target = convert_to_mode (target_mode, result, 0);
3330 return target;
3334 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3335 bytes from constant string DATA + OFFSET and return it as target
3336 constant. */
3338 static rtx
3339 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3340 enum machine_mode mode)
3342 const char *str = (const char *) data;
3344 gcc_assert (offset >= 0
3345 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3346 <= strlen (str) + 1));
3348 return c_readstr (str + offset, mode);
3351 /* Expand a call EXP to the memcpy builtin.
3352 Return NULL_RTX if we failed, the caller should emit a normal call,
3353 otherwise try to get the result in TARGET, if convenient (and in
3354 mode MODE if that's convenient). */
3356 static rtx
3357 expand_builtin_memcpy (tree exp, rtx target)
3359 if (!validate_arglist (exp,
3360 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3361 return NULL_RTX;
3362 else
3364 tree dest = CALL_EXPR_ARG (exp, 0);
3365 tree src = CALL_EXPR_ARG (exp, 1);
3366 tree len = CALL_EXPR_ARG (exp, 2);
3367 const char *src_str;
3368 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3369 unsigned int dest_align
3370 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3371 rtx dest_mem, src_mem, dest_addr, len_rtx;
3372 HOST_WIDE_INT expected_size = -1;
3373 unsigned int expected_align = 0;
3375 /* If DEST is not a pointer type, call the normal function. */
3376 if (dest_align == 0)
3377 return NULL_RTX;
3379 /* If either SRC is not a pointer type, don't do this
3380 operation in-line. */
3381 if (src_align == 0)
3382 return NULL_RTX;
3384 if (currently_expanding_gimple_stmt)
3385 stringop_block_profile (currently_expanding_gimple_stmt,
3386 &expected_align, &expected_size);
3388 if (expected_align < dest_align)
3389 expected_align = dest_align;
3390 dest_mem = get_memory_rtx (dest, len);
3391 set_mem_align (dest_mem, dest_align);
3392 len_rtx = expand_normal (len);
3393 src_str = c_getstr (src);
3395 /* If SRC is a string constant and block move would be done
3396 by pieces, we can avoid loading the string from memory
3397 and only stored the computed constants. */
3398 if (src_str
3399 && CONST_INT_P (len_rtx)
3400 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3401 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3402 CONST_CAST (char *, src_str),
3403 dest_align, false))
3405 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3406 builtin_memcpy_read_str,
3407 CONST_CAST (char *, src_str),
3408 dest_align, false, 0);
3409 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3410 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3411 return dest_mem;
3414 src_mem = get_memory_rtx (src, len);
3415 set_mem_align (src_mem, src_align);
3417 /* Copy word part most expediently. */
3418 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3419 CALL_EXPR_TAILCALL (exp)
3420 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3421 expected_align, expected_size);
3423 if (dest_addr == 0)
3425 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3426 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3428 return dest_addr;
3432 /* Expand a call EXP to the mempcpy builtin.
3433 Return NULL_RTX if we failed; the caller should emit a normal call,
3434 otherwise try to get the result in TARGET, if convenient (and in
3435 mode MODE if that's convenient). If ENDP is 0 return the
3436 destination pointer, if ENDP is 1 return the end pointer ala
3437 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3438 stpcpy. */
3440 static rtx
3441 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3443 if (!validate_arglist (exp,
3444 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3445 return NULL_RTX;
3446 else
3448 tree dest = CALL_EXPR_ARG (exp, 0);
3449 tree src = CALL_EXPR_ARG (exp, 1);
3450 tree len = CALL_EXPR_ARG (exp, 2);
3451 return expand_builtin_mempcpy_args (dest, src, len,
3452 target, mode, /*endp=*/ 1);
3456 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3457 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3458 so that this can also be called without constructing an actual CALL_EXPR.
3459 The other arguments and return value are the same as for
3460 expand_builtin_mempcpy. */
3462 static rtx
3463 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3464 rtx target, enum machine_mode mode, int endp)
3466 /* If return value is ignored, transform mempcpy into memcpy. */
3467 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3469 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3470 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3471 dest, src, len);
3472 return expand_expr (result, target, mode, EXPAND_NORMAL);
3474 else
3476 const char *src_str;
3477 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3478 unsigned int dest_align
3479 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3480 rtx dest_mem, src_mem, len_rtx;
3482 /* If either SRC or DEST is not a pointer type, don't do this
3483 operation in-line. */
3484 if (dest_align == 0 || src_align == 0)
3485 return NULL_RTX;
3487 /* If LEN is not constant, call the normal function. */
3488 if (! host_integerp (len, 1))
3489 return NULL_RTX;
3491 len_rtx = expand_normal (len);
3492 src_str = c_getstr (src);
3494 /* If SRC is a string constant and block move would be done
3495 by pieces, we can avoid loading the string from memory
3496 and only stored the computed constants. */
3497 if (src_str
3498 && CONST_INT_P (len_rtx)
3499 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3500 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3501 CONST_CAST (char *, src_str),
3502 dest_align, false))
3504 dest_mem = get_memory_rtx (dest, len);
3505 set_mem_align (dest_mem, dest_align);
3506 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3507 builtin_memcpy_read_str,
3508 CONST_CAST (char *, src_str),
3509 dest_align, false, endp);
3510 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3511 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3512 return dest_mem;
3515 if (CONST_INT_P (len_rtx)
3516 && can_move_by_pieces (INTVAL (len_rtx),
3517 MIN (dest_align, src_align)))
3519 dest_mem = get_memory_rtx (dest, len);
3520 set_mem_align (dest_mem, dest_align);
3521 src_mem = get_memory_rtx (src, len);
3522 set_mem_align (src_mem, src_align);
3523 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3524 MIN (dest_align, src_align), endp);
3525 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3526 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3527 return dest_mem;
3530 return NULL_RTX;
3534 #ifndef HAVE_movstr
3535 # define HAVE_movstr 0
3536 # define CODE_FOR_movstr CODE_FOR_nothing
3537 #endif
3539 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3540 we failed, the caller should emit a normal call, otherwise try to
3541 get the result in TARGET, if convenient. If ENDP is 0 return the
3542 destination pointer, if ENDP is 1 return the end pointer ala
3543 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3544 stpcpy. */
3546 static rtx
3547 expand_movstr (tree dest, tree src, rtx target, int endp)
3549 rtx end;
3550 rtx dest_mem;
3551 rtx src_mem;
3552 rtx insn;
3553 const struct insn_data_d * data;
3555 if (!HAVE_movstr)
3556 return NULL_RTX;
3558 dest_mem = get_memory_rtx (dest, NULL);
3559 src_mem = get_memory_rtx (src, NULL);
3560 data = insn_data + CODE_FOR_movstr;
3561 if (!endp)
3563 target = force_reg (Pmode, XEXP (dest_mem, 0));
3564 dest_mem = replace_equiv_address (dest_mem, target);
3565 end = gen_reg_rtx (Pmode);
3567 else
3569 if (target == 0
3570 || target == const0_rtx
3571 || ! (*data->operand[0].predicate) (target, Pmode))
3573 end = gen_reg_rtx (Pmode);
3574 if (target != const0_rtx)
3575 target = end;
3577 else
3578 end = target;
3581 if (data->operand[0].mode != VOIDmode)
3582 end = gen_lowpart (data->operand[0].mode, end);
3584 insn = data->genfun (end, dest_mem, src_mem);
3586 gcc_assert (insn);
3588 emit_insn (insn);
3590 /* movstr is supposed to set end to the address of the NUL
3591 terminator. If the caller requested a mempcpy-like return value,
3592 adjust it. */
3593 if (endp == 1 && target != const0_rtx)
3595 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3596 emit_move_insn (target, force_operand (tem, NULL_RTX));
3599 return target;
3602 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3603 NULL_RTX if we failed the caller should emit a normal call, otherwise
3604 try to get the result in TARGET, if convenient (and in mode MODE if that's
3605 convenient). */
3607 static rtx
3608 expand_builtin_strcpy (tree exp, rtx target)
3610 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3612 tree dest = CALL_EXPR_ARG (exp, 0);
3613 tree src = CALL_EXPR_ARG (exp, 1);
3614 return expand_builtin_strcpy_args (dest, src, target);
3616 return NULL_RTX;
3619 /* Helper function to do the actual work for expand_builtin_strcpy. The
3620 arguments to the builtin_strcpy call DEST and SRC are broken out
3621 so that this can also be called without constructing an actual CALL_EXPR.
3622 The other arguments and return value are the same as for
3623 expand_builtin_strcpy. */
3625 static rtx
3626 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3628 return expand_movstr (dest, src, target, /*endp=*/0);
3631 /* Expand a call EXP to the stpcpy builtin.
3632 Return NULL_RTX if we failed the caller should emit a normal call,
3633 otherwise try to get the result in TARGET, if convenient (and in
3634 mode MODE if that's convenient). */
3636 static rtx
3637 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3639 tree dst, src;
3640 location_t loc = EXPR_LOCATION (exp);
3642 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3643 return NULL_RTX;
3645 dst = CALL_EXPR_ARG (exp, 0);
3646 src = CALL_EXPR_ARG (exp, 1);
3648 /* If return value is ignored, transform stpcpy into strcpy. */
3649 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3651 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3652 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3653 return expand_expr (result, target, mode, EXPAND_NORMAL);
3655 else
3657 tree len, lenp1;
3658 rtx ret;
3660 /* Ensure we get an actual string whose length can be evaluated at
3661 compile-time, not an expression containing a string. This is
3662 because the latter will potentially produce pessimized code
3663 when used to produce the return value. */
3664 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3665 return expand_movstr (dst, src, target, /*endp=*/2);
3667 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3668 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3669 target, mode, /*endp=*/2);
3671 if (ret)
3672 return ret;
3674 if (TREE_CODE (len) == INTEGER_CST)
3676 rtx len_rtx = expand_normal (len);
3678 if (CONST_INT_P (len_rtx))
3680 ret = expand_builtin_strcpy_args (dst, src, target);
3682 if (ret)
3684 if (! target)
3686 if (mode != VOIDmode)
3687 target = gen_reg_rtx (mode);
3688 else
3689 target = gen_reg_rtx (GET_MODE (ret));
3691 if (GET_MODE (target) != GET_MODE (ret))
3692 ret = gen_lowpart (GET_MODE (target), ret);
3694 ret = plus_constant (ret, INTVAL (len_rtx));
3695 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3696 gcc_assert (ret);
3698 return target;
3703 return expand_movstr (dst, src, target, /*endp=*/2);
3707 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3708 bytes from constant string DATA + OFFSET and return it as target
3709 constant. */
3712 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3713 enum machine_mode mode)
3715 const char *str = (const char *) data;
3717 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3718 return const0_rtx;
3720 return c_readstr (str + offset, mode);
3723 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3724 NULL_RTX if we failed the caller should emit a normal call. */
3726 static rtx
3727 expand_builtin_strncpy (tree exp, rtx target)
3729 location_t loc = EXPR_LOCATION (exp);
3731 if (validate_arglist (exp,
3732 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3734 tree dest = CALL_EXPR_ARG (exp, 0);
3735 tree src = CALL_EXPR_ARG (exp, 1);
3736 tree len = CALL_EXPR_ARG (exp, 2);
3737 tree slen = c_strlen (src, 1);
3739 /* We must be passed a constant len and src parameter. */
3740 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3741 return NULL_RTX;
3743 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3745 /* We're required to pad with trailing zeros if the requested
3746 len is greater than strlen(s2)+1. In that case try to
3747 use store_by_pieces, if it fails, punt. */
3748 if (tree_int_cst_lt (slen, len))
3750 unsigned int dest_align
3751 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3752 const char *p = c_getstr (src);
3753 rtx dest_mem;
3755 if (!p || dest_align == 0 || !host_integerp (len, 1)
3756 || !can_store_by_pieces (tree_low_cst (len, 1),
3757 builtin_strncpy_read_str,
3758 CONST_CAST (char *, p),
3759 dest_align, false))
3760 return NULL_RTX;
3762 dest_mem = get_memory_rtx (dest, len);
3763 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3764 builtin_strncpy_read_str,
3765 CONST_CAST (char *, p), dest_align, false, 0);
3766 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3767 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3768 return dest_mem;
3771 return NULL_RTX;
3774 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3775 bytes from constant string DATA + OFFSET and return it as target
3776 constant. */
3779 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3780 enum machine_mode mode)
3782 const char *c = (const char *) data;
3783 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3785 memset (p, *c, GET_MODE_SIZE (mode));
3787 return c_readstr (p, mode);
3790 /* Callback routine for store_by_pieces. Return the RTL of a register
3791 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3792 char value given in the RTL register data. For example, if mode is
3793 4 bytes wide, return the RTL for 0x01010101*data. */
3795 static rtx
3796 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3797 enum machine_mode mode)
3799 rtx target, coeff;
3800 size_t size;
3801 char *p;
3803 size = GET_MODE_SIZE (mode);
3804 if (size == 1)
3805 return (rtx) data;
3807 p = XALLOCAVEC (char, size);
3808 memset (p, 1, size);
3809 coeff = c_readstr (p, mode);
3811 target = convert_to_mode (mode, (rtx) data, 1);
3812 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3813 return force_reg (mode, target);
3816 /* Expand expression EXP, which is a call to the memset builtin. Return
3817 NULL_RTX if we failed the caller should emit a normal call, otherwise
3818 try to get the result in TARGET, if convenient (and in mode MODE if that's
3819 convenient). */
3821 static rtx
3822 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3824 if (!validate_arglist (exp,
3825 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3826 return NULL_RTX;
3827 else
3829 tree dest = CALL_EXPR_ARG (exp, 0);
3830 tree val = CALL_EXPR_ARG (exp, 1);
3831 tree len = CALL_EXPR_ARG (exp, 2);
3832 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3836 /* Helper function to do the actual work for expand_builtin_memset. The
3837 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3838 so that this can also be called without constructing an actual CALL_EXPR.
3839 The other arguments and return value are the same as for
3840 expand_builtin_memset. */
3842 static rtx
3843 expand_builtin_memset_args (tree dest, tree val, tree len,
3844 rtx target, enum machine_mode mode, tree orig_exp)
3846 tree fndecl, fn;
3847 enum built_in_function fcode;
3848 char c;
3849 unsigned int dest_align;
3850 rtx dest_mem, dest_addr, len_rtx;
3851 HOST_WIDE_INT expected_size = -1;
3852 unsigned int expected_align = 0;
3854 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3856 /* If DEST is not a pointer type, don't do this operation in-line. */
3857 if (dest_align == 0)
3858 return NULL_RTX;
3860 if (currently_expanding_gimple_stmt)
3861 stringop_block_profile (currently_expanding_gimple_stmt,
3862 &expected_align, &expected_size);
3864 if (expected_align < dest_align)
3865 expected_align = dest_align;
3867 /* If the LEN parameter is zero, return DEST. */
3868 if (integer_zerop (len))
3870 /* Evaluate and ignore VAL in case it has side-effects. */
3871 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3872 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3875 /* Stabilize the arguments in case we fail. */
3876 dest = builtin_save_expr (dest);
3877 val = builtin_save_expr (val);
3878 len = builtin_save_expr (len);
3880 len_rtx = expand_normal (len);
3881 dest_mem = get_memory_rtx (dest, len);
3883 if (TREE_CODE (val) != INTEGER_CST)
3885 rtx val_rtx;
3887 val_rtx = expand_normal (val);
3888 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3889 val_rtx, 0);
3891 /* Assume that we can memset by pieces if we can store
3892 * the coefficients by pieces (in the required modes).
3893 * We can't pass builtin_memset_gen_str as that emits RTL. */
3894 c = 1;
3895 if (host_integerp (len, 1)
3896 && can_store_by_pieces (tree_low_cst (len, 1),
3897 builtin_memset_read_str, &c, dest_align,
3898 true))
3900 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3901 val_rtx);
3902 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3903 builtin_memset_gen_str, val_rtx, dest_align,
3904 true, 0);
3906 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3907 dest_align, expected_align,
3908 expected_size))
3909 goto do_libcall;
3911 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3912 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3913 return dest_mem;
3916 if (target_char_cast (val, &c))
3917 goto do_libcall;
3919 if (c)
3921 if (host_integerp (len, 1)
3922 && can_store_by_pieces (tree_low_cst (len, 1),
3923 builtin_memset_read_str, &c, dest_align,
3924 true))
3925 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3926 builtin_memset_read_str, &c, dest_align, true, 0);
3927 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3928 dest_align, expected_align,
3929 expected_size))
3930 goto do_libcall;
3932 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3933 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3934 return dest_mem;
3937 set_mem_align (dest_mem, dest_align);
3938 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3939 CALL_EXPR_TAILCALL (orig_exp)
3940 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3941 expected_align, expected_size);
3943 if (dest_addr == 0)
3945 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3946 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3949 return dest_addr;
3951 do_libcall:
3952 fndecl = get_callee_fndecl (orig_exp);
3953 fcode = DECL_FUNCTION_CODE (fndecl);
3954 if (fcode == BUILT_IN_MEMSET)
3955 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3956 dest, val, len);
3957 else if (fcode == BUILT_IN_BZERO)
3958 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3959 dest, len);
3960 else
3961 gcc_unreachable ();
3962 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3963 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3964 return expand_call (fn, target, target == const0_rtx);
3967 /* Expand expression EXP, which is a call to the bzero builtin. Return
3968 NULL_RTX if we failed the caller should emit a normal call. */
3970 static rtx
3971 expand_builtin_bzero (tree exp)
3973 tree dest, size;
3974 location_t loc = EXPR_LOCATION (exp);
3976 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3977 return NULL_RTX;
3979 dest = CALL_EXPR_ARG (exp, 0);
3980 size = CALL_EXPR_ARG (exp, 1);
3982 /* New argument list transforming bzero(ptr x, int y) to
3983 memset(ptr x, int 0, size_t y). This is done this way
3984 so that if it isn't expanded inline, we fallback to
3985 calling bzero instead of memset. */
3987 return expand_builtin_memset_args (dest, integer_zero_node,
3988 fold_convert_loc (loc, sizetype, size),
3989 const0_rtx, VOIDmode, exp);
3992 /* Expand expression EXP, which is a call to the memcmp built-in function.
3993 Return NULL_RTX if we failed and the
3994 caller should emit a normal call, otherwise try to get the result in
3995 TARGET, if convenient (and in mode MODE, if that's convenient). */
3997 static rtx
3998 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3999 ATTRIBUTE_UNUSED enum machine_mode mode)
4001 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4003 if (!validate_arglist (exp,
4004 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4005 return NULL_RTX;
4007 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4009 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4010 rtx result;
4011 rtx insn;
4012 tree arg1 = CALL_EXPR_ARG (exp, 0);
4013 tree arg2 = CALL_EXPR_ARG (exp, 1);
4014 tree len = CALL_EXPR_ARG (exp, 2);
4016 int arg1_align
4017 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4018 int arg2_align
4019 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4020 enum machine_mode insn_mode;
4022 #ifdef HAVE_cmpmemsi
4023 if (HAVE_cmpmemsi)
4024 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4025 else
4026 #endif
4027 #ifdef HAVE_cmpstrnsi
4028 if (HAVE_cmpstrnsi)
4029 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4030 else
4031 #endif
4032 return NULL_RTX;
4034 /* If we don't have POINTER_TYPE, call the function. */
4035 if (arg1_align == 0 || arg2_align == 0)
4036 return NULL_RTX;
4038 /* Make a place to write the result of the instruction. */
4039 result = target;
4040 if (! (result != 0
4041 && REG_P (result) && GET_MODE (result) == insn_mode
4042 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4043 result = gen_reg_rtx (insn_mode);
4045 arg1_rtx = get_memory_rtx (arg1, len);
4046 arg2_rtx = get_memory_rtx (arg2, len);
4047 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4049 /* Set MEM_SIZE as appropriate. */
4050 if (CONST_INT_P (arg3_rtx))
4052 set_mem_size (arg1_rtx, arg3_rtx);
4053 set_mem_size (arg2_rtx, arg3_rtx);
4056 #ifdef HAVE_cmpmemsi
4057 if (HAVE_cmpmemsi)
4058 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4059 GEN_INT (MIN (arg1_align, arg2_align)));
4060 else
4061 #endif
4062 #ifdef HAVE_cmpstrnsi
4063 if (HAVE_cmpstrnsi)
4064 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4065 GEN_INT (MIN (arg1_align, arg2_align)));
4066 else
4067 #endif
4068 gcc_unreachable ();
4070 if (insn)
4071 emit_insn (insn);
4072 else
4073 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4074 TYPE_MODE (integer_type_node), 3,
4075 XEXP (arg1_rtx, 0), Pmode,
4076 XEXP (arg2_rtx, 0), Pmode,
4077 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4078 TYPE_UNSIGNED (sizetype)),
4079 TYPE_MODE (sizetype));
4081 /* Return the value in the proper mode for this function. */
4082 mode = TYPE_MODE (TREE_TYPE (exp));
4083 if (GET_MODE (result) == mode)
4084 return result;
4085 else if (target != 0)
4087 convert_move (target, result, 0);
4088 return target;
4090 else
4091 return convert_to_mode (mode, result, 0);
4093 #endif
4095 return NULL_RTX;
4098 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4099 if we failed the caller should emit a normal call, otherwise try to get
4100 the result in TARGET, if convenient. */
4102 static rtx
4103 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4105 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4106 return NULL_RTX;
4108 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4109 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4110 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4112 rtx arg1_rtx, arg2_rtx;
4113 rtx result, insn = NULL_RTX;
4114 tree fndecl, fn;
4115 tree arg1 = CALL_EXPR_ARG (exp, 0);
4116 tree arg2 = CALL_EXPR_ARG (exp, 1);
4118 int arg1_align
4119 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4120 int arg2_align
4121 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4123 /* If we don't have POINTER_TYPE, call the function. */
4124 if (arg1_align == 0 || arg2_align == 0)
4125 return NULL_RTX;
4127 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4128 arg1 = builtin_save_expr (arg1);
4129 arg2 = builtin_save_expr (arg2);
4131 arg1_rtx = get_memory_rtx (arg1, NULL);
4132 arg2_rtx = get_memory_rtx (arg2, NULL);
4134 #ifdef HAVE_cmpstrsi
4135 /* Try to call cmpstrsi. */
4136 if (HAVE_cmpstrsi)
4138 enum machine_mode insn_mode
4139 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4141 /* Make a place to write the result of the instruction. */
4142 result = target;
4143 if (! (result != 0
4144 && REG_P (result) && GET_MODE (result) == insn_mode
4145 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4146 result = gen_reg_rtx (insn_mode);
4148 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4149 GEN_INT (MIN (arg1_align, arg2_align)));
4151 #endif
4152 #ifdef HAVE_cmpstrnsi
4153 /* Try to determine at least one length and call cmpstrnsi. */
4154 if (!insn && HAVE_cmpstrnsi)
4156 tree len;
4157 rtx arg3_rtx;
4159 enum machine_mode insn_mode
4160 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4161 tree len1 = c_strlen (arg1, 1);
4162 tree len2 = c_strlen (arg2, 1);
4164 if (len1)
4165 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4166 if (len2)
4167 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4169 /* If we don't have a constant length for the first, use the length
4170 of the second, if we know it. We don't require a constant for
4171 this case; some cost analysis could be done if both are available
4172 but neither is constant. For now, assume they're equally cheap,
4173 unless one has side effects. If both strings have constant lengths,
4174 use the smaller. */
4176 if (!len1)
4177 len = len2;
4178 else if (!len2)
4179 len = len1;
4180 else if (TREE_SIDE_EFFECTS (len1))
4181 len = len2;
4182 else if (TREE_SIDE_EFFECTS (len2))
4183 len = len1;
4184 else if (TREE_CODE (len1) != INTEGER_CST)
4185 len = len2;
4186 else if (TREE_CODE (len2) != INTEGER_CST)
4187 len = len1;
4188 else if (tree_int_cst_lt (len1, len2))
4189 len = len1;
4190 else
4191 len = len2;
4193 /* If both arguments have side effects, we cannot optimize. */
4194 if (!len || TREE_SIDE_EFFECTS (len))
4195 goto do_libcall;
4197 arg3_rtx = expand_normal (len);
4199 /* Make a place to write the result of the instruction. */
4200 result = target;
4201 if (! (result != 0
4202 && REG_P (result) && GET_MODE (result) == insn_mode
4203 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4204 result = gen_reg_rtx (insn_mode);
4206 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4207 GEN_INT (MIN (arg1_align, arg2_align)));
4209 #endif
4211 if (insn)
4213 enum machine_mode mode;
4214 emit_insn (insn);
4216 /* Return the value in the proper mode for this function. */
4217 mode = TYPE_MODE (TREE_TYPE (exp));
4218 if (GET_MODE (result) == mode)
4219 return result;
4220 if (target == 0)
4221 return convert_to_mode (mode, result, 0);
4222 convert_move (target, result, 0);
4223 return target;
4226 /* Expand the library call ourselves using a stabilized argument
4227 list to avoid re-evaluating the function's arguments twice. */
4228 #ifdef HAVE_cmpstrnsi
4229 do_libcall:
4230 #endif
4231 fndecl = get_callee_fndecl (exp);
4232 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4233 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4234 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4235 return expand_call (fn, target, target == const0_rtx);
4237 #endif
4238 return NULL_RTX;
4241 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4242 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4243 the result in TARGET, if convenient. */
4245 static rtx
4246 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4247 ATTRIBUTE_UNUSED enum machine_mode mode)
4249 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4251 if (!validate_arglist (exp,
4252 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4253 return NULL_RTX;
4255 /* If c_strlen can determine an expression for one of the string
4256 lengths, and it doesn't have side effects, then emit cmpstrnsi
4257 using length MIN(strlen(string)+1, arg3). */
4258 #ifdef HAVE_cmpstrnsi
4259 if (HAVE_cmpstrnsi)
4261 tree len, len1, len2;
4262 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4263 rtx result, insn;
4264 tree fndecl, fn;
4265 tree arg1 = CALL_EXPR_ARG (exp, 0);
4266 tree arg2 = CALL_EXPR_ARG (exp, 1);
4267 tree arg3 = CALL_EXPR_ARG (exp, 2);
4269 int arg1_align
4270 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4271 int arg2_align
4272 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4273 enum machine_mode insn_mode
4274 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4276 len1 = c_strlen (arg1, 1);
4277 len2 = c_strlen (arg2, 1);
4279 if (len1)
4280 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4281 if (len2)
4282 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4284 /* If we don't have a constant length for the first, use the length
4285 of the second, if we know it. We don't require a constant for
4286 this case; some cost analysis could be done if both are available
4287 but neither is constant. For now, assume they're equally cheap,
4288 unless one has side effects. If both strings have constant lengths,
4289 use the smaller. */
4291 if (!len1)
4292 len = len2;
4293 else if (!len2)
4294 len = len1;
4295 else if (TREE_SIDE_EFFECTS (len1))
4296 len = len2;
4297 else if (TREE_SIDE_EFFECTS (len2))
4298 len = len1;
4299 else if (TREE_CODE (len1) != INTEGER_CST)
4300 len = len2;
4301 else if (TREE_CODE (len2) != INTEGER_CST)
4302 len = len1;
4303 else if (tree_int_cst_lt (len1, len2))
4304 len = len1;
4305 else
4306 len = len2;
4308 /* If both arguments have side effects, we cannot optimize. */
4309 if (!len || TREE_SIDE_EFFECTS (len))
4310 return NULL_RTX;
4312 /* The actual new length parameter is MIN(len,arg3). */
4313 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4314 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4316 /* If we don't have POINTER_TYPE, call the function. */
4317 if (arg1_align == 0 || arg2_align == 0)
4318 return NULL_RTX;
4320 /* Make a place to write the result of the instruction. */
4321 result = target;
4322 if (! (result != 0
4323 && REG_P (result) && GET_MODE (result) == insn_mode
4324 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4325 result = gen_reg_rtx (insn_mode);
4327 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4328 arg1 = builtin_save_expr (arg1);
4329 arg2 = builtin_save_expr (arg2);
4330 len = builtin_save_expr (len);
4332 arg1_rtx = get_memory_rtx (arg1, len);
4333 arg2_rtx = get_memory_rtx (arg2, len);
4334 arg3_rtx = expand_normal (len);
4335 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4336 GEN_INT (MIN (arg1_align, arg2_align)));
4337 if (insn)
4339 emit_insn (insn);
4341 /* Return the value in the proper mode for this function. */
4342 mode = TYPE_MODE (TREE_TYPE (exp));
4343 if (GET_MODE (result) == mode)
4344 return result;
4345 if (target == 0)
4346 return convert_to_mode (mode, result, 0);
4347 convert_move (target, result, 0);
4348 return target;
4351 /* Expand the library call ourselves using a stabilized argument
4352 list to avoid re-evaluating the function's arguments twice. */
4353 fndecl = get_callee_fndecl (exp);
4354 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4355 arg1, arg2, len);
4356 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4357 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4358 return expand_call (fn, target, target == const0_rtx);
4360 #endif
4361 return NULL_RTX;
4364 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4365 if that's convenient. */
4368 expand_builtin_saveregs (void)
4370 rtx val, seq;
4372 /* Don't do __builtin_saveregs more than once in a function.
4373 Save the result of the first call and reuse it. */
4374 if (saveregs_value != 0)
4375 return saveregs_value;
4377 /* When this function is called, it means that registers must be
4378 saved on entry to this function. So we migrate the call to the
4379 first insn of this function. */
4381 start_sequence ();
4383 /* Do whatever the machine needs done in this case. */
4384 val = targetm.calls.expand_builtin_saveregs ();
4386 seq = get_insns ();
4387 end_sequence ();
4389 saveregs_value = val;
4391 /* Put the insns after the NOTE that starts the function. If this
4392 is inside a start_sequence, make the outer-level insn chain current, so
4393 the code is placed at the start of the function. */
4394 push_topmost_sequence ();
4395 emit_insn_after (seq, entry_of_function ());
4396 pop_topmost_sequence ();
4398 return val;
4401 /* __builtin_args_info (N) returns word N of the arg space info
4402 for the current function. The number and meanings of words
4403 is controlled by the definition of CUMULATIVE_ARGS. */
4405 static rtx
4406 expand_builtin_args_info (tree exp)
4408 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4409 int *word_ptr = (int *) &crtl->args.info;
4411 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4413 if (call_expr_nargs (exp) != 0)
4415 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4416 error ("argument of %<__builtin_args_info%> must be constant");
4417 else
4419 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4421 if (wordnum < 0 || wordnum >= nwords)
4422 error ("argument of %<__builtin_args_info%> out of range");
4423 else
4424 return GEN_INT (word_ptr[wordnum]);
4427 else
4428 error ("missing argument in %<__builtin_args_info%>");
4430 return const0_rtx;
4433 /* Expand a call to __builtin_next_arg. */
4435 static rtx
4436 expand_builtin_next_arg (void)
4438 /* Checking arguments is already done in fold_builtin_next_arg
4439 that must be called before this function. */
4440 return expand_binop (ptr_mode, add_optab,
4441 crtl->args.internal_arg_pointer,
4442 crtl->args.arg_offset_rtx,
4443 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4446 /* Make it easier for the backends by protecting the valist argument
4447 from multiple evaluations. */
4449 static tree
4450 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4452 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4454 /* The current way of determining the type of valist is completely
4455 bogus. We should have the information on the va builtin instead. */
4456 if (!vatype)
4457 vatype = targetm.fn_abi_va_list (cfun->decl);
4459 if (TREE_CODE (vatype) == ARRAY_TYPE)
4461 if (TREE_SIDE_EFFECTS (valist))
4462 valist = save_expr (valist);
4464 /* For this case, the backends will be expecting a pointer to
4465 vatype, but it's possible we've actually been given an array
4466 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4467 So fix it. */
4468 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4470 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4471 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4474 else
4476 tree pt = build_pointer_type (vatype);
4478 if (! needs_lvalue)
4480 if (! TREE_SIDE_EFFECTS (valist))
4481 return valist;
4483 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4484 TREE_SIDE_EFFECTS (valist) = 1;
4487 if (TREE_SIDE_EFFECTS (valist))
4488 valist = save_expr (valist);
4489 valist = fold_build2_loc (loc, MEM_REF,
4490 vatype, valist, build_int_cst (pt, 0));
4493 return valist;
4496 /* The "standard" definition of va_list is void*. */
4498 tree
4499 std_build_builtin_va_list (void)
4501 return ptr_type_node;
4504 /* The "standard" abi va_list is va_list_type_node. */
4506 tree
4507 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4509 return va_list_type_node;
4512 /* The "standard" type of va_list is va_list_type_node. */
4514 tree
4515 std_canonical_va_list_type (tree type)
4517 tree wtype, htype;
4519 if (INDIRECT_REF_P (type))
4520 type = TREE_TYPE (type);
4521 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4522 type = TREE_TYPE (type);
4523 wtype = va_list_type_node;
4524 htype = type;
4525 /* Treat structure va_list types. */
4526 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4527 htype = TREE_TYPE (htype);
4528 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4530 /* If va_list is an array type, the argument may have decayed
4531 to a pointer type, e.g. by being passed to another function.
4532 In that case, unwrap both types so that we can compare the
4533 underlying records. */
4534 if (TREE_CODE (htype) == ARRAY_TYPE
4535 || POINTER_TYPE_P (htype))
4537 wtype = TREE_TYPE (wtype);
4538 htype = TREE_TYPE (htype);
4541 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4542 return va_list_type_node;
4544 return NULL_TREE;
4547 /* The "standard" implementation of va_start: just assign `nextarg' to
4548 the variable. */
4550 void
4551 std_expand_builtin_va_start (tree valist, rtx nextarg)
4553 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4554 convert_move (va_r, nextarg, 0);
4557 /* Expand EXP, a call to __builtin_va_start. */
4559 static rtx
4560 expand_builtin_va_start (tree exp)
4562 rtx nextarg;
4563 tree valist;
4564 location_t loc = EXPR_LOCATION (exp);
4566 if (call_expr_nargs (exp) < 2)
4568 error_at (loc, "too few arguments to function %<va_start%>");
4569 return const0_rtx;
4572 if (fold_builtin_next_arg (exp, true))
4573 return const0_rtx;
4575 nextarg = expand_builtin_next_arg ();
4576 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4578 if (targetm.expand_builtin_va_start)
4579 targetm.expand_builtin_va_start (valist, nextarg);
4580 else
4581 std_expand_builtin_va_start (valist, nextarg);
4583 return const0_rtx;
4586 /* The "standard" implementation of va_arg: read the value from the
4587 current (padded) address and increment by the (padded) size. */
4589 tree
4590 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4591 gimple_seq *post_p)
4593 tree addr, t, type_size, rounded_size, valist_tmp;
4594 unsigned HOST_WIDE_INT align, boundary;
4595 bool indirect;
4597 #ifdef ARGS_GROW_DOWNWARD
4598 /* All of the alignment and movement below is for args-grow-up machines.
4599 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4600 implement their own specialized gimplify_va_arg_expr routines. */
4601 gcc_unreachable ();
4602 #endif
4604 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4605 if (indirect)
4606 type = build_pointer_type (type);
4608 align = PARM_BOUNDARY / BITS_PER_UNIT;
4609 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4611 /* When we align parameter on stack for caller, if the parameter
4612 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4613 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4614 here with caller. */
4615 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4616 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4618 boundary /= BITS_PER_UNIT;
4620 /* Hoist the valist value into a temporary for the moment. */
4621 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4623 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4624 requires greater alignment, we must perform dynamic alignment. */
4625 if (boundary > align
4626 && !integer_zerop (TYPE_SIZE (type)))
4628 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4629 fold_build2 (POINTER_PLUS_EXPR,
4630 TREE_TYPE (valist),
4631 valist_tmp, size_int (boundary - 1)));
4632 gimplify_and_add (t, pre_p);
4634 t = fold_convert (sizetype, valist_tmp);
4635 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4636 fold_convert (TREE_TYPE (valist),
4637 fold_build2 (BIT_AND_EXPR, sizetype, t,
4638 size_int (-boundary))));
4639 gimplify_and_add (t, pre_p);
4641 else
4642 boundary = align;
4644 /* If the actual alignment is less than the alignment of the type,
4645 adjust the type accordingly so that we don't assume strict alignment
4646 when dereferencing the pointer. */
4647 boundary *= BITS_PER_UNIT;
4648 if (boundary < TYPE_ALIGN (type))
4650 type = build_variant_type_copy (type);
4651 TYPE_ALIGN (type) = boundary;
4654 /* Compute the rounded size of the type. */
4655 type_size = size_in_bytes (type);
4656 rounded_size = round_up (type_size, align);
4658 /* Reduce rounded_size so it's sharable with the postqueue. */
4659 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4661 /* Get AP. */
4662 addr = valist_tmp;
4663 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4665 /* Small args are padded downward. */
4666 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4667 rounded_size, size_int (align));
4668 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4669 size_binop (MINUS_EXPR, rounded_size, type_size));
4670 addr = fold_build2 (POINTER_PLUS_EXPR,
4671 TREE_TYPE (addr), addr, t);
4674 /* Compute new value for AP. */
4675 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4676 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4677 gimplify_and_add (t, pre_p);
4679 addr = fold_convert (build_pointer_type (type), addr);
4681 if (indirect)
4682 addr = build_va_arg_indirect_ref (addr);
4684 return build_va_arg_indirect_ref (addr);
4687 /* Build an indirect-ref expression over the given TREE, which represents a
4688 piece of a va_arg() expansion. */
4689 tree
4690 build_va_arg_indirect_ref (tree addr)
4692 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4694 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4695 mf_mark (addr);
4697 return addr;
4700 /* Return a dummy expression of type TYPE in order to keep going after an
4701 error. */
4703 static tree
4704 dummy_object (tree type)
4706 tree t = build_int_cst (build_pointer_type (type), 0);
4707 return build1 (INDIRECT_REF, type, t);
4710 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4711 builtin function, but a very special sort of operator. */
4713 enum gimplify_status
4714 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4716 tree promoted_type, have_va_type;
4717 tree valist = TREE_OPERAND (*expr_p, 0);
4718 tree type = TREE_TYPE (*expr_p);
4719 tree t;
4720 location_t loc = EXPR_LOCATION (*expr_p);
4722 /* Verify that valist is of the proper type. */
4723 have_va_type = TREE_TYPE (valist);
4724 if (have_va_type == error_mark_node)
4725 return GS_ERROR;
4726 have_va_type = targetm.canonical_va_list_type (have_va_type);
4728 if (have_va_type == NULL_TREE)
4730 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4731 return GS_ERROR;
4734 /* Generate a diagnostic for requesting data of a type that cannot
4735 be passed through `...' due to type promotion at the call site. */
4736 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4737 != type)
4739 static bool gave_help;
4740 bool warned;
4742 /* Unfortunately, this is merely undefined, rather than a constraint
4743 violation, so we cannot make this an error. If this call is never
4744 executed, the program is still strictly conforming. */
4745 warned = warning_at (loc, 0,
4746 "%qT is promoted to %qT when passed through %<...%>",
4747 type, promoted_type);
4748 if (!gave_help && warned)
4750 gave_help = true;
4751 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4752 promoted_type, type);
4755 /* We can, however, treat "undefined" any way we please.
4756 Call abort to encourage the user to fix the program. */
4757 if (warned)
4758 inform (loc, "if this code is reached, the program will abort");
4759 /* Before the abort, allow the evaluation of the va_list
4760 expression to exit or longjmp. */
4761 gimplify_and_add (valist, pre_p);
4762 t = build_call_expr_loc (loc,
4763 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4764 gimplify_and_add (t, pre_p);
4766 /* This is dead code, but go ahead and finish so that the
4767 mode of the result comes out right. */
4768 *expr_p = dummy_object (type);
4769 return GS_ALL_DONE;
4771 else
4773 /* Make it easier for the backends by protecting the valist argument
4774 from multiple evaluations. */
4775 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4777 /* For this case, the backends will be expecting a pointer to
4778 TREE_TYPE (abi), but it's possible we've
4779 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4780 So fix it. */
4781 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4783 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4784 valist = fold_convert_loc (loc, p1,
4785 build_fold_addr_expr_loc (loc, valist));
4788 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4790 else
4791 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4793 if (!targetm.gimplify_va_arg_expr)
4794 /* FIXME: Once most targets are converted we should merely
4795 assert this is non-null. */
4796 return GS_ALL_DONE;
4798 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4799 return GS_OK;
4803 /* Expand EXP, a call to __builtin_va_end. */
4805 static rtx
4806 expand_builtin_va_end (tree exp)
4808 tree valist = CALL_EXPR_ARG (exp, 0);
4810 /* Evaluate for side effects, if needed. I hate macros that don't
4811 do that. */
4812 if (TREE_SIDE_EFFECTS (valist))
4813 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4815 return const0_rtx;
4818 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4819 builtin rather than just as an assignment in stdarg.h because of the
4820 nastiness of array-type va_list types. */
4822 static rtx
4823 expand_builtin_va_copy (tree exp)
4825 tree dst, src, t;
4826 location_t loc = EXPR_LOCATION (exp);
4828 dst = CALL_EXPR_ARG (exp, 0);
4829 src = CALL_EXPR_ARG (exp, 1);
4831 dst = stabilize_va_list_loc (loc, dst, 1);
4832 src = stabilize_va_list_loc (loc, src, 0);
4834 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4836 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4838 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4839 TREE_SIDE_EFFECTS (t) = 1;
4840 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4842 else
4844 rtx dstb, srcb, size;
4846 /* Evaluate to pointers. */
4847 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4848 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4849 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4850 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4852 dstb = convert_memory_address (Pmode, dstb);
4853 srcb = convert_memory_address (Pmode, srcb);
4855 /* "Dereference" to BLKmode memories. */
4856 dstb = gen_rtx_MEM (BLKmode, dstb);
4857 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4858 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4859 srcb = gen_rtx_MEM (BLKmode, srcb);
4860 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4861 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4863 /* Copy. */
4864 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4867 return const0_rtx;
4870 /* Expand a call to one of the builtin functions __builtin_frame_address or
4871 __builtin_return_address. */
4873 static rtx
4874 expand_builtin_frame_address (tree fndecl, tree exp)
4876 /* The argument must be a nonnegative integer constant.
4877 It counts the number of frames to scan up the stack.
4878 The value is the return address saved in that frame. */
4879 if (call_expr_nargs (exp) == 0)
4880 /* Warning about missing arg was already issued. */
4881 return const0_rtx;
4882 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4884 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4885 error ("invalid argument to %<__builtin_frame_address%>");
4886 else
4887 error ("invalid argument to %<__builtin_return_address%>");
4888 return const0_rtx;
4890 else
4892 rtx tem
4893 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4894 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4896 /* Some ports cannot access arbitrary stack frames. */
4897 if (tem == NULL)
4899 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4900 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4901 else
4902 warning (0, "unsupported argument to %<__builtin_return_address%>");
4903 return const0_rtx;
4906 /* For __builtin_frame_address, return what we've got. */
4907 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4908 return tem;
4910 if (!REG_P (tem)
4911 && ! CONSTANT_P (tem))
4912 tem = copy_to_mode_reg (Pmode, tem);
4913 return tem;
4917 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4918 we failed and the caller should emit a normal call, otherwise try to get
4919 the result in TARGET, if convenient. */
4921 static rtx
4922 expand_builtin_alloca (tree exp, rtx target)
4924 rtx op0;
4925 rtx result;
4927 /* Emit normal call if marked not-inlineable. */
4928 if (CALL_CANNOT_INLINE_P (exp))
4929 return NULL_RTX;
4931 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4932 return NULL_RTX;
4934 /* Compute the argument. */
4935 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4937 /* Allocate the desired space. */
4938 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4939 result = convert_memory_address (ptr_mode, result);
4941 return result;
4944 /* Expand a call to a bswap builtin with argument ARG0. MODE
4945 is the mode to expand with. */
4947 static rtx
4948 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4950 enum machine_mode mode;
4951 tree arg;
4952 rtx op0;
4954 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4955 return NULL_RTX;
4957 arg = CALL_EXPR_ARG (exp, 0);
4958 mode = TYPE_MODE (TREE_TYPE (arg));
4959 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4961 target = expand_unop (mode, bswap_optab, op0, target, 1);
4963 gcc_assert (target);
4965 return convert_to_mode (mode, target, 0);
4968 /* Expand a call to a unary builtin in EXP.
4969 Return NULL_RTX if a normal call should be emitted rather than expanding the
4970 function in-line. If convenient, the result should be placed in TARGET.
4971 SUBTARGET may be used as the target for computing one of EXP's operands. */
4973 static rtx
4974 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4975 rtx subtarget, optab op_optab)
4977 rtx op0;
4979 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4980 return NULL_RTX;
4982 /* Compute the argument. */
4983 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4984 VOIDmode, EXPAND_NORMAL);
4985 /* Compute op, into TARGET if possible.
4986 Set TARGET to wherever the result comes back. */
4987 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4988 op_optab, op0, target, 1);
4989 gcc_assert (target);
4991 return convert_to_mode (target_mode, target, 0);
4994 /* Expand a call to __builtin_expect. We just return our argument
4995 as the builtin_expect semantic should've been already executed by
4996 tree branch prediction pass. */
4998 static rtx
4999 expand_builtin_expect (tree exp, rtx target)
5001 tree arg;
5003 if (call_expr_nargs (exp) < 2)
5004 return const0_rtx;
5005 arg = CALL_EXPR_ARG (exp, 0);
5007 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5008 /* When guessing was done, the hints should be already stripped away. */
5009 gcc_assert (!flag_guess_branch_prob
5010 || optimize == 0 || seen_error ());
5011 return target;
5014 void
5015 expand_builtin_trap (void)
5017 #ifdef HAVE_trap
5018 if (HAVE_trap)
5019 emit_insn (gen_trap ());
5020 else
5021 #endif
5022 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5023 emit_barrier ();
5026 /* Expand a call to __builtin_unreachable. We do nothing except emit
5027 a barrier saying that control flow will not pass here.
5029 It is the responsibility of the program being compiled to ensure
5030 that control flow does never reach __builtin_unreachable. */
5031 static void
5032 expand_builtin_unreachable (void)
5034 emit_barrier ();
5037 /* Expand EXP, a call to fabs, fabsf or fabsl.
5038 Return NULL_RTX if a normal call should be emitted rather than expanding
5039 the function inline. If convenient, the result should be placed
5040 in TARGET. SUBTARGET may be used as the target for computing
5041 the operand. */
5043 static rtx
5044 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5046 enum machine_mode mode;
5047 tree arg;
5048 rtx op0;
5050 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5051 return NULL_RTX;
5053 arg = CALL_EXPR_ARG (exp, 0);
5054 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5055 mode = TYPE_MODE (TREE_TYPE (arg));
5056 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5057 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5060 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5061 Return NULL is a normal call should be emitted rather than expanding the
5062 function inline. If convenient, the result should be placed in TARGET.
5063 SUBTARGET may be used as the target for computing the operand. */
5065 static rtx
5066 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5068 rtx op0, op1;
5069 tree arg;
5071 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5072 return NULL_RTX;
5074 arg = CALL_EXPR_ARG (exp, 0);
5075 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5077 arg = CALL_EXPR_ARG (exp, 1);
5078 op1 = expand_normal (arg);
5080 return expand_copysign (op0, op1, target);
5083 /* Create a new constant string literal and return a char* pointer to it.
5084 The STRING_CST value is the LEN characters at STR. */
5085 tree
5086 build_string_literal (int len, const char *str)
5088 tree t, elem, index, type;
5090 t = build_string (len, str);
5091 elem = build_type_variant (char_type_node, 1, 0);
5092 index = build_index_type (size_int (len - 1));
5093 type = build_array_type (elem, index);
5094 TREE_TYPE (t) = type;
5095 TREE_CONSTANT (t) = 1;
5096 TREE_READONLY (t) = 1;
5097 TREE_STATIC (t) = 1;
5099 type = build_pointer_type (elem);
5100 t = build1 (ADDR_EXPR, type,
5101 build4 (ARRAY_REF, elem,
5102 t, integer_zero_node, NULL_TREE, NULL_TREE));
5103 return t;
5106 /* Expand a call to either the entry or exit function profiler. */
5108 static rtx
5109 expand_builtin_profile_func (bool exitp)
5111 rtx this_rtx, which;
5113 this_rtx = DECL_RTL (current_function_decl);
5114 gcc_assert (MEM_P (this_rtx));
5115 this_rtx = XEXP (this_rtx, 0);
5117 if (exitp)
5118 which = profile_function_exit_libfunc;
5119 else
5120 which = profile_function_entry_libfunc;
5122 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5123 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5125 Pmode);
5127 return const0_rtx;
5130 /* Expand a call to __builtin___clear_cache. */
5132 static rtx
5133 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5135 #ifndef HAVE_clear_cache
5136 #ifdef CLEAR_INSN_CACHE
5137 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5138 does something. Just do the default expansion to a call to
5139 __clear_cache(). */
5140 return NULL_RTX;
5141 #else
5142 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5143 does nothing. There is no need to call it. Do nothing. */
5144 return const0_rtx;
5145 #endif /* CLEAR_INSN_CACHE */
5146 #else
5147 /* We have a "clear_cache" insn, and it will handle everything. */
5148 tree begin, end;
5149 rtx begin_rtx, end_rtx;
5150 enum insn_code icode;
5152 /* We must not expand to a library call. If we did, any
5153 fallback library function in libgcc that might contain a call to
5154 __builtin___clear_cache() would recurse infinitely. */
5155 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5157 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5158 return const0_rtx;
5161 if (HAVE_clear_cache)
5163 icode = CODE_FOR_clear_cache;
5165 begin = CALL_EXPR_ARG (exp, 0);
5166 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5167 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5168 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5169 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5171 end = CALL_EXPR_ARG (exp, 1);
5172 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5173 end_rtx = convert_memory_address (Pmode, end_rtx);
5174 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5175 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5177 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5179 return const0_rtx;
5180 #endif /* HAVE_clear_cache */
5183 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5185 static rtx
5186 round_trampoline_addr (rtx tramp)
5188 rtx temp, addend, mask;
5190 /* If we don't need too much alignment, we'll have been guaranteed
5191 proper alignment by get_trampoline_type. */
5192 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5193 return tramp;
5195 /* Round address up to desired boundary. */
5196 temp = gen_reg_rtx (Pmode);
5197 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5198 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5200 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5201 temp, 0, OPTAB_LIB_WIDEN);
5202 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5203 temp, 0, OPTAB_LIB_WIDEN);
5205 return tramp;
5208 static rtx
5209 expand_builtin_init_trampoline (tree exp)
5211 tree t_tramp, t_func, t_chain;
5212 rtx m_tramp, r_tramp, r_chain, tmp;
5214 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5215 POINTER_TYPE, VOID_TYPE))
5216 return NULL_RTX;
5218 t_tramp = CALL_EXPR_ARG (exp, 0);
5219 t_func = CALL_EXPR_ARG (exp, 1);
5220 t_chain = CALL_EXPR_ARG (exp, 2);
5222 r_tramp = expand_normal (t_tramp);
5223 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5224 MEM_NOTRAP_P (m_tramp) = 1;
5226 /* The TRAMP argument should be the address of a field within the
5227 local function's FRAME decl. Let's see if we can fill in the
5228 to fill in the MEM_ATTRs for this memory. */
5229 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5230 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
5231 true, 0);
5233 tmp = round_trampoline_addr (r_tramp);
5234 if (tmp != r_tramp)
5236 m_tramp = change_address (m_tramp, BLKmode, tmp);
5237 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5238 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
5241 /* The FUNC argument should be the address of the nested function.
5242 Extract the actual function decl to pass to the hook. */
5243 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5244 t_func = TREE_OPERAND (t_func, 0);
5245 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5247 r_chain = expand_normal (t_chain);
5249 /* Generate insns to initialize the trampoline. */
5250 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5252 trampolines_created = 1;
5254 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5255 "trampoline generated for nested function %qD", t_func);
5257 return const0_rtx;
5260 static rtx
5261 expand_builtin_adjust_trampoline (tree exp)
5263 rtx tramp;
5265 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5266 return NULL_RTX;
5268 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5269 tramp = round_trampoline_addr (tramp);
5270 if (targetm.calls.trampoline_adjust_address)
5271 tramp = targetm.calls.trampoline_adjust_address (tramp);
5273 return tramp;
5276 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5277 function. The function first checks whether the back end provides
5278 an insn to implement signbit for the respective mode. If not, it
5279 checks whether the floating point format of the value is such that
5280 the sign bit can be extracted. If that is not the case, the
5281 function returns NULL_RTX to indicate that a normal call should be
5282 emitted rather than expanding the function in-line. EXP is the
5283 expression that is a call to the builtin function; if convenient,
5284 the result should be placed in TARGET. */
5285 static rtx
5286 expand_builtin_signbit (tree exp, rtx target)
5288 const struct real_format *fmt;
5289 enum machine_mode fmode, imode, rmode;
5290 tree arg;
5291 int word, bitpos;
5292 enum insn_code icode;
5293 rtx temp;
5294 location_t loc = EXPR_LOCATION (exp);
5296 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5297 return NULL_RTX;
5299 arg = CALL_EXPR_ARG (exp, 0);
5300 fmode = TYPE_MODE (TREE_TYPE (arg));
5301 rmode = TYPE_MODE (TREE_TYPE (exp));
5302 fmt = REAL_MODE_FORMAT (fmode);
5304 arg = builtin_save_expr (arg);
5306 /* Expand the argument yielding a RTX expression. */
5307 temp = expand_normal (arg);
5309 /* Check if the back end provides an insn that handles signbit for the
5310 argument's mode. */
5311 icode = optab_handler (signbit_optab, fmode);
5312 if (icode != CODE_FOR_nothing)
5314 rtx last = get_last_insn ();
5315 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5316 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5317 return target;
5318 delete_insns_since (last);
5321 /* For floating point formats without a sign bit, implement signbit
5322 as "ARG < 0.0". */
5323 bitpos = fmt->signbit_ro;
5324 if (bitpos < 0)
5326 /* But we can't do this if the format supports signed zero. */
5327 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5328 return NULL_RTX;
5330 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5331 build_real (TREE_TYPE (arg), dconst0));
5332 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5335 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5337 imode = int_mode_for_mode (fmode);
5338 if (imode == BLKmode)
5339 return NULL_RTX;
5340 temp = gen_lowpart (imode, temp);
5342 else
5344 imode = word_mode;
5345 /* Handle targets with different FP word orders. */
5346 if (FLOAT_WORDS_BIG_ENDIAN)
5347 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5348 else
5349 word = bitpos / BITS_PER_WORD;
5350 temp = operand_subword_force (temp, word, fmode);
5351 bitpos = bitpos % BITS_PER_WORD;
5354 /* Force the intermediate word_mode (or narrower) result into a
5355 register. This avoids attempting to create paradoxical SUBREGs
5356 of floating point modes below. */
5357 temp = force_reg (imode, temp);
5359 /* If the bitpos is within the "result mode" lowpart, the operation
5360 can be implement with a single bitwise AND. Otherwise, we need
5361 a right shift and an AND. */
5363 if (bitpos < GET_MODE_BITSIZE (rmode))
5365 double_int mask = double_int_setbit (double_int_zero, bitpos);
5367 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5368 temp = gen_lowpart (rmode, temp);
5369 temp = expand_binop (rmode, and_optab, temp,
5370 immed_double_int_const (mask, rmode),
5371 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5373 else
5375 /* Perform a logical right shift to place the signbit in the least
5376 significant bit, then truncate the result to the desired mode
5377 and mask just this bit. */
5378 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5379 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5380 temp = gen_lowpart (rmode, temp);
5381 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5382 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5385 return temp;
5388 /* Expand fork or exec calls. TARGET is the desired target of the
5389 call. EXP is the call. FN is the
5390 identificator of the actual function. IGNORE is nonzero if the
5391 value is to be ignored. */
5393 static rtx
5394 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5396 tree id, decl;
5397 tree call;
5399 /* If we are not profiling, just call the function. */
5400 if (!profile_arc_flag)
5401 return NULL_RTX;
5403 /* Otherwise call the wrapper. This should be equivalent for the rest of
5404 compiler, so the code does not diverge, and the wrapper may run the
5405 code necessary for keeping the profiling sane. */
5407 switch (DECL_FUNCTION_CODE (fn))
5409 case BUILT_IN_FORK:
5410 id = get_identifier ("__gcov_fork");
5411 break;
5413 case BUILT_IN_EXECL:
5414 id = get_identifier ("__gcov_execl");
5415 break;
5417 case BUILT_IN_EXECV:
5418 id = get_identifier ("__gcov_execv");
5419 break;
5421 case BUILT_IN_EXECLP:
5422 id = get_identifier ("__gcov_execlp");
5423 break;
5425 case BUILT_IN_EXECLE:
5426 id = get_identifier ("__gcov_execle");
5427 break;
5429 case BUILT_IN_EXECVP:
5430 id = get_identifier ("__gcov_execvp");
5431 break;
5433 case BUILT_IN_EXECVE:
5434 id = get_identifier ("__gcov_execve");
5435 break;
5437 default:
5438 gcc_unreachable ();
5441 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5442 FUNCTION_DECL, id, TREE_TYPE (fn));
5443 DECL_EXTERNAL (decl) = 1;
5444 TREE_PUBLIC (decl) = 1;
5445 DECL_ARTIFICIAL (decl) = 1;
5446 TREE_NOTHROW (decl) = 1;
5447 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5448 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5449 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5450 return expand_call (call, target, ignore);
5455 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5456 the pointer in these functions is void*, the tree optimizers may remove
5457 casts. The mode computed in expand_builtin isn't reliable either, due
5458 to __sync_bool_compare_and_swap.
5460 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5461 group of builtins. This gives us log2 of the mode size. */
5463 static inline enum machine_mode
5464 get_builtin_sync_mode (int fcode_diff)
5466 /* The size is not negotiable, so ask not to get BLKmode in return
5467 if the target indicates that a smaller size would be better. */
5468 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5471 /* Expand the memory expression LOC and return the appropriate memory operand
5472 for the builtin_sync operations. */
5474 static rtx
5475 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5477 rtx addr, mem;
5479 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5480 addr = convert_memory_address (Pmode, addr);
5482 /* Note that we explicitly do not want any alias information for this
5483 memory, so that we kill all other live memories. Otherwise we don't
5484 satisfy the full barrier semantics of the intrinsic. */
5485 mem = validize_mem (gen_rtx_MEM (mode, addr));
5487 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5488 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5489 MEM_VOLATILE_P (mem) = 1;
5491 return mem;
5494 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5495 EXP is the CALL_EXPR. CODE is the rtx code
5496 that corresponds to the arithmetic or logical operation from the name;
5497 an exception here is that NOT actually means NAND. TARGET is an optional
5498 place for us to store the results; AFTER is true if this is the
5499 fetch_and_xxx form. IGNORE is true if we don't actually care about
5500 the result of the operation at all. */
5502 static rtx
5503 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5504 enum rtx_code code, bool after,
5505 rtx target, bool ignore)
5507 rtx val, mem;
5508 enum machine_mode old_mode;
5509 location_t loc = EXPR_LOCATION (exp);
5511 if (code == NOT && warn_sync_nand)
5513 tree fndecl = get_callee_fndecl (exp);
5514 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5516 static bool warned_f_a_n, warned_n_a_f;
5518 switch (fcode)
5520 case BUILT_IN_FETCH_AND_NAND_1:
5521 case BUILT_IN_FETCH_AND_NAND_2:
5522 case BUILT_IN_FETCH_AND_NAND_4:
5523 case BUILT_IN_FETCH_AND_NAND_8:
5524 case BUILT_IN_FETCH_AND_NAND_16:
5526 if (warned_f_a_n)
5527 break;
5529 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5530 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5531 warned_f_a_n = true;
5532 break;
5534 case BUILT_IN_NAND_AND_FETCH_1:
5535 case BUILT_IN_NAND_AND_FETCH_2:
5536 case BUILT_IN_NAND_AND_FETCH_4:
5537 case BUILT_IN_NAND_AND_FETCH_8:
5538 case BUILT_IN_NAND_AND_FETCH_16:
5540 if (warned_n_a_f)
5541 break;
5543 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5544 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5545 warned_n_a_f = true;
5546 break;
5548 default:
5549 gcc_unreachable ();
5553 /* Expand the operands. */
5554 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5556 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5557 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5558 of CONST_INTs, where we know the old_mode only from the call argument. */
5559 old_mode = GET_MODE (val);
5560 if (old_mode == VOIDmode)
5561 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5562 val = convert_modes (mode, old_mode, val, 1);
5564 if (ignore)
5565 return expand_sync_operation (mem, val, code);
5566 else
5567 return expand_sync_fetch_operation (mem, val, code, after, target);
5570 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5571 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5572 true if this is the boolean form. TARGET is a place for us to store the
5573 results; this is NOT optional if IS_BOOL is true. */
5575 static rtx
5576 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5577 bool is_bool, rtx target)
5579 rtx old_val, new_val, mem;
5580 enum machine_mode old_mode;
5582 /* Expand the operands. */
5583 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5586 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5587 mode, EXPAND_NORMAL);
5588 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5589 of CONST_INTs, where we know the old_mode only from the call argument. */
5590 old_mode = GET_MODE (old_val);
5591 if (old_mode == VOIDmode)
5592 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5593 old_val = convert_modes (mode, old_mode, old_val, 1);
5595 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5596 mode, EXPAND_NORMAL);
5597 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5598 of CONST_INTs, where we know the old_mode only from the call argument. */
5599 old_mode = GET_MODE (new_val);
5600 if (old_mode == VOIDmode)
5601 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5602 new_val = convert_modes (mode, old_mode, new_val, 1);
5604 if (is_bool)
5605 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5606 else
5607 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5610 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5611 general form is actually an atomic exchange, and some targets only
5612 support a reduced form with the second argument being a constant 1.
5613 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5614 the results. */
5616 static rtx
5617 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5618 rtx target)
5620 rtx val, mem;
5621 enum machine_mode old_mode;
5623 /* Expand the operands. */
5624 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5625 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5626 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5627 of CONST_INTs, where we know the old_mode only from the call argument. */
5628 old_mode = GET_MODE (val);
5629 if (old_mode == VOIDmode)
5630 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5631 val = convert_modes (mode, old_mode, val, 1);
5633 return expand_sync_lock_test_and_set (mem, val, target);
5636 /* Expand the __sync_synchronize intrinsic. */
5638 static void
5639 expand_builtin_synchronize (void)
5641 gimple x;
5642 VEC (tree, gc) *v_clobbers;
5644 #ifdef HAVE_memory_barrier
5645 if (HAVE_memory_barrier)
5647 emit_insn (gen_memory_barrier ());
5648 return;
5650 #endif
5652 if (synchronize_libfunc != NULL_RTX)
5654 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5655 return;
5658 /* If no explicit memory barrier instruction is available, create an
5659 empty asm stmt with a memory clobber. */
5660 v_clobbers = VEC_alloc (tree, gc, 1);
5661 VEC_quick_push (tree, v_clobbers,
5662 tree_cons (NULL, build_string (6, "memory"), NULL));
5663 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5664 gimple_asm_set_volatile (x, true);
5665 expand_asm_stmt (x);
5668 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5670 static void
5671 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5673 enum insn_code icode;
5674 rtx mem, insn;
5675 rtx val = const0_rtx;
5677 /* Expand the operands. */
5678 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5680 /* If there is an explicit operation in the md file, use it. */
5681 icode = direct_optab_handler (sync_lock_release_optab, mode);
5682 if (icode != CODE_FOR_nothing)
5684 if (!insn_data[icode].operand[1].predicate (val, mode))
5685 val = force_reg (mode, val);
5687 insn = GEN_FCN (icode) (mem, val);
5688 if (insn)
5690 emit_insn (insn);
5691 return;
5695 /* Otherwise we can implement this operation by emitting a barrier
5696 followed by a store of zero. */
5697 expand_builtin_synchronize ();
5698 emit_move_insn (mem, val);
5701 /* Expand an expression EXP that calls a built-in function,
5702 with result going to TARGET if that's convenient
5703 (and in mode MODE if that's convenient).
5704 SUBTARGET may be used as the target for computing one of EXP's operands.
5705 IGNORE is nonzero if the value is to be ignored. */
5708 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5709 int ignore)
5711 tree fndecl = get_callee_fndecl (exp);
5712 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5713 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5715 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5716 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5718 /* When not optimizing, generate calls to library functions for a certain
5719 set of builtins. */
5720 if (!optimize
5721 && !called_as_built_in (fndecl)
5722 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5723 && fcode != BUILT_IN_ALLOCA
5724 && fcode != BUILT_IN_FREE)
5725 return expand_call (exp, target, ignore);
5727 /* The built-in function expanders test for target == const0_rtx
5728 to determine whether the function's result will be ignored. */
5729 if (ignore)
5730 target = const0_rtx;
5732 /* If the result of a pure or const built-in function is ignored, and
5733 none of its arguments are volatile, we can avoid expanding the
5734 built-in call and just evaluate the arguments for side-effects. */
5735 if (target == const0_rtx
5736 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
5738 bool volatilep = false;
5739 tree arg;
5740 call_expr_arg_iterator iter;
5742 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5743 if (TREE_THIS_VOLATILE (arg))
5745 volatilep = true;
5746 break;
5749 if (! volatilep)
5751 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5752 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5753 return const0_rtx;
5757 switch (fcode)
5759 CASE_FLT_FN (BUILT_IN_FABS):
5760 target = expand_builtin_fabs (exp, target, subtarget);
5761 if (target)
5762 return target;
5763 break;
5765 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5766 target = expand_builtin_copysign (exp, target, subtarget);
5767 if (target)
5768 return target;
5769 break;
5771 /* Just do a normal library call if we were unable to fold
5772 the values. */
5773 CASE_FLT_FN (BUILT_IN_CABS):
5774 break;
5776 CASE_FLT_FN (BUILT_IN_EXP):
5777 CASE_FLT_FN (BUILT_IN_EXP10):
5778 CASE_FLT_FN (BUILT_IN_POW10):
5779 CASE_FLT_FN (BUILT_IN_EXP2):
5780 CASE_FLT_FN (BUILT_IN_EXPM1):
5781 CASE_FLT_FN (BUILT_IN_LOGB):
5782 CASE_FLT_FN (BUILT_IN_LOG):
5783 CASE_FLT_FN (BUILT_IN_LOG10):
5784 CASE_FLT_FN (BUILT_IN_LOG2):
5785 CASE_FLT_FN (BUILT_IN_LOG1P):
5786 CASE_FLT_FN (BUILT_IN_TAN):
5787 CASE_FLT_FN (BUILT_IN_ASIN):
5788 CASE_FLT_FN (BUILT_IN_ACOS):
5789 CASE_FLT_FN (BUILT_IN_ATAN):
5790 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5791 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5792 because of possible accuracy problems. */
5793 if (! flag_unsafe_math_optimizations)
5794 break;
5795 CASE_FLT_FN (BUILT_IN_SQRT):
5796 CASE_FLT_FN (BUILT_IN_FLOOR):
5797 CASE_FLT_FN (BUILT_IN_CEIL):
5798 CASE_FLT_FN (BUILT_IN_TRUNC):
5799 CASE_FLT_FN (BUILT_IN_ROUND):
5800 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5801 CASE_FLT_FN (BUILT_IN_RINT):
5802 target = expand_builtin_mathfn (exp, target, subtarget);
5803 if (target)
5804 return target;
5805 break;
5807 CASE_FLT_FN (BUILT_IN_ILOGB):
5808 if (! flag_unsafe_math_optimizations)
5809 break;
5810 CASE_FLT_FN (BUILT_IN_ISINF):
5811 CASE_FLT_FN (BUILT_IN_FINITE):
5812 case BUILT_IN_ISFINITE:
5813 case BUILT_IN_ISNORMAL:
5814 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5815 if (target)
5816 return target;
5817 break;
5819 CASE_FLT_FN (BUILT_IN_LCEIL):
5820 CASE_FLT_FN (BUILT_IN_LLCEIL):
5821 CASE_FLT_FN (BUILT_IN_LFLOOR):
5822 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5823 target = expand_builtin_int_roundingfn (exp, target);
5824 if (target)
5825 return target;
5826 break;
5828 CASE_FLT_FN (BUILT_IN_LRINT):
5829 CASE_FLT_FN (BUILT_IN_LLRINT):
5830 CASE_FLT_FN (BUILT_IN_LROUND):
5831 CASE_FLT_FN (BUILT_IN_LLROUND):
5832 target = expand_builtin_int_roundingfn_2 (exp, target);
5833 if (target)
5834 return target;
5835 break;
5837 CASE_FLT_FN (BUILT_IN_POW):
5838 target = expand_builtin_pow (exp, target, subtarget);
5839 if (target)
5840 return target;
5841 break;
5843 CASE_FLT_FN (BUILT_IN_POWI):
5844 target = expand_builtin_powi (exp, target, subtarget);
5845 if (target)
5846 return target;
5847 break;
5849 CASE_FLT_FN (BUILT_IN_ATAN2):
5850 CASE_FLT_FN (BUILT_IN_LDEXP):
5851 CASE_FLT_FN (BUILT_IN_SCALB):
5852 CASE_FLT_FN (BUILT_IN_SCALBN):
5853 CASE_FLT_FN (BUILT_IN_SCALBLN):
5854 if (! flag_unsafe_math_optimizations)
5855 break;
5857 CASE_FLT_FN (BUILT_IN_FMOD):
5858 CASE_FLT_FN (BUILT_IN_REMAINDER):
5859 CASE_FLT_FN (BUILT_IN_DREM):
5860 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5861 if (target)
5862 return target;
5863 break;
5865 CASE_FLT_FN (BUILT_IN_CEXPI):
5866 target = expand_builtin_cexpi (exp, target, subtarget);
5867 gcc_assert (target);
5868 return target;
5870 CASE_FLT_FN (BUILT_IN_SIN):
5871 CASE_FLT_FN (BUILT_IN_COS):
5872 if (! flag_unsafe_math_optimizations)
5873 break;
5874 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5875 if (target)
5876 return target;
5877 break;
5879 CASE_FLT_FN (BUILT_IN_SINCOS):
5880 if (! flag_unsafe_math_optimizations)
5881 break;
5882 target = expand_builtin_sincos (exp);
5883 if (target)
5884 return target;
5885 break;
5887 case BUILT_IN_APPLY_ARGS:
5888 return expand_builtin_apply_args ();
5890 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5891 FUNCTION with a copy of the parameters described by
5892 ARGUMENTS, and ARGSIZE. It returns a block of memory
5893 allocated on the stack into which is stored all the registers
5894 that might possibly be used for returning the result of a
5895 function. ARGUMENTS is the value returned by
5896 __builtin_apply_args. ARGSIZE is the number of bytes of
5897 arguments that must be copied. ??? How should this value be
5898 computed? We'll also need a safe worst case value for varargs
5899 functions. */
5900 case BUILT_IN_APPLY:
5901 if (!validate_arglist (exp, POINTER_TYPE,
5902 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5903 && !validate_arglist (exp, REFERENCE_TYPE,
5904 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5905 return const0_rtx;
5906 else
5908 rtx ops[3];
5910 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5911 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5912 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5914 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5917 /* __builtin_return (RESULT) causes the function to return the
5918 value described by RESULT. RESULT is address of the block of
5919 memory returned by __builtin_apply. */
5920 case BUILT_IN_RETURN:
5921 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5922 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5923 return const0_rtx;
5925 case BUILT_IN_SAVEREGS:
5926 return expand_builtin_saveregs ();
5928 case BUILT_IN_ARGS_INFO:
5929 return expand_builtin_args_info (exp);
5931 case BUILT_IN_VA_ARG_PACK:
5932 /* All valid uses of __builtin_va_arg_pack () are removed during
5933 inlining. */
5934 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5935 return const0_rtx;
5937 case BUILT_IN_VA_ARG_PACK_LEN:
5938 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5939 inlining. */
5940 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5941 return const0_rtx;
5943 /* Return the address of the first anonymous stack arg. */
5944 case BUILT_IN_NEXT_ARG:
5945 if (fold_builtin_next_arg (exp, false))
5946 return const0_rtx;
5947 return expand_builtin_next_arg ();
5949 case BUILT_IN_CLEAR_CACHE:
5950 target = expand_builtin___clear_cache (exp);
5951 if (target)
5952 return target;
5953 break;
5955 case BUILT_IN_CLASSIFY_TYPE:
5956 return expand_builtin_classify_type (exp);
5958 case BUILT_IN_CONSTANT_P:
5959 return const0_rtx;
5961 case BUILT_IN_FRAME_ADDRESS:
5962 case BUILT_IN_RETURN_ADDRESS:
5963 return expand_builtin_frame_address (fndecl, exp);
5965 /* Returns the address of the area where the structure is returned.
5966 0 otherwise. */
5967 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5968 if (call_expr_nargs (exp) != 0
5969 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5970 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5971 return const0_rtx;
5972 else
5973 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5975 case BUILT_IN_ALLOCA:
5976 target = expand_builtin_alloca (exp, target);
5977 if (target)
5978 return target;
5979 break;
5981 case BUILT_IN_STACK_SAVE:
5982 return expand_stack_save ();
5984 case BUILT_IN_STACK_RESTORE:
5985 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5986 return const0_rtx;
5988 case BUILT_IN_BSWAP32:
5989 case BUILT_IN_BSWAP64:
5990 target = expand_builtin_bswap (exp, target, subtarget);
5992 if (target)
5993 return target;
5994 break;
5996 CASE_INT_FN (BUILT_IN_FFS):
5997 case BUILT_IN_FFSIMAX:
5998 target = expand_builtin_unop (target_mode, exp, target,
5999 subtarget, ffs_optab);
6000 if (target)
6001 return target;
6002 break;
6004 CASE_INT_FN (BUILT_IN_CLZ):
6005 case BUILT_IN_CLZIMAX:
6006 target = expand_builtin_unop (target_mode, exp, target,
6007 subtarget, clz_optab);
6008 if (target)
6009 return target;
6010 break;
6012 CASE_INT_FN (BUILT_IN_CTZ):
6013 case BUILT_IN_CTZIMAX:
6014 target = expand_builtin_unop (target_mode, exp, target,
6015 subtarget, ctz_optab);
6016 if (target)
6017 return target;
6018 break;
6020 CASE_INT_FN (BUILT_IN_POPCOUNT):
6021 case BUILT_IN_POPCOUNTIMAX:
6022 target = expand_builtin_unop (target_mode, exp, target,
6023 subtarget, popcount_optab);
6024 if (target)
6025 return target;
6026 break;
6028 CASE_INT_FN (BUILT_IN_PARITY):
6029 case BUILT_IN_PARITYIMAX:
6030 target = expand_builtin_unop (target_mode, exp, target,
6031 subtarget, parity_optab);
6032 if (target)
6033 return target;
6034 break;
6036 case BUILT_IN_STRLEN:
6037 target = expand_builtin_strlen (exp, target, target_mode);
6038 if (target)
6039 return target;
6040 break;
6042 case BUILT_IN_STRCPY:
6043 target = expand_builtin_strcpy (exp, target);
6044 if (target)
6045 return target;
6046 break;
6048 case BUILT_IN_STRNCPY:
6049 target = expand_builtin_strncpy (exp, target);
6050 if (target)
6051 return target;
6052 break;
6054 case BUILT_IN_STPCPY:
6055 target = expand_builtin_stpcpy (exp, target, mode);
6056 if (target)
6057 return target;
6058 break;
6060 case BUILT_IN_MEMCPY:
6061 target = expand_builtin_memcpy (exp, target);
6062 if (target)
6063 return target;
6064 break;
6066 case BUILT_IN_MEMPCPY:
6067 target = expand_builtin_mempcpy (exp, target, mode);
6068 if (target)
6069 return target;
6070 break;
6072 case BUILT_IN_MEMSET:
6073 target = expand_builtin_memset (exp, target, mode);
6074 if (target)
6075 return target;
6076 break;
6078 case BUILT_IN_BZERO:
6079 target = expand_builtin_bzero (exp);
6080 if (target)
6081 return target;
6082 break;
6084 case BUILT_IN_STRCMP:
6085 target = expand_builtin_strcmp (exp, target);
6086 if (target)
6087 return target;
6088 break;
6090 case BUILT_IN_STRNCMP:
6091 target = expand_builtin_strncmp (exp, target, mode);
6092 if (target)
6093 return target;
6094 break;
6096 case BUILT_IN_BCMP:
6097 case BUILT_IN_MEMCMP:
6098 target = expand_builtin_memcmp (exp, target, mode);
6099 if (target)
6100 return target;
6101 break;
6103 case BUILT_IN_SETJMP:
6104 /* This should have been lowered to the builtins below. */
6105 gcc_unreachable ();
6107 case BUILT_IN_SETJMP_SETUP:
6108 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6109 and the receiver label. */
6110 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6112 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6113 VOIDmode, EXPAND_NORMAL);
6114 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6115 rtx label_r = label_rtx (label);
6117 /* This is copied from the handling of non-local gotos. */
6118 expand_builtin_setjmp_setup (buf_addr, label_r);
6119 nonlocal_goto_handler_labels
6120 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6121 nonlocal_goto_handler_labels);
6122 /* ??? Do not let expand_label treat us as such since we would
6123 not want to be both on the list of non-local labels and on
6124 the list of forced labels. */
6125 FORCED_LABEL (label) = 0;
6126 return const0_rtx;
6128 break;
6130 case BUILT_IN_SETJMP_DISPATCHER:
6131 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6132 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6134 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6135 rtx label_r = label_rtx (label);
6137 /* Remove the dispatcher label from the list of non-local labels
6138 since the receiver labels have been added to it above. */
6139 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6140 return const0_rtx;
6142 break;
6144 case BUILT_IN_SETJMP_RECEIVER:
6145 /* __builtin_setjmp_receiver is passed the receiver label. */
6146 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6148 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6149 rtx label_r = label_rtx (label);
6151 expand_builtin_setjmp_receiver (label_r);
6152 return const0_rtx;
6154 break;
6156 /* __builtin_longjmp is passed a pointer to an array of five words.
6157 It's similar to the C library longjmp function but works with
6158 __builtin_setjmp above. */
6159 case BUILT_IN_LONGJMP:
6160 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6162 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6163 VOIDmode, EXPAND_NORMAL);
6164 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6166 if (value != const1_rtx)
6168 error ("%<__builtin_longjmp%> second argument must be 1");
6169 return const0_rtx;
6172 expand_builtin_longjmp (buf_addr, value);
6173 return const0_rtx;
6175 break;
6177 case BUILT_IN_NONLOCAL_GOTO:
6178 target = expand_builtin_nonlocal_goto (exp);
6179 if (target)
6180 return target;
6181 break;
6183 /* This updates the setjmp buffer that is its argument with the value
6184 of the current stack pointer. */
6185 case BUILT_IN_UPDATE_SETJMP_BUF:
6186 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6188 rtx buf_addr
6189 = expand_normal (CALL_EXPR_ARG (exp, 0));
6191 expand_builtin_update_setjmp_buf (buf_addr);
6192 return const0_rtx;
6194 break;
6196 case BUILT_IN_TRAP:
6197 expand_builtin_trap ();
6198 return const0_rtx;
6200 case BUILT_IN_UNREACHABLE:
6201 expand_builtin_unreachable ();
6202 return const0_rtx;
6204 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6205 case BUILT_IN_SIGNBITD32:
6206 case BUILT_IN_SIGNBITD64:
6207 case BUILT_IN_SIGNBITD128:
6208 target = expand_builtin_signbit (exp, target);
6209 if (target)
6210 return target;
6211 break;
6213 /* Various hooks for the DWARF 2 __throw routine. */
6214 case BUILT_IN_UNWIND_INIT:
6215 expand_builtin_unwind_init ();
6216 return const0_rtx;
6217 case BUILT_IN_DWARF_CFA:
6218 return virtual_cfa_rtx;
6219 #ifdef DWARF2_UNWIND_INFO
6220 case BUILT_IN_DWARF_SP_COLUMN:
6221 return expand_builtin_dwarf_sp_column ();
6222 case BUILT_IN_INIT_DWARF_REG_SIZES:
6223 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6224 return const0_rtx;
6225 #endif
6226 case BUILT_IN_FROB_RETURN_ADDR:
6227 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6228 case BUILT_IN_EXTRACT_RETURN_ADDR:
6229 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6230 case BUILT_IN_EH_RETURN:
6231 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6232 CALL_EXPR_ARG (exp, 1));
6233 return const0_rtx;
6234 #ifdef EH_RETURN_DATA_REGNO
6235 case BUILT_IN_EH_RETURN_DATA_REGNO:
6236 return expand_builtin_eh_return_data_regno (exp);
6237 #endif
6238 case BUILT_IN_EXTEND_POINTER:
6239 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6240 case BUILT_IN_EH_POINTER:
6241 return expand_builtin_eh_pointer (exp);
6242 case BUILT_IN_EH_FILTER:
6243 return expand_builtin_eh_filter (exp);
6244 case BUILT_IN_EH_COPY_VALUES:
6245 return expand_builtin_eh_copy_values (exp);
6247 case BUILT_IN_VA_START:
6248 return expand_builtin_va_start (exp);
6249 case BUILT_IN_VA_END:
6250 return expand_builtin_va_end (exp);
6251 case BUILT_IN_VA_COPY:
6252 return expand_builtin_va_copy (exp);
6253 case BUILT_IN_EXPECT:
6254 return expand_builtin_expect (exp, target);
6255 case BUILT_IN_PREFETCH:
6256 expand_builtin_prefetch (exp);
6257 return const0_rtx;
6259 case BUILT_IN_PROFILE_FUNC_ENTER:
6260 return expand_builtin_profile_func (false);
6261 case BUILT_IN_PROFILE_FUNC_EXIT:
6262 return expand_builtin_profile_func (true);
6264 case BUILT_IN_INIT_TRAMPOLINE:
6265 return expand_builtin_init_trampoline (exp);
6266 case BUILT_IN_ADJUST_TRAMPOLINE:
6267 return expand_builtin_adjust_trampoline (exp);
6269 case BUILT_IN_FORK:
6270 case BUILT_IN_EXECL:
6271 case BUILT_IN_EXECV:
6272 case BUILT_IN_EXECLP:
6273 case BUILT_IN_EXECLE:
6274 case BUILT_IN_EXECVP:
6275 case BUILT_IN_EXECVE:
6276 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6277 if (target)
6278 return target;
6279 break;
6281 case BUILT_IN_FETCH_AND_ADD_1:
6282 case BUILT_IN_FETCH_AND_ADD_2:
6283 case BUILT_IN_FETCH_AND_ADD_4:
6284 case BUILT_IN_FETCH_AND_ADD_8:
6285 case BUILT_IN_FETCH_AND_ADD_16:
6286 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6287 target = expand_builtin_sync_operation (mode, exp, PLUS,
6288 false, target, ignore);
6289 if (target)
6290 return target;
6291 break;
6293 case BUILT_IN_FETCH_AND_SUB_1:
6294 case BUILT_IN_FETCH_AND_SUB_2:
6295 case BUILT_IN_FETCH_AND_SUB_4:
6296 case BUILT_IN_FETCH_AND_SUB_8:
6297 case BUILT_IN_FETCH_AND_SUB_16:
6298 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6299 target = expand_builtin_sync_operation (mode, exp, MINUS,
6300 false, target, ignore);
6301 if (target)
6302 return target;
6303 break;
6305 case BUILT_IN_FETCH_AND_OR_1:
6306 case BUILT_IN_FETCH_AND_OR_2:
6307 case BUILT_IN_FETCH_AND_OR_4:
6308 case BUILT_IN_FETCH_AND_OR_8:
6309 case BUILT_IN_FETCH_AND_OR_16:
6310 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6311 target = expand_builtin_sync_operation (mode, exp, IOR,
6312 false, target, ignore);
6313 if (target)
6314 return target;
6315 break;
6317 case BUILT_IN_FETCH_AND_AND_1:
6318 case BUILT_IN_FETCH_AND_AND_2:
6319 case BUILT_IN_FETCH_AND_AND_4:
6320 case BUILT_IN_FETCH_AND_AND_8:
6321 case BUILT_IN_FETCH_AND_AND_16:
6322 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6323 target = expand_builtin_sync_operation (mode, exp, AND,
6324 false, target, ignore);
6325 if (target)
6326 return target;
6327 break;
6329 case BUILT_IN_FETCH_AND_XOR_1:
6330 case BUILT_IN_FETCH_AND_XOR_2:
6331 case BUILT_IN_FETCH_AND_XOR_4:
6332 case BUILT_IN_FETCH_AND_XOR_8:
6333 case BUILT_IN_FETCH_AND_XOR_16:
6334 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6335 target = expand_builtin_sync_operation (mode, exp, XOR,
6336 false, target, ignore);
6337 if (target)
6338 return target;
6339 break;
6341 case BUILT_IN_FETCH_AND_NAND_1:
6342 case BUILT_IN_FETCH_AND_NAND_2:
6343 case BUILT_IN_FETCH_AND_NAND_4:
6344 case BUILT_IN_FETCH_AND_NAND_8:
6345 case BUILT_IN_FETCH_AND_NAND_16:
6346 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6347 target = expand_builtin_sync_operation (mode, exp, NOT,
6348 false, target, ignore);
6349 if (target)
6350 return target;
6351 break;
6353 case BUILT_IN_ADD_AND_FETCH_1:
6354 case BUILT_IN_ADD_AND_FETCH_2:
6355 case BUILT_IN_ADD_AND_FETCH_4:
6356 case BUILT_IN_ADD_AND_FETCH_8:
6357 case BUILT_IN_ADD_AND_FETCH_16:
6358 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6359 target = expand_builtin_sync_operation (mode, exp, PLUS,
6360 true, target, ignore);
6361 if (target)
6362 return target;
6363 break;
6365 case BUILT_IN_SUB_AND_FETCH_1:
6366 case BUILT_IN_SUB_AND_FETCH_2:
6367 case BUILT_IN_SUB_AND_FETCH_4:
6368 case BUILT_IN_SUB_AND_FETCH_8:
6369 case BUILT_IN_SUB_AND_FETCH_16:
6370 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6371 target = expand_builtin_sync_operation (mode, exp, MINUS,
6372 true, target, ignore);
6373 if (target)
6374 return target;
6375 break;
6377 case BUILT_IN_OR_AND_FETCH_1:
6378 case BUILT_IN_OR_AND_FETCH_2:
6379 case BUILT_IN_OR_AND_FETCH_4:
6380 case BUILT_IN_OR_AND_FETCH_8:
6381 case BUILT_IN_OR_AND_FETCH_16:
6382 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6383 target = expand_builtin_sync_operation (mode, exp, IOR,
6384 true, target, ignore);
6385 if (target)
6386 return target;
6387 break;
6389 case BUILT_IN_AND_AND_FETCH_1:
6390 case BUILT_IN_AND_AND_FETCH_2:
6391 case BUILT_IN_AND_AND_FETCH_4:
6392 case BUILT_IN_AND_AND_FETCH_8:
6393 case BUILT_IN_AND_AND_FETCH_16:
6394 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6395 target = expand_builtin_sync_operation (mode, exp, AND,
6396 true, target, ignore);
6397 if (target)
6398 return target;
6399 break;
6401 case BUILT_IN_XOR_AND_FETCH_1:
6402 case BUILT_IN_XOR_AND_FETCH_2:
6403 case BUILT_IN_XOR_AND_FETCH_4:
6404 case BUILT_IN_XOR_AND_FETCH_8:
6405 case BUILT_IN_XOR_AND_FETCH_16:
6406 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6407 target = expand_builtin_sync_operation (mode, exp, XOR,
6408 true, target, ignore);
6409 if (target)
6410 return target;
6411 break;
6413 case BUILT_IN_NAND_AND_FETCH_1:
6414 case BUILT_IN_NAND_AND_FETCH_2:
6415 case BUILT_IN_NAND_AND_FETCH_4:
6416 case BUILT_IN_NAND_AND_FETCH_8:
6417 case BUILT_IN_NAND_AND_FETCH_16:
6418 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6419 target = expand_builtin_sync_operation (mode, exp, NOT,
6420 true, target, ignore);
6421 if (target)
6422 return target;
6423 break;
6425 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6426 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6427 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6428 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6429 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6430 if (mode == VOIDmode)
6431 mode = TYPE_MODE (boolean_type_node);
6432 if (!target || !register_operand (target, mode))
6433 target = gen_reg_rtx (mode);
6435 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6436 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6437 if (target)
6438 return target;
6439 break;
6441 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6442 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6443 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6444 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6445 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6446 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6447 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6448 if (target)
6449 return target;
6450 break;
6452 case BUILT_IN_LOCK_TEST_AND_SET_1:
6453 case BUILT_IN_LOCK_TEST_AND_SET_2:
6454 case BUILT_IN_LOCK_TEST_AND_SET_4:
6455 case BUILT_IN_LOCK_TEST_AND_SET_8:
6456 case BUILT_IN_LOCK_TEST_AND_SET_16:
6457 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6458 target = expand_builtin_lock_test_and_set (mode, exp, target);
6459 if (target)
6460 return target;
6461 break;
6463 case BUILT_IN_LOCK_RELEASE_1:
6464 case BUILT_IN_LOCK_RELEASE_2:
6465 case BUILT_IN_LOCK_RELEASE_4:
6466 case BUILT_IN_LOCK_RELEASE_8:
6467 case BUILT_IN_LOCK_RELEASE_16:
6468 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6469 expand_builtin_lock_release (mode, exp);
6470 return const0_rtx;
6472 case BUILT_IN_SYNCHRONIZE:
6473 expand_builtin_synchronize ();
6474 return const0_rtx;
6476 case BUILT_IN_OBJECT_SIZE:
6477 return expand_builtin_object_size (exp);
6479 case BUILT_IN_MEMCPY_CHK:
6480 case BUILT_IN_MEMPCPY_CHK:
6481 case BUILT_IN_MEMMOVE_CHK:
6482 case BUILT_IN_MEMSET_CHK:
6483 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6484 if (target)
6485 return target;
6486 break;
6488 case BUILT_IN_STRCPY_CHK:
6489 case BUILT_IN_STPCPY_CHK:
6490 case BUILT_IN_STRNCPY_CHK:
6491 case BUILT_IN_STRCAT_CHK:
6492 case BUILT_IN_STRNCAT_CHK:
6493 case BUILT_IN_SNPRINTF_CHK:
6494 case BUILT_IN_VSNPRINTF_CHK:
6495 maybe_emit_chk_warning (exp, fcode);
6496 break;
6498 case BUILT_IN_SPRINTF_CHK:
6499 case BUILT_IN_VSPRINTF_CHK:
6500 maybe_emit_sprintf_chk_warning (exp, fcode);
6501 break;
6503 case BUILT_IN_FREE:
6504 maybe_emit_free_warning (exp);
6505 break;
6507 default: /* just do library call, if unknown builtin */
6508 break;
6511 /* The switch statement above can drop through to cause the function
6512 to be called normally. */
6513 return expand_call (exp, target, ignore);
6516 /* Determine whether a tree node represents a call to a built-in
6517 function. If the tree T is a call to a built-in function with
6518 the right number of arguments of the appropriate types, return
6519 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6520 Otherwise the return value is END_BUILTINS. */
6522 enum built_in_function
6523 builtin_mathfn_code (const_tree t)
6525 const_tree fndecl, arg, parmlist;
6526 const_tree argtype, parmtype;
6527 const_call_expr_arg_iterator iter;
6529 if (TREE_CODE (t) != CALL_EXPR
6530 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6531 return END_BUILTINS;
6533 fndecl = get_callee_fndecl (t);
6534 if (fndecl == NULL_TREE
6535 || TREE_CODE (fndecl) != FUNCTION_DECL
6536 || ! DECL_BUILT_IN (fndecl)
6537 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6538 return END_BUILTINS;
6540 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6541 init_const_call_expr_arg_iterator (t, &iter);
6542 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6544 /* If a function doesn't take a variable number of arguments,
6545 the last element in the list will have type `void'. */
6546 parmtype = TREE_VALUE (parmlist);
6547 if (VOID_TYPE_P (parmtype))
6549 if (more_const_call_expr_args_p (&iter))
6550 return END_BUILTINS;
6551 return DECL_FUNCTION_CODE (fndecl);
6554 if (! more_const_call_expr_args_p (&iter))
6555 return END_BUILTINS;
6557 arg = next_const_call_expr_arg (&iter);
6558 argtype = TREE_TYPE (arg);
6560 if (SCALAR_FLOAT_TYPE_P (parmtype))
6562 if (! SCALAR_FLOAT_TYPE_P (argtype))
6563 return END_BUILTINS;
6565 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6567 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6568 return END_BUILTINS;
6570 else if (POINTER_TYPE_P (parmtype))
6572 if (! POINTER_TYPE_P (argtype))
6573 return END_BUILTINS;
6575 else if (INTEGRAL_TYPE_P (parmtype))
6577 if (! INTEGRAL_TYPE_P (argtype))
6578 return END_BUILTINS;
6580 else
6581 return END_BUILTINS;
6584 /* Variable-length argument list. */
6585 return DECL_FUNCTION_CODE (fndecl);
6588 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6589 evaluate to a constant. */
6591 static tree
6592 fold_builtin_constant_p (tree arg)
6594 /* We return 1 for a numeric type that's known to be a constant
6595 value at compile-time or for an aggregate type that's a
6596 literal constant. */
6597 STRIP_NOPS (arg);
6599 /* If we know this is a constant, emit the constant of one. */
6600 if (CONSTANT_CLASS_P (arg)
6601 || (TREE_CODE (arg) == CONSTRUCTOR
6602 && TREE_CONSTANT (arg)))
6603 return integer_one_node;
6604 if (TREE_CODE (arg) == ADDR_EXPR)
6606 tree op = TREE_OPERAND (arg, 0);
6607 if (TREE_CODE (op) == STRING_CST
6608 || (TREE_CODE (op) == ARRAY_REF
6609 && integer_zerop (TREE_OPERAND (op, 1))
6610 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6611 return integer_one_node;
6614 /* If this expression has side effects, show we don't know it to be a
6615 constant. Likewise if it's a pointer or aggregate type since in
6616 those case we only want literals, since those are only optimized
6617 when generating RTL, not later.
6618 And finally, if we are compiling an initializer, not code, we
6619 need to return a definite result now; there's not going to be any
6620 more optimization done. */
6621 if (TREE_SIDE_EFFECTS (arg)
6622 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6623 || POINTER_TYPE_P (TREE_TYPE (arg))
6624 || cfun == 0
6625 || folding_initializer)
6626 return integer_zero_node;
6628 return NULL_TREE;
6631 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6632 return it as a truthvalue. */
6634 static tree
6635 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6637 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6639 fn = built_in_decls[BUILT_IN_EXPECT];
6640 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6641 ret_type = TREE_TYPE (TREE_TYPE (fn));
6642 pred_type = TREE_VALUE (arg_types);
6643 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6645 pred = fold_convert_loc (loc, pred_type, pred);
6646 expected = fold_convert_loc (loc, expected_type, expected);
6647 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6649 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6650 build_int_cst (ret_type, 0));
6653 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6654 NULL_TREE if no simplification is possible. */
6656 static tree
6657 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6659 tree inner, fndecl;
6660 enum tree_code code;
6662 /* If this is a builtin_expect within a builtin_expect keep the
6663 inner one. See through a comparison against a constant. It
6664 might have been added to create a thruthvalue. */
6665 inner = arg0;
6666 if (COMPARISON_CLASS_P (inner)
6667 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6668 inner = TREE_OPERAND (inner, 0);
6670 if (TREE_CODE (inner) == CALL_EXPR
6671 && (fndecl = get_callee_fndecl (inner))
6672 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6673 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6674 return arg0;
6676 /* Distribute the expected value over short-circuiting operators.
6677 See through the cast from truthvalue_type_node to long. */
6678 inner = arg0;
6679 while (TREE_CODE (inner) == NOP_EXPR
6680 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6681 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6682 inner = TREE_OPERAND (inner, 0);
6684 code = TREE_CODE (inner);
6685 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6687 tree op0 = TREE_OPERAND (inner, 0);
6688 tree op1 = TREE_OPERAND (inner, 1);
6690 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6691 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6692 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6694 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6697 /* If the argument isn't invariant then there's nothing else we can do. */
6698 if (!TREE_CONSTANT (arg0))
6699 return NULL_TREE;
6701 /* If we expect that a comparison against the argument will fold to
6702 a constant return the constant. In practice, this means a true
6703 constant or the address of a non-weak symbol. */
6704 inner = arg0;
6705 STRIP_NOPS (inner);
6706 if (TREE_CODE (inner) == ADDR_EXPR)
6710 inner = TREE_OPERAND (inner, 0);
6712 while (TREE_CODE (inner) == COMPONENT_REF
6713 || TREE_CODE (inner) == ARRAY_REF);
6714 if ((TREE_CODE (inner) == VAR_DECL
6715 || TREE_CODE (inner) == FUNCTION_DECL)
6716 && DECL_WEAK (inner))
6717 return NULL_TREE;
6720 /* Otherwise, ARG0 already has the proper type for the return value. */
6721 return arg0;
6724 /* Fold a call to __builtin_classify_type with argument ARG. */
6726 static tree
6727 fold_builtin_classify_type (tree arg)
6729 if (arg == 0)
6730 return build_int_cst (NULL_TREE, no_type_class);
6732 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6735 /* Fold a call to __builtin_strlen with argument ARG. */
6737 static tree
6738 fold_builtin_strlen (location_t loc, tree type, tree arg)
6740 if (!validate_arg (arg, POINTER_TYPE))
6741 return NULL_TREE;
6742 else
6744 tree len = c_strlen (arg, 0);
6746 if (len)
6747 return fold_convert_loc (loc, type, len);
6749 return NULL_TREE;
6753 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6755 static tree
6756 fold_builtin_inf (location_t loc, tree type, int warn)
6758 REAL_VALUE_TYPE real;
6760 /* __builtin_inff is intended to be usable to define INFINITY on all
6761 targets. If an infinity is not available, INFINITY expands "to a
6762 positive constant of type float that overflows at translation
6763 time", footnote "In this case, using INFINITY will violate the
6764 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6765 Thus we pedwarn to ensure this constraint violation is
6766 diagnosed. */
6767 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6768 pedwarn (loc, 0, "target format does not support infinity");
6770 real_inf (&real);
6771 return build_real (type, real);
6774 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6776 static tree
6777 fold_builtin_nan (tree arg, tree type, int quiet)
6779 REAL_VALUE_TYPE real;
6780 const char *str;
6782 if (!validate_arg (arg, POINTER_TYPE))
6783 return NULL_TREE;
6784 str = c_getstr (arg);
6785 if (!str)
6786 return NULL_TREE;
6788 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6789 return NULL_TREE;
6791 return build_real (type, real);
6794 /* Return true if the floating point expression T has an integer value.
6795 We also allow +Inf, -Inf and NaN to be considered integer values. */
6797 static bool
6798 integer_valued_real_p (tree t)
6800 switch (TREE_CODE (t))
6802 case FLOAT_EXPR:
6803 return true;
6805 case ABS_EXPR:
6806 case SAVE_EXPR:
6807 return integer_valued_real_p (TREE_OPERAND (t, 0));
6809 case COMPOUND_EXPR:
6810 case MODIFY_EXPR:
6811 case BIND_EXPR:
6812 return integer_valued_real_p (TREE_OPERAND (t, 1));
6814 case PLUS_EXPR:
6815 case MINUS_EXPR:
6816 case MULT_EXPR:
6817 case MIN_EXPR:
6818 case MAX_EXPR:
6819 return integer_valued_real_p (TREE_OPERAND (t, 0))
6820 && integer_valued_real_p (TREE_OPERAND (t, 1));
6822 case COND_EXPR:
6823 return integer_valued_real_p (TREE_OPERAND (t, 1))
6824 && integer_valued_real_p (TREE_OPERAND (t, 2));
6826 case REAL_CST:
6827 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6829 case NOP_EXPR:
6831 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6832 if (TREE_CODE (type) == INTEGER_TYPE)
6833 return true;
6834 if (TREE_CODE (type) == REAL_TYPE)
6835 return integer_valued_real_p (TREE_OPERAND (t, 0));
6836 break;
6839 case CALL_EXPR:
6840 switch (builtin_mathfn_code (t))
6842 CASE_FLT_FN (BUILT_IN_CEIL):
6843 CASE_FLT_FN (BUILT_IN_FLOOR):
6844 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6845 CASE_FLT_FN (BUILT_IN_RINT):
6846 CASE_FLT_FN (BUILT_IN_ROUND):
6847 CASE_FLT_FN (BUILT_IN_TRUNC):
6848 return true;
6850 CASE_FLT_FN (BUILT_IN_FMIN):
6851 CASE_FLT_FN (BUILT_IN_FMAX):
6852 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6853 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6855 default:
6856 break;
6858 break;
6860 default:
6861 break;
6863 return false;
6866 /* FNDECL is assumed to be a builtin where truncation can be propagated
6867 across (for instance floor((double)f) == (double)floorf (f).
6868 Do the transformation for a call with argument ARG. */
6870 static tree
6871 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6873 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6875 if (!validate_arg (arg, REAL_TYPE))
6876 return NULL_TREE;
6878 /* Integer rounding functions are idempotent. */
6879 if (fcode == builtin_mathfn_code (arg))
6880 return arg;
6882 /* If argument is already integer valued, and we don't need to worry
6883 about setting errno, there's no need to perform rounding. */
6884 if (! flag_errno_math && integer_valued_real_p (arg))
6885 return arg;
6887 if (optimize)
6889 tree arg0 = strip_float_extensions (arg);
6890 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6891 tree newtype = TREE_TYPE (arg0);
6892 tree decl;
6894 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6895 && (decl = mathfn_built_in (newtype, fcode)))
6896 return fold_convert_loc (loc, ftype,
6897 build_call_expr_loc (loc, decl, 1,
6898 fold_convert_loc (loc,
6899 newtype,
6900 arg0)));
6902 return NULL_TREE;
6905 /* FNDECL is assumed to be builtin which can narrow the FP type of
6906 the argument, for instance lround((double)f) -> lroundf (f).
6907 Do the transformation for a call with argument ARG. */
6909 static tree
6910 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6912 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6914 if (!validate_arg (arg, REAL_TYPE))
6915 return NULL_TREE;
6917 /* If argument is already integer valued, and we don't need to worry
6918 about setting errno, there's no need to perform rounding. */
6919 if (! flag_errno_math && integer_valued_real_p (arg))
6920 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6921 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6923 if (optimize)
6925 tree ftype = TREE_TYPE (arg);
6926 tree arg0 = strip_float_extensions (arg);
6927 tree newtype = TREE_TYPE (arg0);
6928 tree decl;
6930 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6931 && (decl = mathfn_built_in (newtype, fcode)))
6932 return build_call_expr_loc (loc, decl, 1,
6933 fold_convert_loc (loc, newtype, arg0));
6936 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6937 sizeof (long long) == sizeof (long). */
6938 if (TYPE_PRECISION (long_long_integer_type_node)
6939 == TYPE_PRECISION (long_integer_type_node))
6941 tree newfn = NULL_TREE;
6942 switch (fcode)
6944 CASE_FLT_FN (BUILT_IN_LLCEIL):
6945 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6946 break;
6948 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6949 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6950 break;
6952 CASE_FLT_FN (BUILT_IN_LLROUND):
6953 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6954 break;
6956 CASE_FLT_FN (BUILT_IN_LLRINT):
6957 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6958 break;
6960 default:
6961 break;
6964 if (newfn)
6966 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6967 return fold_convert_loc (loc,
6968 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6972 return NULL_TREE;
6975 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6976 return type. Return NULL_TREE if no simplification can be made. */
6978 static tree
6979 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6981 tree res;
6983 if (!validate_arg (arg, COMPLEX_TYPE)
6984 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6985 return NULL_TREE;
6987 /* Calculate the result when the argument is a constant. */
6988 if (TREE_CODE (arg) == COMPLEX_CST
6989 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6990 type, mpfr_hypot)))
6991 return res;
6993 if (TREE_CODE (arg) == COMPLEX_EXPR)
6995 tree real = TREE_OPERAND (arg, 0);
6996 tree imag = TREE_OPERAND (arg, 1);
6998 /* If either part is zero, cabs is fabs of the other. */
6999 if (real_zerop (real))
7000 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7001 if (real_zerop (imag))
7002 return fold_build1_loc (loc, ABS_EXPR, type, real);
7004 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7005 if (flag_unsafe_math_optimizations
7006 && operand_equal_p (real, imag, OEP_PURE_SAME))
7008 const REAL_VALUE_TYPE sqrt2_trunc
7009 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7010 STRIP_NOPS (real);
7011 return fold_build2_loc (loc, MULT_EXPR, type,
7012 fold_build1_loc (loc, ABS_EXPR, type, real),
7013 build_real (type, sqrt2_trunc));
7017 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7018 if (TREE_CODE (arg) == NEGATE_EXPR
7019 || TREE_CODE (arg) == CONJ_EXPR)
7020 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7022 /* Don't do this when optimizing for size. */
7023 if (flag_unsafe_math_optimizations
7024 && optimize && optimize_function_for_speed_p (cfun))
7026 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7028 if (sqrtfn != NULL_TREE)
7030 tree rpart, ipart, result;
7032 arg = builtin_save_expr (arg);
7034 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7035 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7037 rpart = builtin_save_expr (rpart);
7038 ipart = builtin_save_expr (ipart);
7040 result = fold_build2_loc (loc, PLUS_EXPR, type,
7041 fold_build2_loc (loc, MULT_EXPR, type,
7042 rpart, rpart),
7043 fold_build2_loc (loc, MULT_EXPR, type,
7044 ipart, ipart));
7046 return build_call_expr_loc (loc, sqrtfn, 1, result);
7050 return NULL_TREE;
7053 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7054 complex tree type of the result. If NEG is true, the imaginary
7055 zero is negative. */
7057 static tree
7058 build_complex_cproj (tree type, bool neg)
7060 REAL_VALUE_TYPE rinf, rzero = dconst0;
7062 real_inf (&rinf);
7063 rzero.sign = neg;
7064 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7065 build_real (TREE_TYPE (type), rzero));
7068 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7069 return type. Return NULL_TREE if no simplification can be made. */
7071 static tree
7072 fold_builtin_cproj (location_t loc, tree arg, tree type)
7074 if (!validate_arg (arg, COMPLEX_TYPE)
7075 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7076 return NULL_TREE;
7078 /* If there are no infinities, return arg. */
7079 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7080 return non_lvalue_loc (loc, arg);
7082 /* Calculate the result when the argument is a constant. */
7083 if (TREE_CODE (arg) == COMPLEX_CST)
7085 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7086 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7088 if (real_isinf (real) || real_isinf (imag))
7089 return build_complex_cproj (type, imag->sign);
7090 else
7091 return arg;
7093 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7095 tree real = TREE_OPERAND (arg, 0);
7096 tree imag = TREE_OPERAND (arg, 1);
7098 STRIP_NOPS (real);
7099 STRIP_NOPS (imag);
7101 /* If the real part is inf and the imag part is known to be
7102 nonnegative, return (inf + 0i). Remember side-effects are
7103 possible in the imag part. */
7104 if (TREE_CODE (real) == REAL_CST
7105 && real_isinf (TREE_REAL_CST_PTR (real))
7106 && tree_expr_nonnegative_p (imag))
7107 return omit_one_operand_loc (loc, type,
7108 build_complex_cproj (type, false),
7109 arg);
7111 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7112 Remember side-effects are possible in the real part. */
7113 if (TREE_CODE (imag) == REAL_CST
7114 && real_isinf (TREE_REAL_CST_PTR (imag)))
7115 return
7116 omit_one_operand_loc (loc, type,
7117 build_complex_cproj (type, TREE_REAL_CST_PTR
7118 (imag)->sign), arg);
7121 return NULL_TREE;
7124 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7125 Return NULL_TREE if no simplification can be made. */
7127 static tree
7128 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7131 enum built_in_function fcode;
7132 tree res;
7134 if (!validate_arg (arg, REAL_TYPE))
7135 return NULL_TREE;
7137 /* Calculate the result when the argument is a constant. */
7138 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7139 return res;
7141 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7142 fcode = builtin_mathfn_code (arg);
7143 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7145 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7146 arg = fold_build2_loc (loc, MULT_EXPR, type,
7147 CALL_EXPR_ARG (arg, 0),
7148 build_real (type, dconsthalf));
7149 return build_call_expr_loc (loc, expfn, 1, arg);
7152 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7153 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7155 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7157 if (powfn)
7159 tree arg0 = CALL_EXPR_ARG (arg, 0);
7160 tree tree_root;
7161 /* The inner root was either sqrt or cbrt. */
7162 /* This was a conditional expression but it triggered a bug
7163 in Sun C 5.5. */
7164 REAL_VALUE_TYPE dconstroot;
7165 if (BUILTIN_SQRT_P (fcode))
7166 dconstroot = dconsthalf;
7167 else
7168 dconstroot = dconst_third ();
7170 /* Adjust for the outer root. */
7171 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7172 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7173 tree_root = build_real (type, dconstroot);
7174 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7178 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7179 if (flag_unsafe_math_optimizations
7180 && (fcode == BUILT_IN_POW
7181 || fcode == BUILT_IN_POWF
7182 || fcode == BUILT_IN_POWL))
7184 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7185 tree arg0 = CALL_EXPR_ARG (arg, 0);
7186 tree arg1 = CALL_EXPR_ARG (arg, 1);
7187 tree narg1;
7188 if (!tree_expr_nonnegative_p (arg0))
7189 arg0 = build1 (ABS_EXPR, type, arg0);
7190 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7191 build_real (type, dconsthalf));
7192 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7195 return NULL_TREE;
7198 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7199 Return NULL_TREE if no simplification can be made. */
7201 static tree
7202 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7204 const enum built_in_function fcode = builtin_mathfn_code (arg);
7205 tree res;
7207 if (!validate_arg (arg, REAL_TYPE))
7208 return NULL_TREE;
7210 /* Calculate the result when the argument is a constant. */
7211 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7212 return res;
7214 if (flag_unsafe_math_optimizations)
7216 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7217 if (BUILTIN_EXPONENT_P (fcode))
7219 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7220 const REAL_VALUE_TYPE third_trunc =
7221 real_value_truncate (TYPE_MODE (type), dconst_third ());
7222 arg = fold_build2_loc (loc, MULT_EXPR, type,
7223 CALL_EXPR_ARG (arg, 0),
7224 build_real (type, third_trunc));
7225 return build_call_expr_loc (loc, expfn, 1, arg);
7228 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7229 if (BUILTIN_SQRT_P (fcode))
7231 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7233 if (powfn)
7235 tree arg0 = CALL_EXPR_ARG (arg, 0);
7236 tree tree_root;
7237 REAL_VALUE_TYPE dconstroot = dconst_third ();
7239 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7240 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7241 tree_root = build_real (type, dconstroot);
7242 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7246 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7247 if (BUILTIN_CBRT_P (fcode))
7249 tree arg0 = CALL_EXPR_ARG (arg, 0);
7250 if (tree_expr_nonnegative_p (arg0))
7252 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7254 if (powfn)
7256 tree tree_root;
7257 REAL_VALUE_TYPE dconstroot;
7259 real_arithmetic (&dconstroot, MULT_EXPR,
7260 dconst_third_ptr (), dconst_third_ptr ());
7261 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7262 tree_root = build_real (type, dconstroot);
7263 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7268 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7269 if (fcode == BUILT_IN_POW
7270 || fcode == BUILT_IN_POWF
7271 || fcode == BUILT_IN_POWL)
7273 tree arg00 = CALL_EXPR_ARG (arg, 0);
7274 tree arg01 = CALL_EXPR_ARG (arg, 1);
7275 if (tree_expr_nonnegative_p (arg00))
7277 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7278 const REAL_VALUE_TYPE dconstroot
7279 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7280 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7281 build_real (type, dconstroot));
7282 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7286 return NULL_TREE;
7289 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7290 TYPE is the type of the return value. Return NULL_TREE if no
7291 simplification can be made. */
7293 static tree
7294 fold_builtin_cos (location_t loc,
7295 tree arg, tree type, tree fndecl)
7297 tree res, narg;
7299 if (!validate_arg (arg, REAL_TYPE))
7300 return NULL_TREE;
7302 /* Calculate the result when the argument is a constant. */
7303 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7304 return res;
7306 /* Optimize cos(-x) into cos (x). */
7307 if ((narg = fold_strip_sign_ops (arg)))
7308 return build_call_expr_loc (loc, fndecl, 1, narg);
7310 return NULL_TREE;
7313 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7314 Return NULL_TREE if no simplification can be made. */
7316 static tree
7317 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7319 if (validate_arg (arg, REAL_TYPE))
7321 tree res, narg;
7323 /* Calculate the result when the argument is a constant. */
7324 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7325 return res;
7327 /* Optimize cosh(-x) into cosh (x). */
7328 if ((narg = fold_strip_sign_ops (arg)))
7329 return build_call_expr_loc (loc, fndecl, 1, narg);
7332 return NULL_TREE;
7335 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7336 argument ARG. TYPE is the type of the return value. Return
7337 NULL_TREE if no simplification can be made. */
7339 static tree
7340 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7341 bool hyper)
7343 if (validate_arg (arg, COMPLEX_TYPE)
7344 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7346 tree tmp;
7348 /* Calculate the result when the argument is a constant. */
7349 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7350 return tmp;
7352 /* Optimize fn(-x) into fn(x). */
7353 if ((tmp = fold_strip_sign_ops (arg)))
7354 return build_call_expr_loc (loc, fndecl, 1, tmp);
7357 return NULL_TREE;
7360 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7361 Return NULL_TREE if no simplification can be made. */
7363 static tree
7364 fold_builtin_tan (tree arg, tree type)
7366 enum built_in_function fcode;
7367 tree res;
7369 if (!validate_arg (arg, REAL_TYPE))
7370 return NULL_TREE;
7372 /* Calculate the result when the argument is a constant. */
7373 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7374 return res;
7376 /* Optimize tan(atan(x)) = x. */
7377 fcode = builtin_mathfn_code (arg);
7378 if (flag_unsafe_math_optimizations
7379 && (fcode == BUILT_IN_ATAN
7380 || fcode == BUILT_IN_ATANF
7381 || fcode == BUILT_IN_ATANL))
7382 return CALL_EXPR_ARG (arg, 0);
7384 return NULL_TREE;
7387 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7388 NULL_TREE if no simplification can be made. */
7390 static tree
7391 fold_builtin_sincos (location_t loc,
7392 tree arg0, tree arg1, tree arg2)
7394 tree type;
7395 tree res, fn, call;
7397 if (!validate_arg (arg0, REAL_TYPE)
7398 || !validate_arg (arg1, POINTER_TYPE)
7399 || !validate_arg (arg2, POINTER_TYPE))
7400 return NULL_TREE;
7402 type = TREE_TYPE (arg0);
7404 /* Calculate the result when the argument is a constant. */
7405 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7406 return res;
7408 /* Canonicalize sincos to cexpi. */
7409 if (!TARGET_C99_FUNCTIONS)
7410 return NULL_TREE;
7411 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7412 if (!fn)
7413 return NULL_TREE;
7415 call = build_call_expr_loc (loc, fn, 1, arg0);
7416 call = builtin_save_expr (call);
7418 return build2 (COMPOUND_EXPR, void_type_node,
7419 build2 (MODIFY_EXPR, void_type_node,
7420 build_fold_indirect_ref_loc (loc, arg1),
7421 build1 (IMAGPART_EXPR, type, call)),
7422 build2 (MODIFY_EXPR, void_type_node,
7423 build_fold_indirect_ref_loc (loc, arg2),
7424 build1 (REALPART_EXPR, type, call)));
7427 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7428 NULL_TREE if no simplification can be made. */
7430 static tree
7431 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7433 tree rtype;
7434 tree realp, imagp, ifn;
7435 tree res;
7437 if (!validate_arg (arg0, COMPLEX_TYPE)
7438 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7439 return NULL_TREE;
7441 /* Calculate the result when the argument is a constant. */
7442 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7443 return res;
7445 rtype = TREE_TYPE (TREE_TYPE (arg0));
7447 /* In case we can figure out the real part of arg0 and it is constant zero
7448 fold to cexpi. */
7449 if (!TARGET_C99_FUNCTIONS)
7450 return NULL_TREE;
7451 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7452 if (!ifn)
7453 return NULL_TREE;
7455 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7456 && real_zerop (realp))
7458 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7459 return build_call_expr_loc (loc, ifn, 1, narg);
7462 /* In case we can easily decompose real and imaginary parts split cexp
7463 to exp (r) * cexpi (i). */
7464 if (flag_unsafe_math_optimizations
7465 && realp)
7467 tree rfn, rcall, icall;
7469 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7470 if (!rfn)
7471 return NULL_TREE;
7473 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7474 if (!imagp)
7475 return NULL_TREE;
7477 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7478 icall = builtin_save_expr (icall);
7479 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7480 rcall = builtin_save_expr (rcall);
7481 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7482 fold_build2_loc (loc, MULT_EXPR, rtype,
7483 rcall,
7484 fold_build1_loc (loc, REALPART_EXPR,
7485 rtype, icall)),
7486 fold_build2_loc (loc, MULT_EXPR, rtype,
7487 rcall,
7488 fold_build1_loc (loc, IMAGPART_EXPR,
7489 rtype, icall)));
7492 return NULL_TREE;
7495 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7496 Return NULL_TREE if no simplification can be made. */
7498 static tree
7499 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7501 if (!validate_arg (arg, REAL_TYPE))
7502 return NULL_TREE;
7504 /* Optimize trunc of constant value. */
7505 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7507 REAL_VALUE_TYPE r, x;
7508 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7510 x = TREE_REAL_CST (arg);
7511 real_trunc (&r, TYPE_MODE (type), &x);
7512 return build_real (type, r);
7515 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7518 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7519 Return NULL_TREE if no simplification can be made. */
7521 static tree
7522 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7524 if (!validate_arg (arg, REAL_TYPE))
7525 return NULL_TREE;
7527 /* Optimize floor of constant value. */
7528 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7530 REAL_VALUE_TYPE x;
7532 x = TREE_REAL_CST (arg);
7533 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7535 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7536 REAL_VALUE_TYPE r;
7538 real_floor (&r, TYPE_MODE (type), &x);
7539 return build_real (type, r);
7543 /* Fold floor (x) where x is nonnegative to trunc (x). */
7544 if (tree_expr_nonnegative_p (arg))
7546 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7547 if (truncfn)
7548 return build_call_expr_loc (loc, truncfn, 1, arg);
7551 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7554 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7555 Return NULL_TREE if no simplification can be made. */
7557 static tree
7558 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7560 if (!validate_arg (arg, REAL_TYPE))
7561 return NULL_TREE;
7563 /* Optimize ceil of constant value. */
7564 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7566 REAL_VALUE_TYPE x;
7568 x = TREE_REAL_CST (arg);
7569 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7571 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7572 REAL_VALUE_TYPE r;
7574 real_ceil (&r, TYPE_MODE (type), &x);
7575 return build_real (type, r);
7579 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7582 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7583 Return NULL_TREE if no simplification can be made. */
7585 static tree
7586 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7588 if (!validate_arg (arg, REAL_TYPE))
7589 return NULL_TREE;
7591 /* Optimize round of constant value. */
7592 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7594 REAL_VALUE_TYPE x;
7596 x = TREE_REAL_CST (arg);
7597 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7599 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7600 REAL_VALUE_TYPE r;
7602 real_round (&r, TYPE_MODE (type), &x);
7603 return build_real (type, r);
7607 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7610 /* Fold function call to builtin lround, lroundf or lroundl (or the
7611 corresponding long long versions) and other rounding functions. ARG
7612 is the argument to the call. Return NULL_TREE if no simplification
7613 can be made. */
7615 static tree
7616 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7618 if (!validate_arg (arg, REAL_TYPE))
7619 return NULL_TREE;
7621 /* Optimize lround of constant value. */
7622 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7624 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7626 if (real_isfinite (&x))
7628 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7629 tree ftype = TREE_TYPE (arg);
7630 double_int val;
7631 REAL_VALUE_TYPE r;
7633 switch (DECL_FUNCTION_CODE (fndecl))
7635 CASE_FLT_FN (BUILT_IN_LFLOOR):
7636 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7637 real_floor (&r, TYPE_MODE (ftype), &x);
7638 break;
7640 CASE_FLT_FN (BUILT_IN_LCEIL):
7641 CASE_FLT_FN (BUILT_IN_LLCEIL):
7642 real_ceil (&r, TYPE_MODE (ftype), &x);
7643 break;
7645 CASE_FLT_FN (BUILT_IN_LROUND):
7646 CASE_FLT_FN (BUILT_IN_LLROUND):
7647 real_round (&r, TYPE_MODE (ftype), &x);
7648 break;
7650 default:
7651 gcc_unreachable ();
7654 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
7655 if (double_int_fits_to_tree_p (itype, val))
7656 return double_int_to_tree (itype, val);
7660 switch (DECL_FUNCTION_CODE (fndecl))
7662 CASE_FLT_FN (BUILT_IN_LFLOOR):
7663 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7664 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7665 if (tree_expr_nonnegative_p (arg))
7666 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7667 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7668 break;
7669 default:;
7672 return fold_fixed_mathfn (loc, fndecl, arg);
7675 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7676 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7677 the argument to the call. Return NULL_TREE if no simplification can
7678 be made. */
7680 static tree
7681 fold_builtin_bitop (tree fndecl, tree arg)
7683 if (!validate_arg (arg, INTEGER_TYPE))
7684 return NULL_TREE;
7686 /* Optimize for constant argument. */
7687 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7689 HOST_WIDE_INT hi, width, result;
7690 unsigned HOST_WIDE_INT lo;
7691 tree type;
7693 type = TREE_TYPE (arg);
7694 width = TYPE_PRECISION (type);
7695 lo = TREE_INT_CST_LOW (arg);
7697 /* Clear all the bits that are beyond the type's precision. */
7698 if (width > HOST_BITS_PER_WIDE_INT)
7700 hi = TREE_INT_CST_HIGH (arg);
7701 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7702 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7704 else
7706 hi = 0;
7707 if (width < HOST_BITS_PER_WIDE_INT)
7708 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7711 switch (DECL_FUNCTION_CODE (fndecl))
7713 CASE_INT_FN (BUILT_IN_FFS):
7714 if (lo != 0)
7715 result = exact_log2 (lo & -lo) + 1;
7716 else if (hi != 0)
7717 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7718 else
7719 result = 0;
7720 break;
7722 CASE_INT_FN (BUILT_IN_CLZ):
7723 if (hi != 0)
7724 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7725 else if (lo != 0)
7726 result = width - floor_log2 (lo) - 1;
7727 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7728 result = width;
7729 break;
7731 CASE_INT_FN (BUILT_IN_CTZ):
7732 if (lo != 0)
7733 result = exact_log2 (lo & -lo);
7734 else if (hi != 0)
7735 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7736 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7737 result = width;
7738 break;
7740 CASE_INT_FN (BUILT_IN_POPCOUNT):
7741 result = 0;
7742 while (lo)
7743 result++, lo &= lo - 1;
7744 while (hi)
7745 result++, hi &= hi - 1;
7746 break;
7748 CASE_INT_FN (BUILT_IN_PARITY):
7749 result = 0;
7750 while (lo)
7751 result++, lo &= lo - 1;
7752 while (hi)
7753 result++, hi &= hi - 1;
7754 result &= 1;
7755 break;
7757 default:
7758 gcc_unreachable ();
7761 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7764 return NULL_TREE;
7767 /* Fold function call to builtin_bswap and the long and long long
7768 variants. Return NULL_TREE if no simplification can be made. */
7769 static tree
7770 fold_builtin_bswap (tree fndecl, tree arg)
7772 if (! validate_arg (arg, INTEGER_TYPE))
7773 return NULL_TREE;
7775 /* Optimize constant value. */
7776 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7778 HOST_WIDE_INT hi, width, r_hi = 0;
7779 unsigned HOST_WIDE_INT lo, r_lo = 0;
7780 tree type;
7782 type = TREE_TYPE (arg);
7783 width = TYPE_PRECISION (type);
7784 lo = TREE_INT_CST_LOW (arg);
7785 hi = TREE_INT_CST_HIGH (arg);
7787 switch (DECL_FUNCTION_CODE (fndecl))
7789 case BUILT_IN_BSWAP32:
7790 case BUILT_IN_BSWAP64:
7792 int s;
7794 for (s = 0; s < width; s += 8)
7796 int d = width - s - 8;
7797 unsigned HOST_WIDE_INT byte;
7799 if (s < HOST_BITS_PER_WIDE_INT)
7800 byte = (lo >> s) & 0xff;
7801 else
7802 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7804 if (d < HOST_BITS_PER_WIDE_INT)
7805 r_lo |= byte << d;
7806 else
7807 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7811 break;
7813 default:
7814 gcc_unreachable ();
7817 if (width < HOST_BITS_PER_WIDE_INT)
7818 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7819 else
7820 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7823 return NULL_TREE;
7826 /* A subroutine of fold_builtin to fold the various logarithmic
7827 functions. Return NULL_TREE if no simplification can me made.
7828 FUNC is the corresponding MPFR logarithm function. */
7830 static tree
7831 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7832 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7834 if (validate_arg (arg, REAL_TYPE))
7836 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7837 tree res;
7838 const enum built_in_function fcode = builtin_mathfn_code (arg);
7840 /* Calculate the result when the argument is a constant. */
7841 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7842 return res;
7844 /* Special case, optimize logN(expN(x)) = x. */
7845 if (flag_unsafe_math_optimizations
7846 && ((func == mpfr_log
7847 && (fcode == BUILT_IN_EXP
7848 || fcode == BUILT_IN_EXPF
7849 || fcode == BUILT_IN_EXPL))
7850 || (func == mpfr_log2
7851 && (fcode == BUILT_IN_EXP2
7852 || fcode == BUILT_IN_EXP2F
7853 || fcode == BUILT_IN_EXP2L))
7854 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7855 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7857 /* Optimize logN(func()) for various exponential functions. We
7858 want to determine the value "x" and the power "exponent" in
7859 order to transform logN(x**exponent) into exponent*logN(x). */
7860 if (flag_unsafe_math_optimizations)
7862 tree exponent = 0, x = 0;
7864 switch (fcode)
7866 CASE_FLT_FN (BUILT_IN_EXP):
7867 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7868 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7869 dconst_e ()));
7870 exponent = CALL_EXPR_ARG (arg, 0);
7871 break;
7872 CASE_FLT_FN (BUILT_IN_EXP2):
7873 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7874 x = build_real (type, dconst2);
7875 exponent = CALL_EXPR_ARG (arg, 0);
7876 break;
7877 CASE_FLT_FN (BUILT_IN_EXP10):
7878 CASE_FLT_FN (BUILT_IN_POW10):
7879 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7881 REAL_VALUE_TYPE dconst10;
7882 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7883 x = build_real (type, dconst10);
7885 exponent = CALL_EXPR_ARG (arg, 0);
7886 break;
7887 CASE_FLT_FN (BUILT_IN_SQRT):
7888 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7889 x = CALL_EXPR_ARG (arg, 0);
7890 exponent = build_real (type, dconsthalf);
7891 break;
7892 CASE_FLT_FN (BUILT_IN_CBRT):
7893 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7894 x = CALL_EXPR_ARG (arg, 0);
7895 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7896 dconst_third ()));
7897 break;
7898 CASE_FLT_FN (BUILT_IN_POW):
7899 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7900 x = CALL_EXPR_ARG (arg, 0);
7901 exponent = CALL_EXPR_ARG (arg, 1);
7902 break;
7903 default:
7904 break;
7907 /* Now perform the optimization. */
7908 if (x && exponent)
7910 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7911 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7916 return NULL_TREE;
7919 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7920 NULL_TREE if no simplification can be made. */
7922 static tree
7923 fold_builtin_hypot (location_t loc, tree fndecl,
7924 tree arg0, tree arg1, tree type)
7926 tree res, narg0, narg1;
7928 if (!validate_arg (arg0, REAL_TYPE)
7929 || !validate_arg (arg1, REAL_TYPE))
7930 return NULL_TREE;
7932 /* Calculate the result when the argument is a constant. */
7933 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7934 return res;
7936 /* If either argument to hypot has a negate or abs, strip that off.
7937 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7938 narg0 = fold_strip_sign_ops (arg0);
7939 narg1 = fold_strip_sign_ops (arg1);
7940 if (narg0 || narg1)
7942 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7943 narg1 ? narg1 : arg1);
7946 /* If either argument is zero, hypot is fabs of the other. */
7947 if (real_zerop (arg0))
7948 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7949 else if (real_zerop (arg1))
7950 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7952 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7953 if (flag_unsafe_math_optimizations
7954 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7956 const REAL_VALUE_TYPE sqrt2_trunc
7957 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7958 return fold_build2_loc (loc, MULT_EXPR, type,
7959 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7960 build_real (type, sqrt2_trunc));
7963 return NULL_TREE;
7967 /* Fold a builtin function call to pow, powf, or powl. Return
7968 NULL_TREE if no simplification can be made. */
7969 static tree
7970 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7972 tree res;
7974 if (!validate_arg (arg0, REAL_TYPE)
7975 || !validate_arg (arg1, REAL_TYPE))
7976 return NULL_TREE;
7978 /* Calculate the result when the argument is a constant. */
7979 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7980 return res;
7982 /* Optimize pow(1.0,y) = 1.0. */
7983 if (real_onep (arg0))
7984 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7986 if (TREE_CODE (arg1) == REAL_CST
7987 && !TREE_OVERFLOW (arg1))
7989 REAL_VALUE_TYPE cint;
7990 REAL_VALUE_TYPE c;
7991 HOST_WIDE_INT n;
7993 c = TREE_REAL_CST (arg1);
7995 /* Optimize pow(x,0.0) = 1.0. */
7996 if (REAL_VALUES_EQUAL (c, dconst0))
7997 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7998 arg0);
8000 /* Optimize pow(x,1.0) = x. */
8001 if (REAL_VALUES_EQUAL (c, dconst1))
8002 return arg0;
8004 /* Optimize pow(x,-1.0) = 1.0/x. */
8005 if (REAL_VALUES_EQUAL (c, dconstm1))
8006 return fold_build2_loc (loc, RDIV_EXPR, type,
8007 build_real (type, dconst1), arg0);
8009 /* Optimize pow(x,0.5) = sqrt(x). */
8010 if (flag_unsafe_math_optimizations
8011 && REAL_VALUES_EQUAL (c, dconsthalf))
8013 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8015 if (sqrtfn != NULL_TREE)
8016 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8019 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8020 if (flag_unsafe_math_optimizations)
8022 const REAL_VALUE_TYPE dconstroot
8023 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8025 if (REAL_VALUES_EQUAL (c, dconstroot))
8027 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8028 if (cbrtfn != NULL_TREE)
8029 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8033 /* Check for an integer exponent. */
8034 n = real_to_integer (&c);
8035 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8036 if (real_identical (&c, &cint))
8038 /* Attempt to evaluate pow at compile-time, unless this should
8039 raise an exception. */
8040 if (TREE_CODE (arg0) == REAL_CST
8041 && !TREE_OVERFLOW (arg0)
8042 && (n > 0
8043 || (!flag_trapping_math && !flag_errno_math)
8044 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8046 REAL_VALUE_TYPE x;
8047 bool inexact;
8049 x = TREE_REAL_CST (arg0);
8050 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8051 if (flag_unsafe_math_optimizations || !inexact)
8052 return build_real (type, x);
8055 /* Strip sign ops from even integer powers. */
8056 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8058 tree narg0 = fold_strip_sign_ops (arg0);
8059 if (narg0)
8060 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8065 if (flag_unsafe_math_optimizations)
8067 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8069 /* Optimize pow(expN(x),y) = expN(x*y). */
8070 if (BUILTIN_EXPONENT_P (fcode))
8072 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8073 tree arg = CALL_EXPR_ARG (arg0, 0);
8074 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8075 return build_call_expr_loc (loc, expfn, 1, arg);
8078 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8079 if (BUILTIN_SQRT_P (fcode))
8081 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8082 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8083 build_real (type, dconsthalf));
8084 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8087 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8088 if (BUILTIN_CBRT_P (fcode))
8090 tree arg = CALL_EXPR_ARG (arg0, 0);
8091 if (tree_expr_nonnegative_p (arg))
8093 const REAL_VALUE_TYPE dconstroot
8094 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8095 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8096 build_real (type, dconstroot));
8097 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8101 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8102 if (fcode == BUILT_IN_POW
8103 || fcode == BUILT_IN_POWF
8104 || fcode == BUILT_IN_POWL)
8106 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8107 if (tree_expr_nonnegative_p (arg00))
8109 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8110 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8111 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8116 return NULL_TREE;
8119 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8120 Return NULL_TREE if no simplification can be made. */
8121 static tree
8122 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8123 tree arg0, tree arg1, tree type)
8125 if (!validate_arg (arg0, REAL_TYPE)
8126 || !validate_arg (arg1, INTEGER_TYPE))
8127 return NULL_TREE;
8129 /* Optimize pow(1.0,y) = 1.0. */
8130 if (real_onep (arg0))
8131 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8133 if (host_integerp (arg1, 0))
8135 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8137 /* Evaluate powi at compile-time. */
8138 if (TREE_CODE (arg0) == REAL_CST
8139 && !TREE_OVERFLOW (arg0))
8141 REAL_VALUE_TYPE x;
8142 x = TREE_REAL_CST (arg0);
8143 real_powi (&x, TYPE_MODE (type), &x, c);
8144 return build_real (type, x);
8147 /* Optimize pow(x,0) = 1.0. */
8148 if (c == 0)
8149 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8150 arg0);
8152 /* Optimize pow(x,1) = x. */
8153 if (c == 1)
8154 return arg0;
8156 /* Optimize pow(x,-1) = 1.0/x. */
8157 if (c == -1)
8158 return fold_build2_loc (loc, RDIV_EXPR, type,
8159 build_real (type, dconst1), arg0);
8162 return NULL_TREE;
8165 /* A subroutine of fold_builtin to fold the various exponent
8166 functions. Return NULL_TREE if no simplification can be made.
8167 FUNC is the corresponding MPFR exponent function. */
8169 static tree
8170 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8171 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8173 if (validate_arg (arg, REAL_TYPE))
8175 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8176 tree res;
8178 /* Calculate the result when the argument is a constant. */
8179 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8180 return res;
8182 /* Optimize expN(logN(x)) = x. */
8183 if (flag_unsafe_math_optimizations)
8185 const enum built_in_function fcode = builtin_mathfn_code (arg);
8187 if ((func == mpfr_exp
8188 && (fcode == BUILT_IN_LOG
8189 || fcode == BUILT_IN_LOGF
8190 || fcode == BUILT_IN_LOGL))
8191 || (func == mpfr_exp2
8192 && (fcode == BUILT_IN_LOG2
8193 || fcode == BUILT_IN_LOG2F
8194 || fcode == BUILT_IN_LOG2L))
8195 || (func == mpfr_exp10
8196 && (fcode == BUILT_IN_LOG10
8197 || fcode == BUILT_IN_LOG10F
8198 || fcode == BUILT_IN_LOG10L)))
8199 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8203 return NULL_TREE;
8206 /* Return true if VAR is a VAR_DECL or a component thereof. */
8208 static bool
8209 var_decl_component_p (tree var)
8211 tree inner = var;
8212 while (handled_component_p (inner))
8213 inner = TREE_OPERAND (inner, 0);
8214 return SSA_VAR_P (inner);
8217 /* Fold function call to builtin memset. Return
8218 NULL_TREE if no simplification can be made. */
8220 static tree
8221 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8222 tree type, bool ignore)
8224 tree var, ret, etype;
8225 unsigned HOST_WIDE_INT length, cval;
8227 if (! validate_arg (dest, POINTER_TYPE)
8228 || ! validate_arg (c, INTEGER_TYPE)
8229 || ! validate_arg (len, INTEGER_TYPE))
8230 return NULL_TREE;
8232 if (! host_integerp (len, 1))
8233 return NULL_TREE;
8235 /* If the LEN parameter is zero, return DEST. */
8236 if (integer_zerop (len))
8237 return omit_one_operand_loc (loc, type, dest, c);
8239 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8240 return NULL_TREE;
8242 var = dest;
8243 STRIP_NOPS (var);
8244 if (TREE_CODE (var) != ADDR_EXPR)
8245 return NULL_TREE;
8247 var = TREE_OPERAND (var, 0);
8248 if (TREE_THIS_VOLATILE (var))
8249 return NULL_TREE;
8251 etype = TREE_TYPE (var);
8252 if (TREE_CODE (etype) == ARRAY_TYPE)
8253 etype = TREE_TYPE (etype);
8255 if (!INTEGRAL_TYPE_P (etype)
8256 && !POINTER_TYPE_P (etype))
8257 return NULL_TREE;
8259 if (! var_decl_component_p (var))
8260 return NULL_TREE;
8262 length = tree_low_cst (len, 1);
8263 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8264 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8265 < (int) length)
8266 return NULL_TREE;
8268 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8269 return NULL_TREE;
8271 if (integer_zerop (c))
8272 cval = 0;
8273 else
8275 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8276 return NULL_TREE;
8278 cval = tree_low_cst (c, 1);
8279 cval &= 0xff;
8280 cval |= cval << 8;
8281 cval |= cval << 16;
8282 cval |= (cval << 31) << 1;
8285 ret = build_int_cst_type (etype, cval);
8286 var = build_fold_indirect_ref_loc (loc,
8287 fold_convert_loc (loc,
8288 build_pointer_type (etype),
8289 dest));
8290 ret = build2 (MODIFY_EXPR, etype, var, ret);
8291 if (ignore)
8292 return ret;
8294 return omit_one_operand_loc (loc, type, dest, ret);
8297 /* Fold function call to builtin memset. Return
8298 NULL_TREE if no simplification can be made. */
8300 static tree
8301 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8303 if (! validate_arg (dest, POINTER_TYPE)
8304 || ! validate_arg (size, INTEGER_TYPE))
8305 return NULL_TREE;
8307 if (!ignore)
8308 return NULL_TREE;
8310 /* New argument list transforming bzero(ptr x, int y) to
8311 memset(ptr x, int 0, size_t y). This is done this way
8312 so that if it isn't expanded inline, we fallback to
8313 calling bzero instead of memset. */
8315 return fold_builtin_memset (loc, dest, integer_zero_node,
8316 fold_convert_loc (loc, sizetype, size),
8317 void_type_node, ignore);
8320 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8321 NULL_TREE if no simplification can be made.
8322 If ENDP is 0, return DEST (like memcpy).
8323 If ENDP is 1, return DEST+LEN (like mempcpy).
8324 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8325 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8326 (memmove). */
8328 static tree
8329 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8330 tree len, tree type, bool ignore, int endp)
8332 tree destvar, srcvar, expr;
8334 if (! validate_arg (dest, POINTER_TYPE)
8335 || ! validate_arg (src, POINTER_TYPE)
8336 || ! validate_arg (len, INTEGER_TYPE))
8337 return NULL_TREE;
8339 /* If the LEN parameter is zero, return DEST. */
8340 if (integer_zerop (len))
8341 return omit_one_operand_loc (loc, type, dest, src);
8343 /* If SRC and DEST are the same (and not volatile), return
8344 DEST{,+LEN,+LEN-1}. */
8345 if (operand_equal_p (src, dest, 0))
8346 expr = len;
8347 else
8349 tree srctype, desttype;
8350 int src_align, dest_align;
8351 tree off0;
8353 if (endp == 3)
8355 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8356 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8358 /* Both DEST and SRC must be pointer types.
8359 ??? This is what old code did. Is the testing for pointer types
8360 really mandatory?
8362 If either SRC is readonly or length is 1, we can use memcpy. */
8363 if (!dest_align || !src_align)
8364 return NULL_TREE;
8365 if (readonly_data_expr (src)
8366 || (host_integerp (len, 1)
8367 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8368 >= tree_low_cst (len, 1))))
8370 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8371 if (!fn)
8372 return NULL_TREE;
8373 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8376 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8377 if (TREE_CODE (src) == ADDR_EXPR
8378 && TREE_CODE (dest) == ADDR_EXPR)
8380 tree src_base, dest_base, fn;
8381 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8382 HOST_WIDE_INT size = -1;
8383 HOST_WIDE_INT maxsize = -1;
8385 srcvar = TREE_OPERAND (src, 0);
8386 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8387 &size, &maxsize);
8388 destvar = TREE_OPERAND (dest, 0);
8389 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8390 &size, &maxsize);
8391 if (host_integerp (len, 1))
8392 maxsize = tree_low_cst (len, 1);
8393 else
8394 maxsize = -1;
8395 src_offset /= BITS_PER_UNIT;
8396 dest_offset /= BITS_PER_UNIT;
8397 if (SSA_VAR_P (src_base)
8398 && SSA_VAR_P (dest_base))
8400 if (operand_equal_p (src_base, dest_base, 0)
8401 && ranges_overlap_p (src_offset, maxsize,
8402 dest_offset, maxsize))
8403 return NULL_TREE;
8405 else if (TREE_CODE (src_base) == MEM_REF
8406 && TREE_CODE (dest_base) == MEM_REF)
8408 double_int off;
8409 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8410 TREE_OPERAND (dest_base, 0), 0))
8411 return NULL_TREE;
8412 off = double_int_add (mem_ref_offset (src_base),
8413 shwi_to_double_int (src_offset));
8414 if (!double_int_fits_in_shwi_p (off))
8415 return NULL_TREE;
8416 src_offset = off.low;
8417 off = double_int_add (mem_ref_offset (dest_base),
8418 shwi_to_double_int (dest_offset));
8419 if (!double_int_fits_in_shwi_p (off))
8420 return NULL_TREE;
8421 dest_offset = off.low;
8422 if (ranges_overlap_p (src_offset, maxsize,
8423 dest_offset, maxsize))
8424 return NULL_TREE;
8426 else
8427 return NULL_TREE;
8429 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8430 if (!fn)
8431 return NULL_TREE;
8432 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8434 return NULL_TREE;
8437 if (!host_integerp (len, 0))
8438 return NULL_TREE;
8439 /* FIXME:
8440 This logic lose for arguments like (type *)malloc (sizeof (type)),
8441 since we strip the casts of up to VOID return value from malloc.
8442 Perhaps we ought to inherit type from non-VOID argument here? */
8443 STRIP_NOPS (src);
8444 STRIP_NOPS (dest);
8445 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8446 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8448 tree tem = TREE_OPERAND (src, 0);
8449 STRIP_NOPS (tem);
8450 if (tem != TREE_OPERAND (src, 0))
8451 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8453 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8455 tree tem = TREE_OPERAND (dest, 0);
8456 STRIP_NOPS (tem);
8457 if (tem != TREE_OPERAND (dest, 0))
8458 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8460 srctype = TREE_TYPE (TREE_TYPE (src));
8461 if (srctype
8462 && TREE_CODE (srctype) == ARRAY_TYPE
8463 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8465 srctype = TREE_TYPE (srctype);
8466 STRIP_NOPS (src);
8467 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8469 desttype = TREE_TYPE (TREE_TYPE (dest));
8470 if (desttype
8471 && TREE_CODE (desttype) == ARRAY_TYPE
8472 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8474 desttype = TREE_TYPE (desttype);
8475 STRIP_NOPS (dest);
8476 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8478 if (!srctype || !desttype
8479 || TREE_ADDRESSABLE (srctype)
8480 || TREE_ADDRESSABLE (desttype)
8481 || !TYPE_SIZE_UNIT (srctype)
8482 || !TYPE_SIZE_UNIT (desttype)
8483 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8484 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST)
8485 return NULL_TREE;
8487 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8488 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8489 if (dest_align < (int) TYPE_ALIGN (desttype)
8490 || src_align < (int) TYPE_ALIGN (srctype))
8491 return NULL_TREE;
8493 if (!ignore)
8494 dest = builtin_save_expr (dest);
8496 /* Build accesses at offset zero with a ref-all character type. */
8497 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8498 ptr_mode, true), 0);
8500 destvar = dest;
8501 STRIP_NOPS (destvar);
8502 if (TREE_CODE (destvar) == ADDR_EXPR
8503 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8504 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8505 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8506 else
8507 destvar = NULL_TREE;
8509 srcvar = src;
8510 STRIP_NOPS (srcvar);
8511 if (TREE_CODE (srcvar) == ADDR_EXPR
8512 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8513 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8514 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8515 srcvar, off0);
8516 else
8517 srcvar = NULL_TREE;
8519 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8520 return NULL_TREE;
8522 if (srcvar == NULL_TREE)
8524 STRIP_NOPS (src);
8525 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8527 else if (destvar == NULL_TREE)
8529 STRIP_NOPS (dest);
8530 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8533 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8536 if (ignore)
8537 return expr;
8539 if (endp == 0 || endp == 3)
8540 return omit_one_operand_loc (loc, type, dest, expr);
8542 if (expr == len)
8543 expr = NULL_TREE;
8545 if (endp == 2)
8546 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8547 ssize_int (1));
8549 len = fold_convert_loc (loc, sizetype, len);
8550 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8551 dest = fold_convert_loc (loc, type, dest);
8552 if (expr)
8553 dest = omit_one_operand_loc (loc, type, dest, expr);
8554 return dest;
8557 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8558 If LEN is not NULL, it represents the length of the string to be
8559 copied. Return NULL_TREE if no simplification can be made. */
8561 tree
8562 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8564 tree fn;
8566 if (!validate_arg (dest, POINTER_TYPE)
8567 || !validate_arg (src, POINTER_TYPE))
8568 return NULL_TREE;
8570 /* If SRC and DEST are the same (and not volatile), return DEST. */
8571 if (operand_equal_p (src, dest, 0))
8572 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8574 if (optimize_function_for_size_p (cfun))
8575 return NULL_TREE;
8577 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8578 if (!fn)
8579 return NULL_TREE;
8581 if (!len)
8583 len = c_strlen (src, 1);
8584 if (! len || TREE_SIDE_EFFECTS (len))
8585 return NULL_TREE;
8588 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8589 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8590 build_call_expr_loc (loc, fn, 3, dest, src, len));
8593 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8594 Return NULL_TREE if no simplification can be made. */
8596 static tree
8597 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8599 tree fn, len, lenp1, call, type;
8601 if (!validate_arg (dest, POINTER_TYPE)
8602 || !validate_arg (src, POINTER_TYPE))
8603 return NULL_TREE;
8605 len = c_strlen (src, 1);
8606 if (!len
8607 || TREE_CODE (len) != INTEGER_CST)
8608 return NULL_TREE;
8610 if (optimize_function_for_size_p (cfun)
8611 /* If length is zero it's small enough. */
8612 && !integer_zerop (len))
8613 return NULL_TREE;
8615 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8616 if (!fn)
8617 return NULL_TREE;
8619 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8620 /* We use dest twice in building our expression. Save it from
8621 multiple expansions. */
8622 dest = builtin_save_expr (dest);
8623 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8625 type = TREE_TYPE (TREE_TYPE (fndecl));
8626 len = fold_convert_loc (loc, sizetype, len);
8627 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8628 dest = fold_convert_loc (loc, type, dest);
8629 dest = omit_one_operand_loc (loc, type, dest, call);
8630 return dest;
8633 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8634 If SLEN is not NULL, it represents the length of the source string.
8635 Return NULL_TREE if no simplification can be made. */
8637 tree
8638 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8639 tree src, tree len, tree slen)
8641 tree fn;
8643 if (!validate_arg (dest, POINTER_TYPE)
8644 || !validate_arg (src, POINTER_TYPE)
8645 || !validate_arg (len, INTEGER_TYPE))
8646 return NULL_TREE;
8648 /* If the LEN parameter is zero, return DEST. */
8649 if (integer_zerop (len))
8650 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8652 /* We can't compare slen with len as constants below if len is not a
8653 constant. */
8654 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8655 return NULL_TREE;
8657 if (!slen)
8658 slen = c_strlen (src, 1);
8660 /* Now, we must be passed a constant src ptr parameter. */
8661 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8662 return NULL_TREE;
8664 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8666 /* We do not support simplification of this case, though we do
8667 support it when expanding trees into RTL. */
8668 /* FIXME: generate a call to __builtin_memset. */
8669 if (tree_int_cst_lt (slen, len))
8670 return NULL_TREE;
8672 /* OK transform into builtin memcpy. */
8673 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8674 if (!fn)
8675 return NULL_TREE;
8676 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8677 build_call_expr_loc (loc, fn, 3, dest, src, len));
8680 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8681 arguments to the call, and TYPE is its return type.
8682 Return NULL_TREE if no simplification can be made. */
8684 static tree
8685 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8687 if (!validate_arg (arg1, POINTER_TYPE)
8688 || !validate_arg (arg2, INTEGER_TYPE)
8689 || !validate_arg (len, INTEGER_TYPE))
8690 return NULL_TREE;
8691 else
8693 const char *p1;
8695 if (TREE_CODE (arg2) != INTEGER_CST
8696 || !host_integerp (len, 1))
8697 return NULL_TREE;
8699 p1 = c_getstr (arg1);
8700 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8702 char c;
8703 const char *r;
8704 tree tem;
8706 if (target_char_cast (arg2, &c))
8707 return NULL_TREE;
8709 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8711 if (r == NULL)
8712 return build_int_cst (TREE_TYPE (arg1), 0);
8714 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8715 size_int (r - p1));
8716 return fold_convert_loc (loc, type, tem);
8718 return NULL_TREE;
8722 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8723 Return NULL_TREE if no simplification can be made. */
8725 static tree
8726 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8728 const char *p1, *p2;
8730 if (!validate_arg (arg1, POINTER_TYPE)
8731 || !validate_arg (arg2, POINTER_TYPE)
8732 || !validate_arg (len, INTEGER_TYPE))
8733 return NULL_TREE;
8735 /* If the LEN parameter is zero, return zero. */
8736 if (integer_zerop (len))
8737 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8738 arg1, arg2);
8740 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8741 if (operand_equal_p (arg1, arg2, 0))
8742 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8744 p1 = c_getstr (arg1);
8745 p2 = c_getstr (arg2);
8747 /* If all arguments are constant, and the value of len is not greater
8748 than the lengths of arg1 and arg2, evaluate at compile-time. */
8749 if (host_integerp (len, 1) && p1 && p2
8750 && compare_tree_int (len, strlen (p1) + 1) <= 0
8751 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8753 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8755 if (r > 0)
8756 return integer_one_node;
8757 else if (r < 0)
8758 return integer_minus_one_node;
8759 else
8760 return integer_zero_node;
8763 /* If len parameter is one, return an expression corresponding to
8764 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8765 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8767 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8768 tree cst_uchar_ptr_node
8769 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8771 tree ind1
8772 = fold_convert_loc (loc, integer_type_node,
8773 build1 (INDIRECT_REF, cst_uchar_node,
8774 fold_convert_loc (loc,
8775 cst_uchar_ptr_node,
8776 arg1)));
8777 tree ind2
8778 = fold_convert_loc (loc, integer_type_node,
8779 build1 (INDIRECT_REF, cst_uchar_node,
8780 fold_convert_loc (loc,
8781 cst_uchar_ptr_node,
8782 arg2)));
8783 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8786 return NULL_TREE;
8789 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8790 Return NULL_TREE if no simplification can be made. */
8792 static tree
8793 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8795 const char *p1, *p2;
8797 if (!validate_arg (arg1, POINTER_TYPE)
8798 || !validate_arg (arg2, POINTER_TYPE))
8799 return NULL_TREE;
8801 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8802 if (operand_equal_p (arg1, arg2, 0))
8803 return integer_zero_node;
8805 p1 = c_getstr (arg1);
8806 p2 = c_getstr (arg2);
8808 if (p1 && p2)
8810 const int i = strcmp (p1, p2);
8811 if (i < 0)
8812 return integer_minus_one_node;
8813 else if (i > 0)
8814 return integer_one_node;
8815 else
8816 return integer_zero_node;
8819 /* If the second arg is "", return *(const unsigned char*)arg1. */
8820 if (p2 && *p2 == '\0')
8822 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8823 tree cst_uchar_ptr_node
8824 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8826 return fold_convert_loc (loc, integer_type_node,
8827 build1 (INDIRECT_REF, cst_uchar_node,
8828 fold_convert_loc (loc,
8829 cst_uchar_ptr_node,
8830 arg1)));
8833 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8834 if (p1 && *p1 == '\0')
8836 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8837 tree cst_uchar_ptr_node
8838 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8840 tree temp
8841 = fold_convert_loc (loc, integer_type_node,
8842 build1 (INDIRECT_REF, cst_uchar_node,
8843 fold_convert_loc (loc,
8844 cst_uchar_ptr_node,
8845 arg2)));
8846 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8849 return NULL_TREE;
8852 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8853 Return NULL_TREE if no simplification can be made. */
8855 static tree
8856 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8858 const char *p1, *p2;
8860 if (!validate_arg (arg1, POINTER_TYPE)
8861 || !validate_arg (arg2, POINTER_TYPE)
8862 || !validate_arg (len, INTEGER_TYPE))
8863 return NULL_TREE;
8865 /* If the LEN parameter is zero, return zero. */
8866 if (integer_zerop (len))
8867 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8868 arg1, arg2);
8870 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8871 if (operand_equal_p (arg1, arg2, 0))
8872 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8874 p1 = c_getstr (arg1);
8875 p2 = c_getstr (arg2);
8877 if (host_integerp (len, 1) && p1 && p2)
8879 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8880 if (i > 0)
8881 return integer_one_node;
8882 else if (i < 0)
8883 return integer_minus_one_node;
8884 else
8885 return integer_zero_node;
8888 /* If the second arg is "", and the length is greater than zero,
8889 return *(const unsigned char*)arg1. */
8890 if (p2 && *p2 == '\0'
8891 && TREE_CODE (len) == INTEGER_CST
8892 && tree_int_cst_sgn (len) == 1)
8894 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8895 tree cst_uchar_ptr_node
8896 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8898 return fold_convert_loc (loc, integer_type_node,
8899 build1 (INDIRECT_REF, cst_uchar_node,
8900 fold_convert_loc (loc,
8901 cst_uchar_ptr_node,
8902 arg1)));
8905 /* If the first arg is "", and the length is greater than zero,
8906 return -*(const unsigned char*)arg2. */
8907 if (p1 && *p1 == '\0'
8908 && TREE_CODE (len) == INTEGER_CST
8909 && tree_int_cst_sgn (len) == 1)
8911 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8912 tree cst_uchar_ptr_node
8913 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8915 tree temp = fold_convert_loc (loc, integer_type_node,
8916 build1 (INDIRECT_REF, cst_uchar_node,
8917 fold_convert_loc (loc,
8918 cst_uchar_ptr_node,
8919 arg2)));
8920 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8923 /* If len parameter is one, return an expression corresponding to
8924 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8925 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8927 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8928 tree cst_uchar_ptr_node
8929 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8931 tree ind1 = fold_convert_loc (loc, integer_type_node,
8932 build1 (INDIRECT_REF, cst_uchar_node,
8933 fold_convert_loc (loc,
8934 cst_uchar_ptr_node,
8935 arg1)));
8936 tree ind2 = fold_convert_loc (loc, integer_type_node,
8937 build1 (INDIRECT_REF, cst_uchar_node,
8938 fold_convert_loc (loc,
8939 cst_uchar_ptr_node,
8940 arg2)));
8941 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8944 return NULL_TREE;
8947 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8948 ARG. Return NULL_TREE if no simplification can be made. */
8950 static tree
8951 fold_builtin_signbit (location_t loc, tree arg, tree type)
8953 tree temp;
8955 if (!validate_arg (arg, REAL_TYPE))
8956 return NULL_TREE;
8958 /* If ARG is a compile-time constant, determine the result. */
8959 if (TREE_CODE (arg) == REAL_CST
8960 && !TREE_OVERFLOW (arg))
8962 REAL_VALUE_TYPE c;
8964 c = TREE_REAL_CST (arg);
8965 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8966 return fold_convert_loc (loc, type, temp);
8969 /* If ARG is non-negative, the result is always zero. */
8970 if (tree_expr_nonnegative_p (arg))
8971 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8973 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8974 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8975 return fold_build2_loc (loc, LT_EXPR, type, arg,
8976 build_real (TREE_TYPE (arg), dconst0));
8978 return NULL_TREE;
8981 /* Fold function call to builtin copysign, copysignf or copysignl with
8982 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8983 be made. */
8985 static tree
8986 fold_builtin_copysign (location_t loc, tree fndecl,
8987 tree arg1, tree arg2, tree type)
8989 tree tem;
8991 if (!validate_arg (arg1, REAL_TYPE)
8992 || !validate_arg (arg2, REAL_TYPE))
8993 return NULL_TREE;
8995 /* copysign(X,X) is X. */
8996 if (operand_equal_p (arg1, arg2, 0))
8997 return fold_convert_loc (loc, type, arg1);
8999 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9000 if (TREE_CODE (arg1) == REAL_CST
9001 && TREE_CODE (arg2) == REAL_CST
9002 && !TREE_OVERFLOW (arg1)
9003 && !TREE_OVERFLOW (arg2))
9005 REAL_VALUE_TYPE c1, c2;
9007 c1 = TREE_REAL_CST (arg1);
9008 c2 = TREE_REAL_CST (arg2);
9009 /* c1.sign := c2.sign. */
9010 real_copysign (&c1, &c2);
9011 return build_real (type, c1);
9014 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9015 Remember to evaluate Y for side-effects. */
9016 if (tree_expr_nonnegative_p (arg2))
9017 return omit_one_operand_loc (loc, type,
9018 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9019 arg2);
9021 /* Strip sign changing operations for the first argument. */
9022 tem = fold_strip_sign_ops (arg1);
9023 if (tem)
9024 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9026 return NULL_TREE;
9029 /* Fold a call to builtin isascii with argument ARG. */
9031 static tree
9032 fold_builtin_isascii (location_t loc, tree arg)
9034 if (!validate_arg (arg, INTEGER_TYPE))
9035 return NULL_TREE;
9036 else
9038 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9039 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9040 build_int_cst (NULL_TREE,
9041 ~ (unsigned HOST_WIDE_INT) 0x7f));
9042 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9043 arg, integer_zero_node);
9047 /* Fold a call to builtin toascii with argument ARG. */
9049 static tree
9050 fold_builtin_toascii (location_t loc, tree arg)
9052 if (!validate_arg (arg, INTEGER_TYPE))
9053 return NULL_TREE;
9055 /* Transform toascii(c) -> (c & 0x7f). */
9056 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9057 build_int_cst (NULL_TREE, 0x7f));
9060 /* Fold a call to builtin isdigit with argument ARG. */
9062 static tree
9063 fold_builtin_isdigit (location_t loc, tree arg)
9065 if (!validate_arg (arg, INTEGER_TYPE))
9066 return NULL_TREE;
9067 else
9069 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9070 /* According to the C standard, isdigit is unaffected by locale.
9071 However, it definitely is affected by the target character set. */
9072 unsigned HOST_WIDE_INT target_digit0
9073 = lang_hooks.to_target_charset ('0');
9075 if (target_digit0 == 0)
9076 return NULL_TREE;
9078 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9079 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9080 build_int_cst (unsigned_type_node, target_digit0));
9081 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9082 build_int_cst (unsigned_type_node, 9));
9086 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9088 static tree
9089 fold_builtin_fabs (location_t loc, tree arg, tree type)
9091 if (!validate_arg (arg, REAL_TYPE))
9092 return NULL_TREE;
9094 arg = fold_convert_loc (loc, type, arg);
9095 if (TREE_CODE (arg) == REAL_CST)
9096 return fold_abs_const (arg, type);
9097 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9100 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9102 static tree
9103 fold_builtin_abs (location_t loc, tree arg, tree type)
9105 if (!validate_arg (arg, INTEGER_TYPE))
9106 return NULL_TREE;
9108 arg = fold_convert_loc (loc, type, arg);
9109 if (TREE_CODE (arg) == INTEGER_CST)
9110 return fold_abs_const (arg, type);
9111 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9114 /* Fold a call to builtin fmin or fmax. */
9116 static tree
9117 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9118 tree type, bool max)
9120 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9122 /* Calculate the result when the argument is a constant. */
9123 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9125 if (res)
9126 return res;
9128 /* If either argument is NaN, return the other one. Avoid the
9129 transformation if we get (and honor) a signalling NaN. Using
9130 omit_one_operand() ensures we create a non-lvalue. */
9131 if (TREE_CODE (arg0) == REAL_CST
9132 && real_isnan (&TREE_REAL_CST (arg0))
9133 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9134 || ! TREE_REAL_CST (arg0).signalling))
9135 return omit_one_operand_loc (loc, type, arg1, arg0);
9136 if (TREE_CODE (arg1) == REAL_CST
9137 && real_isnan (&TREE_REAL_CST (arg1))
9138 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9139 || ! TREE_REAL_CST (arg1).signalling))
9140 return omit_one_operand_loc (loc, type, arg0, arg1);
9142 /* Transform fmin/fmax(x,x) -> x. */
9143 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9144 return omit_one_operand_loc (loc, type, arg0, arg1);
9146 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9147 functions to return the numeric arg if the other one is NaN.
9148 These tree codes don't honor that, so only transform if
9149 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9150 handled, so we don't have to worry about it either. */
9151 if (flag_finite_math_only)
9152 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9153 fold_convert_loc (loc, type, arg0),
9154 fold_convert_loc (loc, type, arg1));
9156 return NULL_TREE;
9159 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9161 static tree
9162 fold_builtin_carg (location_t loc, tree arg, tree type)
9164 if (validate_arg (arg, COMPLEX_TYPE)
9165 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9167 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9169 if (atan2_fn)
9171 tree new_arg = builtin_save_expr (arg);
9172 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9173 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9174 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9178 return NULL_TREE;
9181 /* Fold a call to builtin logb/ilogb. */
9183 static tree
9184 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9186 if (! validate_arg (arg, REAL_TYPE))
9187 return NULL_TREE;
9189 STRIP_NOPS (arg);
9191 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9193 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9195 switch (value->cl)
9197 case rvc_nan:
9198 case rvc_inf:
9199 /* If arg is Inf or NaN and we're logb, return it. */
9200 if (TREE_CODE (rettype) == REAL_TYPE)
9201 return fold_convert_loc (loc, rettype, arg);
9202 /* Fall through... */
9203 case rvc_zero:
9204 /* Zero may set errno and/or raise an exception for logb, also
9205 for ilogb we don't know FP_ILOGB0. */
9206 return NULL_TREE;
9207 case rvc_normal:
9208 /* For normal numbers, proceed iff radix == 2. In GCC,
9209 normalized significands are in the range [0.5, 1.0). We
9210 want the exponent as if they were [1.0, 2.0) so get the
9211 exponent and subtract 1. */
9212 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9213 return fold_convert_loc (loc, rettype,
9214 build_int_cst (NULL_TREE,
9215 REAL_EXP (value)-1));
9216 break;
9220 return NULL_TREE;
9223 /* Fold a call to builtin significand, if radix == 2. */
9225 static tree
9226 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9228 if (! validate_arg (arg, REAL_TYPE))
9229 return NULL_TREE;
9231 STRIP_NOPS (arg);
9233 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9235 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9237 switch (value->cl)
9239 case rvc_zero:
9240 case rvc_nan:
9241 case rvc_inf:
9242 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9243 return fold_convert_loc (loc, rettype, arg);
9244 case rvc_normal:
9245 /* For normal numbers, proceed iff radix == 2. */
9246 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9248 REAL_VALUE_TYPE result = *value;
9249 /* In GCC, normalized significands are in the range [0.5,
9250 1.0). We want them to be [1.0, 2.0) so set the
9251 exponent to 1. */
9252 SET_REAL_EXP (&result, 1);
9253 return build_real (rettype, result);
9255 break;
9259 return NULL_TREE;
9262 /* Fold a call to builtin frexp, we can assume the base is 2. */
9264 static tree
9265 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9267 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9268 return NULL_TREE;
9270 STRIP_NOPS (arg0);
9272 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9273 return NULL_TREE;
9275 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9277 /* Proceed if a valid pointer type was passed in. */
9278 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9280 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9281 tree frac, exp;
9283 switch (value->cl)
9285 case rvc_zero:
9286 /* For +-0, return (*exp = 0, +-0). */
9287 exp = integer_zero_node;
9288 frac = arg0;
9289 break;
9290 case rvc_nan:
9291 case rvc_inf:
9292 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9293 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9294 case rvc_normal:
9296 /* Since the frexp function always expects base 2, and in
9297 GCC normalized significands are already in the range
9298 [0.5, 1.0), we have exactly what frexp wants. */
9299 REAL_VALUE_TYPE frac_rvt = *value;
9300 SET_REAL_EXP (&frac_rvt, 0);
9301 frac = build_real (rettype, frac_rvt);
9302 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9304 break;
9305 default:
9306 gcc_unreachable ();
9309 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9310 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9311 TREE_SIDE_EFFECTS (arg1) = 1;
9312 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9315 return NULL_TREE;
9318 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9319 then we can assume the base is two. If it's false, then we have to
9320 check the mode of the TYPE parameter in certain cases. */
9322 static tree
9323 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9324 tree type, bool ldexp)
9326 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9328 STRIP_NOPS (arg0);
9329 STRIP_NOPS (arg1);
9331 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9332 if (real_zerop (arg0) || integer_zerop (arg1)
9333 || (TREE_CODE (arg0) == REAL_CST
9334 && !real_isfinite (&TREE_REAL_CST (arg0))))
9335 return omit_one_operand_loc (loc, type, arg0, arg1);
9337 /* If both arguments are constant, then try to evaluate it. */
9338 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9339 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9340 && host_integerp (arg1, 0))
9342 /* Bound the maximum adjustment to twice the range of the
9343 mode's valid exponents. Use abs to ensure the range is
9344 positive as a sanity check. */
9345 const long max_exp_adj = 2 *
9346 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9347 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9349 /* Get the user-requested adjustment. */
9350 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9352 /* The requested adjustment must be inside this range. This
9353 is a preliminary cap to avoid things like overflow, we
9354 may still fail to compute the result for other reasons. */
9355 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9357 REAL_VALUE_TYPE initial_result;
9359 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9361 /* Ensure we didn't overflow. */
9362 if (! real_isinf (&initial_result))
9364 const REAL_VALUE_TYPE trunc_result
9365 = real_value_truncate (TYPE_MODE (type), initial_result);
9367 /* Only proceed if the target mode can hold the
9368 resulting value. */
9369 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9370 return build_real (type, trunc_result);
9376 return NULL_TREE;
9379 /* Fold a call to builtin modf. */
9381 static tree
9382 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9384 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9385 return NULL_TREE;
9387 STRIP_NOPS (arg0);
9389 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9390 return NULL_TREE;
9392 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9394 /* Proceed if a valid pointer type was passed in. */
9395 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9397 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9398 REAL_VALUE_TYPE trunc, frac;
9400 switch (value->cl)
9402 case rvc_nan:
9403 case rvc_zero:
9404 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9405 trunc = frac = *value;
9406 break;
9407 case rvc_inf:
9408 /* For +-Inf, return (*arg1 = arg0, +-0). */
9409 frac = dconst0;
9410 frac.sign = value->sign;
9411 trunc = *value;
9412 break;
9413 case rvc_normal:
9414 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9415 real_trunc (&trunc, VOIDmode, value);
9416 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9417 /* If the original number was negative and already
9418 integral, then the fractional part is -0.0. */
9419 if (value->sign && frac.cl == rvc_zero)
9420 frac.sign = value->sign;
9421 break;
9424 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9425 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9426 build_real (rettype, trunc));
9427 TREE_SIDE_EFFECTS (arg1) = 1;
9428 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9429 build_real (rettype, frac));
9432 return NULL_TREE;
9435 /* Given a location LOC, an interclass builtin function decl FNDECL
9436 and its single argument ARG, return an folded expression computing
9437 the same, or NULL_TREE if we either couldn't or didn't want to fold
9438 (the latter happen if there's an RTL instruction available). */
9440 static tree
9441 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9443 enum machine_mode mode;
9445 if (!validate_arg (arg, REAL_TYPE))
9446 return NULL_TREE;
9448 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9449 return NULL_TREE;
9451 mode = TYPE_MODE (TREE_TYPE (arg));
9453 /* If there is no optab, try generic code. */
9454 switch (DECL_FUNCTION_CODE (fndecl))
9456 tree result;
9458 CASE_FLT_FN (BUILT_IN_ISINF):
9460 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9461 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9462 tree const type = TREE_TYPE (arg);
9463 REAL_VALUE_TYPE r;
9464 char buf[128];
9466 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9467 real_from_string (&r, buf);
9468 result = build_call_expr (isgr_fn, 2,
9469 fold_build1_loc (loc, ABS_EXPR, type, arg),
9470 build_real (type, r));
9471 return result;
9473 CASE_FLT_FN (BUILT_IN_FINITE):
9474 case BUILT_IN_ISFINITE:
9476 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9477 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9478 tree const type = TREE_TYPE (arg);
9479 REAL_VALUE_TYPE r;
9480 char buf[128];
9482 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9483 real_from_string (&r, buf);
9484 result = build_call_expr (isle_fn, 2,
9485 fold_build1_loc (loc, ABS_EXPR, type, arg),
9486 build_real (type, r));
9487 /*result = fold_build2_loc (loc, UNGT_EXPR,
9488 TREE_TYPE (TREE_TYPE (fndecl)),
9489 fold_build1_loc (loc, ABS_EXPR, type, arg),
9490 build_real (type, r));
9491 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9492 TREE_TYPE (TREE_TYPE (fndecl)),
9493 result);*/
9494 return result;
9496 case BUILT_IN_ISNORMAL:
9498 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9499 islessequal(fabs(x),DBL_MAX). */
9500 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9501 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9502 tree const type = TREE_TYPE (arg);
9503 REAL_VALUE_TYPE rmax, rmin;
9504 char buf[128];
9506 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9507 real_from_string (&rmax, buf);
9508 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9509 real_from_string (&rmin, buf);
9510 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9511 result = build_call_expr (isle_fn, 2, arg,
9512 build_real (type, rmax));
9513 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9514 build_call_expr (isge_fn, 2, arg,
9515 build_real (type, rmin)));
9516 return result;
9518 default:
9519 break;
9522 return NULL_TREE;
9525 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9526 ARG is the argument for the call. */
9528 static tree
9529 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9531 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9532 REAL_VALUE_TYPE r;
9534 if (!validate_arg (arg, REAL_TYPE))
9535 return NULL_TREE;
9537 switch (builtin_index)
9539 case BUILT_IN_ISINF:
9540 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9541 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9543 if (TREE_CODE (arg) == REAL_CST)
9545 r = TREE_REAL_CST (arg);
9546 if (real_isinf (&r))
9547 return real_compare (GT_EXPR, &r, &dconst0)
9548 ? integer_one_node : integer_minus_one_node;
9549 else
9550 return integer_zero_node;
9553 return NULL_TREE;
9555 case BUILT_IN_ISINF_SIGN:
9557 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9558 /* In a boolean context, GCC will fold the inner COND_EXPR to
9559 1. So e.g. "if (isinf_sign(x))" would be folded to just
9560 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9561 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9562 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9563 tree tmp = NULL_TREE;
9565 arg = builtin_save_expr (arg);
9567 if (signbit_fn && isinf_fn)
9569 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9570 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9572 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9573 signbit_call, integer_zero_node);
9574 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9575 isinf_call, integer_zero_node);
9577 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9578 integer_minus_one_node, integer_one_node);
9579 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9580 isinf_call, tmp,
9581 integer_zero_node);
9584 return tmp;
9587 case BUILT_IN_ISFINITE:
9588 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9589 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9590 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9592 if (TREE_CODE (arg) == REAL_CST)
9594 r = TREE_REAL_CST (arg);
9595 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9598 return NULL_TREE;
9600 case BUILT_IN_ISNAN:
9601 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9602 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9604 if (TREE_CODE (arg) == REAL_CST)
9606 r = TREE_REAL_CST (arg);
9607 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9610 arg = builtin_save_expr (arg);
9611 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9613 default:
9614 gcc_unreachable ();
9618 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9619 This builtin will generate code to return the appropriate floating
9620 point classification depending on the value of the floating point
9621 number passed in. The possible return values must be supplied as
9622 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9623 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9624 one floating point argument which is "type generic". */
9626 static tree
9627 fold_builtin_fpclassify (location_t loc, tree exp)
9629 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9630 arg, type, res, tmp;
9631 enum machine_mode mode;
9632 REAL_VALUE_TYPE r;
9633 char buf[128];
9635 /* Verify the required arguments in the original call. */
9636 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9637 INTEGER_TYPE, INTEGER_TYPE,
9638 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9639 return NULL_TREE;
9641 fp_nan = CALL_EXPR_ARG (exp, 0);
9642 fp_infinite = CALL_EXPR_ARG (exp, 1);
9643 fp_normal = CALL_EXPR_ARG (exp, 2);
9644 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9645 fp_zero = CALL_EXPR_ARG (exp, 4);
9646 arg = CALL_EXPR_ARG (exp, 5);
9647 type = TREE_TYPE (arg);
9648 mode = TYPE_MODE (type);
9649 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9651 /* fpclassify(x) ->
9652 isnan(x) ? FP_NAN :
9653 (fabs(x) == Inf ? FP_INFINITE :
9654 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9655 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9657 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9658 build_real (type, dconst0));
9659 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9660 tmp, fp_zero, fp_subnormal);
9662 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9663 real_from_string (&r, buf);
9664 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9665 arg, build_real (type, r));
9666 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9668 if (HONOR_INFINITIES (mode))
9670 real_inf (&r);
9671 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9672 build_real (type, r));
9673 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9674 fp_infinite, res);
9677 if (HONOR_NANS (mode))
9679 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9680 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9683 return res;
9686 /* Fold a call to an unordered comparison function such as
9687 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9688 being called and ARG0 and ARG1 are the arguments for the call.
9689 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9690 the opposite of the desired result. UNORDERED_CODE is used
9691 for modes that can hold NaNs and ORDERED_CODE is used for
9692 the rest. */
9694 static tree
9695 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9696 enum tree_code unordered_code,
9697 enum tree_code ordered_code)
9699 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9700 enum tree_code code;
9701 tree type0, type1;
9702 enum tree_code code0, code1;
9703 tree cmp_type = NULL_TREE;
9705 type0 = TREE_TYPE (arg0);
9706 type1 = TREE_TYPE (arg1);
9708 code0 = TREE_CODE (type0);
9709 code1 = TREE_CODE (type1);
9711 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9712 /* Choose the wider of two real types. */
9713 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9714 ? type0 : type1;
9715 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9716 cmp_type = type0;
9717 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9718 cmp_type = type1;
9720 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9721 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9723 if (unordered_code == UNORDERED_EXPR)
9725 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9726 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9727 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9730 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9731 : ordered_code;
9732 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9733 fold_build2_loc (loc, code, type, arg0, arg1));
9736 /* Fold a call to built-in function FNDECL with 0 arguments.
9737 IGNORE is true if the result of the function call is ignored. This
9738 function returns NULL_TREE if no simplification was possible. */
9740 static tree
9741 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9743 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9744 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9745 switch (fcode)
9747 CASE_FLT_FN (BUILT_IN_INF):
9748 case BUILT_IN_INFD32:
9749 case BUILT_IN_INFD64:
9750 case BUILT_IN_INFD128:
9751 return fold_builtin_inf (loc, type, true);
9753 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9754 return fold_builtin_inf (loc, type, false);
9756 case BUILT_IN_CLASSIFY_TYPE:
9757 return fold_builtin_classify_type (NULL_TREE);
9759 default:
9760 break;
9762 return NULL_TREE;
9765 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9766 IGNORE is true if the result of the function call is ignored. This
9767 function returns NULL_TREE if no simplification was possible. */
9769 static tree
9770 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9772 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9773 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9774 switch (fcode)
9776 case BUILT_IN_CONSTANT_P:
9778 tree val = fold_builtin_constant_p (arg0);
9780 /* Gimplification will pull the CALL_EXPR for the builtin out of
9781 an if condition. When not optimizing, we'll not CSE it back.
9782 To avoid link error types of regressions, return false now. */
9783 if (!val && !optimize)
9784 val = integer_zero_node;
9786 return val;
9789 case BUILT_IN_CLASSIFY_TYPE:
9790 return fold_builtin_classify_type (arg0);
9792 case BUILT_IN_STRLEN:
9793 return fold_builtin_strlen (loc, type, arg0);
9795 CASE_FLT_FN (BUILT_IN_FABS):
9796 return fold_builtin_fabs (loc, arg0, type);
9798 case BUILT_IN_ABS:
9799 case BUILT_IN_LABS:
9800 case BUILT_IN_LLABS:
9801 case BUILT_IN_IMAXABS:
9802 return fold_builtin_abs (loc, arg0, type);
9804 CASE_FLT_FN (BUILT_IN_CONJ):
9805 if (validate_arg (arg0, COMPLEX_TYPE)
9806 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9807 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9808 break;
9810 CASE_FLT_FN (BUILT_IN_CREAL):
9811 if (validate_arg (arg0, COMPLEX_TYPE)
9812 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9813 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9814 break;
9816 CASE_FLT_FN (BUILT_IN_CIMAG):
9817 if (validate_arg (arg0, COMPLEX_TYPE)
9818 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9819 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9820 break;
9822 CASE_FLT_FN (BUILT_IN_CCOS):
9823 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9825 CASE_FLT_FN (BUILT_IN_CCOSH):
9826 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9828 CASE_FLT_FN (BUILT_IN_CPROJ):
9829 return fold_builtin_cproj(loc, arg0, type);
9831 CASE_FLT_FN (BUILT_IN_CSIN):
9832 if (validate_arg (arg0, COMPLEX_TYPE)
9833 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9834 return do_mpc_arg1 (arg0, type, mpc_sin);
9835 break;
9837 CASE_FLT_FN (BUILT_IN_CSINH):
9838 if (validate_arg (arg0, COMPLEX_TYPE)
9839 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9840 return do_mpc_arg1 (arg0, type, mpc_sinh);
9841 break;
9843 CASE_FLT_FN (BUILT_IN_CTAN):
9844 if (validate_arg (arg0, COMPLEX_TYPE)
9845 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9846 return do_mpc_arg1 (arg0, type, mpc_tan);
9847 break;
9849 CASE_FLT_FN (BUILT_IN_CTANH):
9850 if (validate_arg (arg0, COMPLEX_TYPE)
9851 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9852 return do_mpc_arg1 (arg0, type, mpc_tanh);
9853 break;
9855 CASE_FLT_FN (BUILT_IN_CLOG):
9856 if (validate_arg (arg0, COMPLEX_TYPE)
9857 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9858 return do_mpc_arg1 (arg0, type, mpc_log);
9859 break;
9861 CASE_FLT_FN (BUILT_IN_CSQRT):
9862 if (validate_arg (arg0, COMPLEX_TYPE)
9863 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9864 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9865 break;
9867 CASE_FLT_FN (BUILT_IN_CASIN):
9868 if (validate_arg (arg0, COMPLEX_TYPE)
9869 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9870 return do_mpc_arg1 (arg0, type, mpc_asin);
9871 break;
9873 CASE_FLT_FN (BUILT_IN_CACOS):
9874 if (validate_arg (arg0, COMPLEX_TYPE)
9875 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9876 return do_mpc_arg1 (arg0, type, mpc_acos);
9877 break;
9879 CASE_FLT_FN (BUILT_IN_CATAN):
9880 if (validate_arg (arg0, COMPLEX_TYPE)
9881 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9882 return do_mpc_arg1 (arg0, type, mpc_atan);
9883 break;
9885 CASE_FLT_FN (BUILT_IN_CASINH):
9886 if (validate_arg (arg0, COMPLEX_TYPE)
9887 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9888 return do_mpc_arg1 (arg0, type, mpc_asinh);
9889 break;
9891 CASE_FLT_FN (BUILT_IN_CACOSH):
9892 if (validate_arg (arg0, COMPLEX_TYPE)
9893 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9894 return do_mpc_arg1 (arg0, type, mpc_acosh);
9895 break;
9897 CASE_FLT_FN (BUILT_IN_CATANH):
9898 if (validate_arg (arg0, COMPLEX_TYPE)
9899 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9900 return do_mpc_arg1 (arg0, type, mpc_atanh);
9901 break;
9903 CASE_FLT_FN (BUILT_IN_CABS):
9904 return fold_builtin_cabs (loc, arg0, type, fndecl);
9906 CASE_FLT_FN (BUILT_IN_CARG):
9907 return fold_builtin_carg (loc, arg0, type);
9909 CASE_FLT_FN (BUILT_IN_SQRT):
9910 return fold_builtin_sqrt (loc, arg0, type);
9912 CASE_FLT_FN (BUILT_IN_CBRT):
9913 return fold_builtin_cbrt (loc, arg0, type);
9915 CASE_FLT_FN (BUILT_IN_ASIN):
9916 if (validate_arg (arg0, REAL_TYPE))
9917 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9918 &dconstm1, &dconst1, true);
9919 break;
9921 CASE_FLT_FN (BUILT_IN_ACOS):
9922 if (validate_arg (arg0, REAL_TYPE))
9923 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9924 &dconstm1, &dconst1, true);
9925 break;
9927 CASE_FLT_FN (BUILT_IN_ATAN):
9928 if (validate_arg (arg0, REAL_TYPE))
9929 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9930 break;
9932 CASE_FLT_FN (BUILT_IN_ASINH):
9933 if (validate_arg (arg0, REAL_TYPE))
9934 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9935 break;
9937 CASE_FLT_FN (BUILT_IN_ACOSH):
9938 if (validate_arg (arg0, REAL_TYPE))
9939 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9940 &dconst1, NULL, true);
9941 break;
9943 CASE_FLT_FN (BUILT_IN_ATANH):
9944 if (validate_arg (arg0, REAL_TYPE))
9945 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9946 &dconstm1, &dconst1, false);
9947 break;
9949 CASE_FLT_FN (BUILT_IN_SIN):
9950 if (validate_arg (arg0, REAL_TYPE))
9951 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9952 break;
9954 CASE_FLT_FN (BUILT_IN_COS):
9955 return fold_builtin_cos (loc, arg0, type, fndecl);
9957 CASE_FLT_FN (BUILT_IN_TAN):
9958 return fold_builtin_tan (arg0, type);
9960 CASE_FLT_FN (BUILT_IN_CEXP):
9961 return fold_builtin_cexp (loc, arg0, type);
9963 CASE_FLT_FN (BUILT_IN_CEXPI):
9964 if (validate_arg (arg0, REAL_TYPE))
9965 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9966 break;
9968 CASE_FLT_FN (BUILT_IN_SINH):
9969 if (validate_arg (arg0, REAL_TYPE))
9970 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9971 break;
9973 CASE_FLT_FN (BUILT_IN_COSH):
9974 return fold_builtin_cosh (loc, arg0, type, fndecl);
9976 CASE_FLT_FN (BUILT_IN_TANH):
9977 if (validate_arg (arg0, REAL_TYPE))
9978 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9979 break;
9981 CASE_FLT_FN (BUILT_IN_ERF):
9982 if (validate_arg (arg0, REAL_TYPE))
9983 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9984 break;
9986 CASE_FLT_FN (BUILT_IN_ERFC):
9987 if (validate_arg (arg0, REAL_TYPE))
9988 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9989 break;
9991 CASE_FLT_FN (BUILT_IN_TGAMMA):
9992 if (validate_arg (arg0, REAL_TYPE))
9993 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9994 break;
9996 CASE_FLT_FN (BUILT_IN_EXP):
9997 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9999 CASE_FLT_FN (BUILT_IN_EXP2):
10000 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10002 CASE_FLT_FN (BUILT_IN_EXP10):
10003 CASE_FLT_FN (BUILT_IN_POW10):
10004 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10006 CASE_FLT_FN (BUILT_IN_EXPM1):
10007 if (validate_arg (arg0, REAL_TYPE))
10008 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10009 break;
10011 CASE_FLT_FN (BUILT_IN_LOG):
10012 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10014 CASE_FLT_FN (BUILT_IN_LOG2):
10015 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10017 CASE_FLT_FN (BUILT_IN_LOG10):
10018 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10020 CASE_FLT_FN (BUILT_IN_LOG1P):
10021 if (validate_arg (arg0, REAL_TYPE))
10022 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10023 &dconstm1, NULL, false);
10024 break;
10026 CASE_FLT_FN (BUILT_IN_J0):
10027 if (validate_arg (arg0, REAL_TYPE))
10028 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10029 NULL, NULL, 0);
10030 break;
10032 CASE_FLT_FN (BUILT_IN_J1):
10033 if (validate_arg (arg0, REAL_TYPE))
10034 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10035 NULL, NULL, 0);
10036 break;
10038 CASE_FLT_FN (BUILT_IN_Y0):
10039 if (validate_arg (arg0, REAL_TYPE))
10040 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10041 &dconst0, NULL, false);
10042 break;
10044 CASE_FLT_FN (BUILT_IN_Y1):
10045 if (validate_arg (arg0, REAL_TYPE))
10046 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10047 &dconst0, NULL, false);
10048 break;
10050 CASE_FLT_FN (BUILT_IN_NAN):
10051 case BUILT_IN_NAND32:
10052 case BUILT_IN_NAND64:
10053 case BUILT_IN_NAND128:
10054 return fold_builtin_nan (arg0, type, true);
10056 CASE_FLT_FN (BUILT_IN_NANS):
10057 return fold_builtin_nan (arg0, type, false);
10059 CASE_FLT_FN (BUILT_IN_FLOOR):
10060 return fold_builtin_floor (loc, fndecl, arg0);
10062 CASE_FLT_FN (BUILT_IN_CEIL):
10063 return fold_builtin_ceil (loc, fndecl, arg0);
10065 CASE_FLT_FN (BUILT_IN_TRUNC):
10066 return fold_builtin_trunc (loc, fndecl, arg0);
10068 CASE_FLT_FN (BUILT_IN_ROUND):
10069 return fold_builtin_round (loc, fndecl, arg0);
10071 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10072 CASE_FLT_FN (BUILT_IN_RINT):
10073 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10075 CASE_FLT_FN (BUILT_IN_LCEIL):
10076 CASE_FLT_FN (BUILT_IN_LLCEIL):
10077 CASE_FLT_FN (BUILT_IN_LFLOOR):
10078 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10079 CASE_FLT_FN (BUILT_IN_LROUND):
10080 CASE_FLT_FN (BUILT_IN_LLROUND):
10081 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10083 CASE_FLT_FN (BUILT_IN_LRINT):
10084 CASE_FLT_FN (BUILT_IN_LLRINT):
10085 return fold_fixed_mathfn (loc, fndecl, arg0);
10087 case BUILT_IN_BSWAP32:
10088 case BUILT_IN_BSWAP64:
10089 return fold_builtin_bswap (fndecl, arg0);
10091 CASE_INT_FN (BUILT_IN_FFS):
10092 CASE_INT_FN (BUILT_IN_CLZ):
10093 CASE_INT_FN (BUILT_IN_CTZ):
10094 CASE_INT_FN (BUILT_IN_POPCOUNT):
10095 CASE_INT_FN (BUILT_IN_PARITY):
10096 return fold_builtin_bitop (fndecl, arg0);
10098 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10099 return fold_builtin_signbit (loc, arg0, type);
10101 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10102 return fold_builtin_significand (loc, arg0, type);
10104 CASE_FLT_FN (BUILT_IN_ILOGB):
10105 CASE_FLT_FN (BUILT_IN_LOGB):
10106 return fold_builtin_logb (loc, arg0, type);
10108 case BUILT_IN_ISASCII:
10109 return fold_builtin_isascii (loc, arg0);
10111 case BUILT_IN_TOASCII:
10112 return fold_builtin_toascii (loc, arg0);
10114 case BUILT_IN_ISDIGIT:
10115 return fold_builtin_isdigit (loc, arg0);
10117 CASE_FLT_FN (BUILT_IN_FINITE):
10118 case BUILT_IN_FINITED32:
10119 case BUILT_IN_FINITED64:
10120 case BUILT_IN_FINITED128:
10121 case BUILT_IN_ISFINITE:
10123 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10124 if (ret)
10125 return ret;
10126 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10129 CASE_FLT_FN (BUILT_IN_ISINF):
10130 case BUILT_IN_ISINFD32:
10131 case BUILT_IN_ISINFD64:
10132 case BUILT_IN_ISINFD128:
10134 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10135 if (ret)
10136 return ret;
10137 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10140 case BUILT_IN_ISNORMAL:
10141 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10143 case BUILT_IN_ISINF_SIGN:
10144 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10146 CASE_FLT_FN (BUILT_IN_ISNAN):
10147 case BUILT_IN_ISNAND32:
10148 case BUILT_IN_ISNAND64:
10149 case BUILT_IN_ISNAND128:
10150 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10152 case BUILT_IN_PRINTF:
10153 case BUILT_IN_PRINTF_UNLOCKED:
10154 case BUILT_IN_VPRINTF:
10155 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10157 case BUILT_IN_FREE:
10158 if (integer_zerop (arg0))
10159 return build_empty_stmt (loc);
10160 break;
10162 default:
10163 break;
10166 return NULL_TREE;
10170 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10171 IGNORE is true if the result of the function call is ignored. This
10172 function returns NULL_TREE if no simplification was possible. */
10174 static tree
10175 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10177 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10178 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10180 switch (fcode)
10182 CASE_FLT_FN (BUILT_IN_JN):
10183 if (validate_arg (arg0, INTEGER_TYPE)
10184 && validate_arg (arg1, REAL_TYPE))
10185 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10186 break;
10188 CASE_FLT_FN (BUILT_IN_YN):
10189 if (validate_arg (arg0, INTEGER_TYPE)
10190 && validate_arg (arg1, REAL_TYPE))
10191 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10192 &dconst0, false);
10193 break;
10195 CASE_FLT_FN (BUILT_IN_DREM):
10196 CASE_FLT_FN (BUILT_IN_REMAINDER):
10197 if (validate_arg (arg0, REAL_TYPE)
10198 && validate_arg(arg1, REAL_TYPE))
10199 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10200 break;
10202 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10203 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10204 if (validate_arg (arg0, REAL_TYPE)
10205 && validate_arg(arg1, POINTER_TYPE))
10206 return do_mpfr_lgamma_r (arg0, arg1, type);
10207 break;
10209 CASE_FLT_FN (BUILT_IN_ATAN2):
10210 if (validate_arg (arg0, REAL_TYPE)
10211 && validate_arg(arg1, REAL_TYPE))
10212 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10213 break;
10215 CASE_FLT_FN (BUILT_IN_FDIM):
10216 if (validate_arg (arg0, REAL_TYPE)
10217 && validate_arg(arg1, REAL_TYPE))
10218 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10219 break;
10221 CASE_FLT_FN (BUILT_IN_HYPOT):
10222 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10224 CASE_FLT_FN (BUILT_IN_CPOW):
10225 if (validate_arg (arg0, COMPLEX_TYPE)
10226 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10227 && validate_arg (arg1, COMPLEX_TYPE)
10228 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10229 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10230 break;
10232 CASE_FLT_FN (BUILT_IN_LDEXP):
10233 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10234 CASE_FLT_FN (BUILT_IN_SCALBN):
10235 CASE_FLT_FN (BUILT_IN_SCALBLN):
10236 return fold_builtin_load_exponent (loc, arg0, arg1,
10237 type, /*ldexp=*/false);
10239 CASE_FLT_FN (BUILT_IN_FREXP):
10240 return fold_builtin_frexp (loc, arg0, arg1, type);
10242 CASE_FLT_FN (BUILT_IN_MODF):
10243 return fold_builtin_modf (loc, arg0, arg1, type);
10245 case BUILT_IN_BZERO:
10246 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10248 case BUILT_IN_FPUTS:
10249 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10251 case BUILT_IN_FPUTS_UNLOCKED:
10252 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10254 case BUILT_IN_STRSTR:
10255 return fold_builtin_strstr (loc, arg0, arg1, type);
10257 case BUILT_IN_STRCAT:
10258 return fold_builtin_strcat (loc, arg0, arg1);
10260 case BUILT_IN_STRSPN:
10261 return fold_builtin_strspn (loc, arg0, arg1);
10263 case BUILT_IN_STRCSPN:
10264 return fold_builtin_strcspn (loc, arg0, arg1);
10266 case BUILT_IN_STRCHR:
10267 case BUILT_IN_INDEX:
10268 return fold_builtin_strchr (loc, arg0, arg1, type);
10270 case BUILT_IN_STRRCHR:
10271 case BUILT_IN_RINDEX:
10272 return fold_builtin_strrchr (loc, arg0, arg1, type);
10274 case BUILT_IN_STRCPY:
10275 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10277 case BUILT_IN_STPCPY:
10278 if (ignore)
10280 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10281 if (!fn)
10282 break;
10284 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10286 else
10287 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10288 break;
10290 case BUILT_IN_STRCMP:
10291 return fold_builtin_strcmp (loc, arg0, arg1);
10293 case BUILT_IN_STRPBRK:
10294 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10296 case BUILT_IN_EXPECT:
10297 return fold_builtin_expect (loc, arg0, arg1);
10299 CASE_FLT_FN (BUILT_IN_POW):
10300 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10302 CASE_FLT_FN (BUILT_IN_POWI):
10303 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10305 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10306 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10308 CASE_FLT_FN (BUILT_IN_FMIN):
10309 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10311 CASE_FLT_FN (BUILT_IN_FMAX):
10312 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10314 case BUILT_IN_ISGREATER:
10315 return fold_builtin_unordered_cmp (loc, fndecl,
10316 arg0, arg1, UNLE_EXPR, LE_EXPR);
10317 case BUILT_IN_ISGREATEREQUAL:
10318 return fold_builtin_unordered_cmp (loc, fndecl,
10319 arg0, arg1, UNLT_EXPR, LT_EXPR);
10320 case BUILT_IN_ISLESS:
10321 return fold_builtin_unordered_cmp (loc, fndecl,
10322 arg0, arg1, UNGE_EXPR, GE_EXPR);
10323 case BUILT_IN_ISLESSEQUAL:
10324 return fold_builtin_unordered_cmp (loc, fndecl,
10325 arg0, arg1, UNGT_EXPR, GT_EXPR);
10326 case BUILT_IN_ISLESSGREATER:
10327 return fold_builtin_unordered_cmp (loc, fndecl,
10328 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10329 case BUILT_IN_ISUNORDERED:
10330 return fold_builtin_unordered_cmp (loc, fndecl,
10331 arg0, arg1, UNORDERED_EXPR,
10332 NOP_EXPR);
10334 /* We do the folding for va_start in the expander. */
10335 case BUILT_IN_VA_START:
10336 break;
10338 case BUILT_IN_SPRINTF:
10339 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10341 case BUILT_IN_OBJECT_SIZE:
10342 return fold_builtin_object_size (arg0, arg1);
10344 case BUILT_IN_PRINTF:
10345 case BUILT_IN_PRINTF_UNLOCKED:
10346 case BUILT_IN_VPRINTF:
10347 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10349 case BUILT_IN_PRINTF_CHK:
10350 case BUILT_IN_VPRINTF_CHK:
10351 if (!validate_arg (arg0, INTEGER_TYPE)
10352 || TREE_SIDE_EFFECTS (arg0))
10353 return NULL_TREE;
10354 else
10355 return fold_builtin_printf (loc, fndecl,
10356 arg1, NULL_TREE, ignore, fcode);
10357 break;
10359 case BUILT_IN_FPRINTF:
10360 case BUILT_IN_FPRINTF_UNLOCKED:
10361 case BUILT_IN_VFPRINTF:
10362 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10363 ignore, fcode);
10365 default:
10366 break;
10368 return NULL_TREE;
10371 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10372 and ARG2. IGNORE is true if the result of the function call is ignored.
10373 This function returns NULL_TREE if no simplification was possible. */
10375 static tree
10376 fold_builtin_3 (location_t loc, tree fndecl,
10377 tree arg0, tree arg1, tree arg2, bool ignore)
10379 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10380 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10381 switch (fcode)
10384 CASE_FLT_FN (BUILT_IN_SINCOS):
10385 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10387 CASE_FLT_FN (BUILT_IN_FMA):
10388 if (validate_arg (arg0, REAL_TYPE)
10389 && validate_arg(arg1, REAL_TYPE)
10390 && validate_arg(arg2, REAL_TYPE))
10391 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10392 break;
10394 CASE_FLT_FN (BUILT_IN_REMQUO):
10395 if (validate_arg (arg0, REAL_TYPE)
10396 && validate_arg(arg1, REAL_TYPE)
10397 && validate_arg(arg2, POINTER_TYPE))
10398 return do_mpfr_remquo (arg0, arg1, arg2);
10399 break;
10401 case BUILT_IN_MEMSET:
10402 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10404 case BUILT_IN_BCOPY:
10405 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10406 void_type_node, true, /*endp=*/3);
10408 case BUILT_IN_MEMCPY:
10409 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10410 type, ignore, /*endp=*/0);
10412 case BUILT_IN_MEMPCPY:
10413 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10414 type, ignore, /*endp=*/1);
10416 case BUILT_IN_MEMMOVE:
10417 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10418 type, ignore, /*endp=*/3);
10420 case BUILT_IN_STRNCAT:
10421 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10423 case BUILT_IN_STRNCPY:
10424 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10426 case BUILT_IN_STRNCMP:
10427 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10429 case BUILT_IN_MEMCHR:
10430 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10432 case BUILT_IN_BCMP:
10433 case BUILT_IN_MEMCMP:
10434 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10436 case BUILT_IN_SPRINTF:
10437 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10439 case BUILT_IN_STRCPY_CHK:
10440 case BUILT_IN_STPCPY_CHK:
10441 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10442 ignore, fcode);
10444 case BUILT_IN_STRCAT_CHK:
10445 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10447 case BUILT_IN_PRINTF_CHK:
10448 case BUILT_IN_VPRINTF_CHK:
10449 if (!validate_arg (arg0, INTEGER_TYPE)
10450 || TREE_SIDE_EFFECTS (arg0))
10451 return NULL_TREE;
10452 else
10453 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10454 break;
10456 case BUILT_IN_FPRINTF:
10457 case BUILT_IN_FPRINTF_UNLOCKED:
10458 case BUILT_IN_VFPRINTF:
10459 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10460 ignore, fcode);
10462 case BUILT_IN_FPRINTF_CHK:
10463 case BUILT_IN_VFPRINTF_CHK:
10464 if (!validate_arg (arg1, INTEGER_TYPE)
10465 || TREE_SIDE_EFFECTS (arg1))
10466 return NULL_TREE;
10467 else
10468 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10469 ignore, fcode);
10471 default:
10472 break;
10474 return NULL_TREE;
10477 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10478 ARG2, and ARG3. IGNORE is true if the result of the function call is
10479 ignored. This function returns NULL_TREE if no simplification was
10480 possible. */
10482 static tree
10483 fold_builtin_4 (location_t loc, tree fndecl,
10484 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10486 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10488 switch (fcode)
10490 case BUILT_IN_MEMCPY_CHK:
10491 case BUILT_IN_MEMPCPY_CHK:
10492 case BUILT_IN_MEMMOVE_CHK:
10493 case BUILT_IN_MEMSET_CHK:
10494 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10495 NULL_TREE, ignore,
10496 DECL_FUNCTION_CODE (fndecl));
10498 case BUILT_IN_STRNCPY_CHK:
10499 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10501 case BUILT_IN_STRNCAT_CHK:
10502 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10504 case BUILT_IN_FPRINTF_CHK:
10505 case BUILT_IN_VFPRINTF_CHK:
10506 if (!validate_arg (arg1, INTEGER_TYPE)
10507 || TREE_SIDE_EFFECTS (arg1))
10508 return NULL_TREE;
10509 else
10510 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10511 ignore, fcode);
10512 break;
10514 default:
10515 break;
10517 return NULL_TREE;
10520 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10521 arguments, where NARGS <= 4. IGNORE is true if the result of the
10522 function call is ignored. This function returns NULL_TREE if no
10523 simplification was possible. Note that this only folds builtins with
10524 fixed argument patterns. Foldings that do varargs-to-varargs
10525 transformations, or that match calls with more than 4 arguments,
10526 need to be handled with fold_builtin_varargs instead. */
10528 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10530 static tree
10531 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10533 tree ret = NULL_TREE;
10535 switch (nargs)
10537 case 0:
10538 ret = fold_builtin_0 (loc, fndecl, ignore);
10539 break;
10540 case 1:
10541 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10542 break;
10543 case 2:
10544 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10545 break;
10546 case 3:
10547 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10548 break;
10549 case 4:
10550 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10551 ignore);
10552 break;
10553 default:
10554 break;
10556 if (ret)
10558 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10559 SET_EXPR_LOCATION (ret, loc);
10560 TREE_NO_WARNING (ret) = 1;
10561 return ret;
10563 return NULL_TREE;
10566 /* Builtins with folding operations that operate on "..." arguments
10567 need special handling; we need to store the arguments in a convenient
10568 data structure before attempting any folding. Fortunately there are
10569 only a few builtins that fall into this category. FNDECL is the
10570 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10571 result of the function call is ignored. */
10573 static tree
10574 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10575 bool ignore ATTRIBUTE_UNUSED)
10577 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10578 tree ret = NULL_TREE;
10580 switch (fcode)
10582 case BUILT_IN_SPRINTF_CHK:
10583 case BUILT_IN_VSPRINTF_CHK:
10584 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10585 break;
10587 case BUILT_IN_SNPRINTF_CHK:
10588 case BUILT_IN_VSNPRINTF_CHK:
10589 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10590 break;
10592 case BUILT_IN_FPCLASSIFY:
10593 ret = fold_builtin_fpclassify (loc, exp);
10594 break;
10596 default:
10597 break;
10599 if (ret)
10601 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10602 SET_EXPR_LOCATION (ret, loc);
10603 TREE_NO_WARNING (ret) = 1;
10604 return ret;
10606 return NULL_TREE;
10609 /* Return true if FNDECL shouldn't be folded right now.
10610 If a built-in function has an inline attribute always_inline
10611 wrapper, defer folding it after always_inline functions have
10612 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10613 might not be performed. */
10615 static bool
10616 avoid_folding_inline_builtin (tree fndecl)
10618 return (DECL_DECLARED_INLINE_P (fndecl)
10619 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10620 && cfun
10621 && !cfun->always_inline_functions_inlined
10622 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10625 /* A wrapper function for builtin folding that prevents warnings for
10626 "statement without effect" and the like, caused by removing the
10627 call node earlier than the warning is generated. */
10629 tree
10630 fold_call_expr (location_t loc, tree exp, bool ignore)
10632 tree ret = NULL_TREE;
10633 tree fndecl = get_callee_fndecl (exp);
10634 if (fndecl
10635 && TREE_CODE (fndecl) == FUNCTION_DECL
10636 && DECL_BUILT_IN (fndecl)
10637 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10638 yet. Defer folding until we see all the arguments
10639 (after inlining). */
10640 && !CALL_EXPR_VA_ARG_PACK (exp))
10642 int nargs = call_expr_nargs (exp);
10644 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10645 instead last argument is __builtin_va_arg_pack (). Defer folding
10646 even in that case, until arguments are finalized. */
10647 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10649 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10650 if (fndecl2
10651 && TREE_CODE (fndecl2) == FUNCTION_DECL
10652 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10653 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10654 return NULL_TREE;
10657 if (avoid_folding_inline_builtin (fndecl))
10658 return NULL_TREE;
10660 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10661 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10662 CALL_EXPR_ARGP (exp), ignore);
10663 else
10665 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10667 tree *args = CALL_EXPR_ARGP (exp);
10668 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10670 if (!ret)
10671 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10672 if (ret)
10673 return ret;
10676 return NULL_TREE;
10679 /* Conveniently construct a function call expression. FNDECL names the
10680 function to be called and N arguments are passed in the array
10681 ARGARRAY. */
10683 tree
10684 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10686 tree fntype = TREE_TYPE (fndecl);
10687 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10689 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10692 /* Conveniently construct a function call expression. FNDECL names the
10693 function to be called and the arguments are passed in the vector
10694 VEC. */
10696 tree
10697 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
10699 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
10700 VEC_address (tree, vec));
10704 /* Conveniently construct a function call expression. FNDECL names the
10705 function to be called, N is the number of arguments, and the "..."
10706 parameters are the argument expressions. */
10708 tree
10709 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10711 va_list ap;
10712 tree *argarray = XALLOCAVEC (tree, n);
10713 int i;
10715 va_start (ap, n);
10716 for (i = 0; i < n; i++)
10717 argarray[i] = va_arg (ap, tree);
10718 va_end (ap);
10719 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10722 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10723 varargs macros aren't supported by all bootstrap compilers. */
10725 tree
10726 build_call_expr (tree fndecl, int n, ...)
10728 va_list ap;
10729 tree *argarray = XALLOCAVEC (tree, n);
10730 int i;
10732 va_start (ap, n);
10733 for (i = 0; i < n; i++)
10734 argarray[i] = va_arg (ap, tree);
10735 va_end (ap);
10736 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10739 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10740 N arguments are passed in the array ARGARRAY. */
10742 tree
10743 fold_builtin_call_array (location_t loc, tree type,
10744 tree fn,
10745 int n,
10746 tree *argarray)
10748 tree ret = NULL_TREE;
10749 tree exp;
10751 if (TREE_CODE (fn) == ADDR_EXPR)
10753 tree fndecl = TREE_OPERAND (fn, 0);
10754 if (TREE_CODE (fndecl) == FUNCTION_DECL
10755 && DECL_BUILT_IN (fndecl))
10757 /* If last argument is __builtin_va_arg_pack (), arguments to this
10758 function are not finalized yet. Defer folding until they are. */
10759 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10761 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10762 if (fndecl2
10763 && TREE_CODE (fndecl2) == FUNCTION_DECL
10764 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10765 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10766 return build_call_array_loc (loc, type, fn, n, argarray);
10768 if (avoid_folding_inline_builtin (fndecl))
10769 return build_call_array_loc (loc, type, fn, n, argarray);
10770 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10772 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10773 if (ret)
10774 return ret;
10776 return build_call_array_loc (loc, type, fn, n, argarray);
10778 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10780 /* First try the transformations that don't require consing up
10781 an exp. */
10782 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10783 if (ret)
10784 return ret;
10787 /* If we got this far, we need to build an exp. */
10788 exp = build_call_array_loc (loc, type, fn, n, argarray);
10789 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10790 return ret ? ret : exp;
10794 return build_call_array_loc (loc, type, fn, n, argarray);
10797 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10798 along with N new arguments specified as the "..." parameters. SKIP
10799 is the number of arguments in EXP to be omitted. This function is used
10800 to do varargs-to-varargs transformations. */
10802 static tree
10803 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10805 int oldnargs = call_expr_nargs (exp);
10806 int nargs = oldnargs - skip + n;
10807 tree fntype = TREE_TYPE (fndecl);
10808 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10809 tree *buffer;
10811 if (n > 0)
10813 int i, j;
10814 va_list ap;
10816 buffer = XALLOCAVEC (tree, nargs);
10817 va_start (ap, n);
10818 for (i = 0; i < n; i++)
10819 buffer[i] = va_arg (ap, tree);
10820 va_end (ap);
10821 for (j = skip; j < oldnargs; j++, i++)
10822 buffer[i] = CALL_EXPR_ARG (exp, j);
10824 else
10825 buffer = CALL_EXPR_ARGP (exp) + skip;
10827 return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
10830 /* Validate a single argument ARG against a tree code CODE representing
10831 a type. */
10833 static bool
10834 validate_arg (const_tree arg, enum tree_code code)
10836 if (!arg)
10837 return false;
10838 else if (code == POINTER_TYPE)
10839 return POINTER_TYPE_P (TREE_TYPE (arg));
10840 else if (code == INTEGER_TYPE)
10841 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10842 return code == TREE_CODE (TREE_TYPE (arg));
10845 /* This function validates the types of a function call argument list
10846 against a specified list of tree_codes. If the last specifier is a 0,
10847 that represents an ellipses, otherwise the last specifier must be a
10848 VOID_TYPE.
10850 This is the GIMPLE version of validate_arglist. Eventually we want to
10851 completely convert builtins.c to work from GIMPLEs and the tree based
10852 validate_arglist will then be removed. */
10854 bool
10855 validate_gimple_arglist (const_gimple call, ...)
10857 enum tree_code code;
10858 bool res = 0;
10859 va_list ap;
10860 const_tree arg;
10861 size_t i;
10863 va_start (ap, call);
10864 i = 0;
10868 code = (enum tree_code) va_arg (ap, int);
10869 switch (code)
10871 case 0:
10872 /* This signifies an ellipses, any further arguments are all ok. */
10873 res = true;
10874 goto end;
10875 case VOID_TYPE:
10876 /* This signifies an endlink, if no arguments remain, return
10877 true, otherwise return false. */
10878 res = (i == gimple_call_num_args (call));
10879 goto end;
10880 default:
10881 /* If no parameters remain or the parameter's code does not
10882 match the specified code, return false. Otherwise continue
10883 checking any remaining arguments. */
10884 arg = gimple_call_arg (call, i++);
10885 if (!validate_arg (arg, code))
10886 goto end;
10887 break;
10890 while (1);
10892 /* We need gotos here since we can only have one VA_CLOSE in a
10893 function. */
10894 end: ;
10895 va_end (ap);
10897 return res;
10900 /* This function validates the types of a function call argument list
10901 against a specified list of tree_codes. If the last specifier is a 0,
10902 that represents an ellipses, otherwise the last specifier must be a
10903 VOID_TYPE. */
10905 bool
10906 validate_arglist (const_tree callexpr, ...)
10908 enum tree_code code;
10909 bool res = 0;
10910 va_list ap;
10911 const_call_expr_arg_iterator iter;
10912 const_tree arg;
10914 va_start (ap, callexpr);
10915 init_const_call_expr_arg_iterator (callexpr, &iter);
10919 code = (enum tree_code) va_arg (ap, int);
10920 switch (code)
10922 case 0:
10923 /* This signifies an ellipses, any further arguments are all ok. */
10924 res = true;
10925 goto end;
10926 case VOID_TYPE:
10927 /* This signifies an endlink, if no arguments remain, return
10928 true, otherwise return false. */
10929 res = !more_const_call_expr_args_p (&iter);
10930 goto end;
10931 default:
10932 /* If no parameters remain or the parameter's code does not
10933 match the specified code, return false. Otherwise continue
10934 checking any remaining arguments. */
10935 arg = next_const_call_expr_arg (&iter);
10936 if (!validate_arg (arg, code))
10937 goto end;
10938 break;
10941 while (1);
10943 /* We need gotos here since we can only have one VA_CLOSE in a
10944 function. */
10945 end: ;
10946 va_end (ap);
10948 return res;
10951 /* Default target-specific builtin expander that does nothing. */
10954 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10955 rtx target ATTRIBUTE_UNUSED,
10956 rtx subtarget ATTRIBUTE_UNUSED,
10957 enum machine_mode mode ATTRIBUTE_UNUSED,
10958 int ignore ATTRIBUTE_UNUSED)
10960 return NULL_RTX;
10963 /* Returns true is EXP represents data that would potentially reside
10964 in a readonly section. */
10966 static bool
10967 readonly_data_expr (tree exp)
10969 STRIP_NOPS (exp);
10971 if (TREE_CODE (exp) != ADDR_EXPR)
10972 return false;
10974 exp = get_base_address (TREE_OPERAND (exp, 0));
10975 if (!exp)
10976 return false;
10978 /* Make sure we call decl_readonly_section only for trees it
10979 can handle (since it returns true for everything it doesn't
10980 understand). */
10981 if (TREE_CODE (exp) == STRING_CST
10982 || TREE_CODE (exp) == CONSTRUCTOR
10983 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10984 return decl_readonly_section (exp, 0);
10985 else
10986 return false;
10989 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10990 to the call, and TYPE is its return type.
10992 Return NULL_TREE if no simplification was possible, otherwise return the
10993 simplified form of the call as a tree.
10995 The simplified form may be a constant or other expression which
10996 computes the same value, but in a more efficient manner (including
10997 calls to other builtin functions).
10999 The call may contain arguments which need to be evaluated, but
11000 which are not useful to determine the result of the call. In
11001 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11002 COMPOUND_EXPR will be an argument which must be evaluated.
11003 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11004 COMPOUND_EXPR in the chain will contain the tree for the simplified
11005 form of the builtin function call. */
11007 static tree
11008 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11010 if (!validate_arg (s1, POINTER_TYPE)
11011 || !validate_arg (s2, POINTER_TYPE))
11012 return NULL_TREE;
11013 else
11015 tree fn;
11016 const char *p1, *p2;
11018 p2 = c_getstr (s2);
11019 if (p2 == NULL)
11020 return NULL_TREE;
11022 p1 = c_getstr (s1);
11023 if (p1 != NULL)
11025 const char *r = strstr (p1, p2);
11026 tree tem;
11028 if (r == NULL)
11029 return build_int_cst (TREE_TYPE (s1), 0);
11031 /* Return an offset into the constant string argument. */
11032 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11033 s1, size_int (r - p1));
11034 return fold_convert_loc (loc, type, tem);
11037 /* The argument is const char *, and the result is char *, so we need
11038 a type conversion here to avoid a warning. */
11039 if (p2[0] == '\0')
11040 return fold_convert_loc (loc, type, s1);
11042 if (p2[1] != '\0')
11043 return NULL_TREE;
11045 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11046 if (!fn)
11047 return NULL_TREE;
11049 /* New argument list transforming strstr(s1, s2) to
11050 strchr(s1, s2[0]). */
11051 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11055 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11056 the call, and TYPE is its return type.
11058 Return NULL_TREE if no simplification was possible, otherwise return the
11059 simplified form of the call as a tree.
11061 The simplified form may be a constant or other expression which
11062 computes the same value, but in a more efficient manner (including
11063 calls to other builtin functions).
11065 The call may contain arguments which need to be evaluated, but
11066 which are not useful to determine the result of the call. In
11067 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11068 COMPOUND_EXPR will be an argument which must be evaluated.
11069 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11070 COMPOUND_EXPR in the chain will contain the tree for the simplified
11071 form of the builtin function call. */
11073 static tree
11074 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11076 if (!validate_arg (s1, POINTER_TYPE)
11077 || !validate_arg (s2, INTEGER_TYPE))
11078 return NULL_TREE;
11079 else
11081 const char *p1;
11083 if (TREE_CODE (s2) != INTEGER_CST)
11084 return NULL_TREE;
11086 p1 = c_getstr (s1);
11087 if (p1 != NULL)
11089 char c;
11090 const char *r;
11091 tree tem;
11093 if (target_char_cast (s2, &c))
11094 return NULL_TREE;
11096 r = strchr (p1, c);
11098 if (r == NULL)
11099 return build_int_cst (TREE_TYPE (s1), 0);
11101 /* Return an offset into the constant string argument. */
11102 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11103 s1, size_int (r - p1));
11104 return fold_convert_loc (loc, type, tem);
11106 return NULL_TREE;
11110 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11111 the call, and TYPE is its return type.
11113 Return NULL_TREE if no simplification was possible, otherwise return the
11114 simplified form of the call as a tree.
11116 The simplified form may be a constant or other expression which
11117 computes the same value, but in a more efficient manner (including
11118 calls to other builtin functions).
11120 The call may contain arguments which need to be evaluated, but
11121 which are not useful to determine the result of the call. In
11122 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11123 COMPOUND_EXPR will be an argument which must be evaluated.
11124 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11125 COMPOUND_EXPR in the chain will contain the tree for the simplified
11126 form of the builtin function call. */
11128 static tree
11129 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11131 if (!validate_arg (s1, POINTER_TYPE)
11132 || !validate_arg (s2, INTEGER_TYPE))
11133 return NULL_TREE;
11134 else
11136 tree fn;
11137 const char *p1;
11139 if (TREE_CODE (s2) != INTEGER_CST)
11140 return NULL_TREE;
11142 p1 = c_getstr (s1);
11143 if (p1 != NULL)
11145 char c;
11146 const char *r;
11147 tree tem;
11149 if (target_char_cast (s2, &c))
11150 return NULL_TREE;
11152 r = strrchr (p1, c);
11154 if (r == NULL)
11155 return build_int_cst (TREE_TYPE (s1), 0);
11157 /* Return an offset into the constant string argument. */
11158 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11159 s1, size_int (r - p1));
11160 return fold_convert_loc (loc, type, tem);
11163 if (! integer_zerop (s2))
11164 return NULL_TREE;
11166 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11167 if (!fn)
11168 return NULL_TREE;
11170 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11171 return build_call_expr_loc (loc, fn, 2, s1, s2);
11175 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11176 to the call, and TYPE is its return type.
11178 Return NULL_TREE if no simplification was possible, otherwise return the
11179 simplified form of the call as a tree.
11181 The simplified form may be a constant or other expression which
11182 computes the same value, but in a more efficient manner (including
11183 calls to other builtin functions).
11185 The call may contain arguments which need to be evaluated, but
11186 which are not useful to determine the result of the call. In
11187 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11188 COMPOUND_EXPR will be an argument which must be evaluated.
11189 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11190 COMPOUND_EXPR in the chain will contain the tree for the simplified
11191 form of the builtin function call. */
11193 static tree
11194 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11196 if (!validate_arg (s1, POINTER_TYPE)
11197 || !validate_arg (s2, POINTER_TYPE))
11198 return NULL_TREE;
11199 else
11201 tree fn;
11202 const char *p1, *p2;
11204 p2 = c_getstr (s2);
11205 if (p2 == NULL)
11206 return NULL_TREE;
11208 p1 = c_getstr (s1);
11209 if (p1 != NULL)
11211 const char *r = strpbrk (p1, p2);
11212 tree tem;
11214 if (r == NULL)
11215 return build_int_cst (TREE_TYPE (s1), 0);
11217 /* Return an offset into the constant string argument. */
11218 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11219 s1, size_int (r - p1));
11220 return fold_convert_loc (loc, type, tem);
11223 if (p2[0] == '\0')
11224 /* strpbrk(x, "") == NULL.
11225 Evaluate and ignore s1 in case it had side-effects. */
11226 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11228 if (p2[1] != '\0')
11229 return NULL_TREE; /* Really call strpbrk. */
11231 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11232 if (!fn)
11233 return NULL_TREE;
11235 /* New argument list transforming strpbrk(s1, s2) to
11236 strchr(s1, s2[0]). */
11237 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11241 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11242 to the call.
11244 Return NULL_TREE if no simplification was possible, otherwise return the
11245 simplified form of the call as a tree.
11247 The simplified form may be a constant or other expression which
11248 computes the same value, but in a more efficient manner (including
11249 calls to other builtin functions).
11251 The call may contain arguments which need to be evaluated, but
11252 which are not useful to determine the result of the call. In
11253 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11254 COMPOUND_EXPR will be an argument which must be evaluated.
11255 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11256 COMPOUND_EXPR in the chain will contain the tree for the simplified
11257 form of the builtin function call. */
11259 static tree
11260 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11262 if (!validate_arg (dst, POINTER_TYPE)
11263 || !validate_arg (src, POINTER_TYPE))
11264 return NULL_TREE;
11265 else
11267 const char *p = c_getstr (src);
11269 /* If the string length is zero, return the dst parameter. */
11270 if (p && *p == '\0')
11271 return dst;
11273 if (optimize_insn_for_speed_p ())
11275 /* See if we can store by pieces into (dst + strlen(dst)). */
11276 tree newdst, call;
11277 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11278 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11280 if (!strlen_fn || !strcpy_fn)
11281 return NULL_TREE;
11283 /* If we don't have a movstr we don't want to emit an strcpy
11284 call. We have to do that if the length of the source string
11285 isn't computable (in that case we can use memcpy probably
11286 later expanding to a sequence of mov instructions). If we
11287 have movstr instructions we can emit strcpy calls. */
11288 if (!HAVE_movstr)
11290 tree len = c_strlen (src, 1);
11291 if (! len || TREE_SIDE_EFFECTS (len))
11292 return NULL_TREE;
11295 /* Stabilize the argument list. */
11296 dst = builtin_save_expr (dst);
11298 /* Create strlen (dst). */
11299 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11300 /* Create (dst p+ strlen (dst)). */
11302 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11303 TREE_TYPE (dst), dst, newdst);
11304 newdst = builtin_save_expr (newdst);
11306 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11307 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11309 return NULL_TREE;
11313 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11314 arguments to the call.
11316 Return NULL_TREE if no simplification was possible, otherwise return the
11317 simplified form of the call as a tree.
11319 The simplified form may be a constant or other expression which
11320 computes the same value, but in a more efficient manner (including
11321 calls to other builtin functions).
11323 The call may contain arguments which need to be evaluated, but
11324 which are not useful to determine the result of the call. In
11325 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11326 COMPOUND_EXPR will be an argument which must be evaluated.
11327 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11328 COMPOUND_EXPR in the chain will contain the tree for the simplified
11329 form of the builtin function call. */
11331 static tree
11332 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11334 if (!validate_arg (dst, POINTER_TYPE)
11335 || !validate_arg (src, POINTER_TYPE)
11336 || !validate_arg (len, INTEGER_TYPE))
11337 return NULL_TREE;
11338 else
11340 const char *p = c_getstr (src);
11342 /* If the requested length is zero, or the src parameter string
11343 length is zero, return the dst parameter. */
11344 if (integer_zerop (len) || (p && *p == '\0'))
11345 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11347 /* If the requested len is greater than or equal to the string
11348 length, call strcat. */
11349 if (TREE_CODE (len) == INTEGER_CST && p
11350 && compare_tree_int (len, strlen (p)) >= 0)
11352 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11354 /* If the replacement _DECL isn't initialized, don't do the
11355 transformation. */
11356 if (!fn)
11357 return NULL_TREE;
11359 return build_call_expr_loc (loc, fn, 2, dst, src);
11361 return NULL_TREE;
11365 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11366 to the call.
11368 Return NULL_TREE if no simplification was possible, otherwise return the
11369 simplified form of the call as a tree.
11371 The simplified form may be a constant or other expression which
11372 computes the same value, but in a more efficient manner (including
11373 calls to other builtin functions).
11375 The call may contain arguments which need to be evaluated, but
11376 which are not useful to determine the result of the call. In
11377 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11378 COMPOUND_EXPR will be an argument which must be evaluated.
11379 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11380 COMPOUND_EXPR in the chain will contain the tree for the simplified
11381 form of the builtin function call. */
11383 static tree
11384 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11386 if (!validate_arg (s1, POINTER_TYPE)
11387 || !validate_arg (s2, POINTER_TYPE))
11388 return NULL_TREE;
11389 else
11391 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11393 /* If both arguments are constants, evaluate at compile-time. */
11394 if (p1 && p2)
11396 const size_t r = strspn (p1, p2);
11397 return size_int (r);
11400 /* If either argument is "", return NULL_TREE. */
11401 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11402 /* Evaluate and ignore both arguments in case either one has
11403 side-effects. */
11404 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11405 s1, s2);
11406 return NULL_TREE;
11410 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11411 to the call.
11413 Return NULL_TREE if no simplification was possible, otherwise return the
11414 simplified form of the call as a tree.
11416 The simplified form may be a constant or other expression which
11417 computes the same value, but in a more efficient manner (including
11418 calls to other builtin functions).
11420 The call may contain arguments which need to be evaluated, but
11421 which are not useful to determine the result of the call. In
11422 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11423 COMPOUND_EXPR will be an argument which must be evaluated.
11424 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11425 COMPOUND_EXPR in the chain will contain the tree for the simplified
11426 form of the builtin function call. */
11428 static tree
11429 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11431 if (!validate_arg (s1, POINTER_TYPE)
11432 || !validate_arg (s2, POINTER_TYPE))
11433 return NULL_TREE;
11434 else
11436 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11438 /* If both arguments are constants, evaluate at compile-time. */
11439 if (p1 && p2)
11441 const size_t r = strcspn (p1, p2);
11442 return size_int (r);
11445 /* If the first argument is "", return NULL_TREE. */
11446 if (p1 && *p1 == '\0')
11448 /* Evaluate and ignore argument s2 in case it has
11449 side-effects. */
11450 return omit_one_operand_loc (loc, size_type_node,
11451 size_zero_node, s2);
11454 /* If the second argument is "", return __builtin_strlen(s1). */
11455 if (p2 && *p2 == '\0')
11457 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11459 /* If the replacement _DECL isn't initialized, don't do the
11460 transformation. */
11461 if (!fn)
11462 return NULL_TREE;
11464 return build_call_expr_loc (loc, fn, 1, s1);
11466 return NULL_TREE;
11470 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11471 to the call. IGNORE is true if the value returned
11472 by the builtin will be ignored. UNLOCKED is true is true if this
11473 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11474 the known length of the string. Return NULL_TREE if no simplification
11475 was possible. */
11477 tree
11478 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11479 bool ignore, bool unlocked, tree len)
11481 /* If we're using an unlocked function, assume the other unlocked
11482 functions exist explicitly. */
11483 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11484 : implicit_built_in_decls[BUILT_IN_FPUTC];
11485 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11486 : implicit_built_in_decls[BUILT_IN_FWRITE];
11488 /* If the return value is used, don't do the transformation. */
11489 if (!ignore)
11490 return NULL_TREE;
11492 /* Verify the arguments in the original call. */
11493 if (!validate_arg (arg0, POINTER_TYPE)
11494 || !validate_arg (arg1, POINTER_TYPE))
11495 return NULL_TREE;
11497 if (! len)
11498 len = c_strlen (arg0, 0);
11500 /* Get the length of the string passed to fputs. If the length
11501 can't be determined, punt. */
11502 if (!len
11503 || TREE_CODE (len) != INTEGER_CST)
11504 return NULL_TREE;
11506 switch (compare_tree_int (len, 1))
11508 case -1: /* length is 0, delete the call entirely . */
11509 return omit_one_operand_loc (loc, integer_type_node,
11510 integer_zero_node, arg1);;
11512 case 0: /* length is 1, call fputc. */
11514 const char *p = c_getstr (arg0);
11516 if (p != NULL)
11518 if (fn_fputc)
11519 return build_call_expr_loc (loc, fn_fputc, 2,
11520 build_int_cst (NULL_TREE, p[0]), arg1);
11521 else
11522 return NULL_TREE;
11525 /* FALLTHROUGH */
11526 case 1: /* length is greater than 1, call fwrite. */
11528 /* If optimizing for size keep fputs. */
11529 if (optimize_function_for_size_p (cfun))
11530 return NULL_TREE;
11531 /* New argument list transforming fputs(string, stream) to
11532 fwrite(string, 1, len, stream). */
11533 if (fn_fwrite)
11534 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11535 size_one_node, len, arg1);
11536 else
11537 return NULL_TREE;
11539 default:
11540 gcc_unreachable ();
11542 return NULL_TREE;
11545 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11546 produced. False otherwise. This is done so that we don't output the error
11547 or warning twice or three times. */
11549 bool
11550 fold_builtin_next_arg (tree exp, bool va_start_p)
11552 tree fntype = TREE_TYPE (current_function_decl);
11553 int nargs = call_expr_nargs (exp);
11554 tree arg;
11556 if (TYPE_ARG_TYPES (fntype) == 0
11557 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11558 == void_type_node))
11560 error ("%<va_start%> used in function with fixed args");
11561 return true;
11564 if (va_start_p)
11566 if (va_start_p && (nargs != 2))
11568 error ("wrong number of arguments to function %<va_start%>");
11569 return true;
11571 arg = CALL_EXPR_ARG (exp, 1);
11573 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11574 when we checked the arguments and if needed issued a warning. */
11575 else
11577 if (nargs == 0)
11579 /* Evidently an out of date version of <stdarg.h>; can't validate
11580 va_start's second argument, but can still work as intended. */
11581 warning (0, "%<__builtin_next_arg%> called without an argument");
11582 return true;
11584 else if (nargs > 1)
11586 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11587 return true;
11589 arg = CALL_EXPR_ARG (exp, 0);
11592 if (TREE_CODE (arg) == SSA_NAME)
11593 arg = SSA_NAME_VAR (arg);
11595 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11596 or __builtin_next_arg (0) the first time we see it, after checking
11597 the arguments and if needed issuing a warning. */
11598 if (!integer_zerop (arg))
11600 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11602 /* Strip off all nops for the sake of the comparison. This
11603 is not quite the same as STRIP_NOPS. It does more.
11604 We must also strip off INDIRECT_EXPR for C++ reference
11605 parameters. */
11606 while (CONVERT_EXPR_P (arg)
11607 || TREE_CODE (arg) == INDIRECT_REF)
11608 arg = TREE_OPERAND (arg, 0);
11609 if (arg != last_parm)
11611 /* FIXME: Sometimes with the tree optimizers we can get the
11612 not the last argument even though the user used the last
11613 argument. We just warn and set the arg to be the last
11614 argument so that we will get wrong-code because of
11615 it. */
11616 warning (0, "second parameter of %<va_start%> not last named argument");
11619 /* Undefined by C99 7.15.1.4p4 (va_start):
11620 "If the parameter parmN is declared with the register storage
11621 class, with a function or array type, or with a type that is
11622 not compatible with the type that results after application of
11623 the default argument promotions, the behavior is undefined."
11625 else if (DECL_REGISTER (arg))
11626 warning (0, "undefined behaviour when second parameter of "
11627 "%<va_start%> is declared with %<register%> storage");
11629 /* We want to verify the second parameter just once before the tree
11630 optimizers are run and then avoid keeping it in the tree,
11631 as otherwise we could warn even for correct code like:
11632 void foo (int i, ...)
11633 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11634 if (va_start_p)
11635 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11636 else
11637 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11639 return false;
11643 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11644 ORIG may be null if this is a 2-argument call. We don't attempt to
11645 simplify calls with more than 3 arguments.
11647 Return NULL_TREE if no simplification was possible, otherwise return the
11648 simplified form of the call as a tree. If IGNORED is true, it means that
11649 the caller does not use the returned value of the function. */
11651 static tree
11652 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11653 tree orig, int ignored)
11655 tree call, retval;
11656 const char *fmt_str = NULL;
11658 /* Verify the required arguments in the original call. We deal with two
11659 types of sprintf() calls: 'sprintf (str, fmt)' and
11660 'sprintf (dest, "%s", orig)'. */
11661 if (!validate_arg (dest, POINTER_TYPE)
11662 || !validate_arg (fmt, POINTER_TYPE))
11663 return NULL_TREE;
11664 if (orig && !validate_arg (orig, POINTER_TYPE))
11665 return NULL_TREE;
11667 /* Check whether the format is a literal string constant. */
11668 fmt_str = c_getstr (fmt);
11669 if (fmt_str == NULL)
11670 return NULL_TREE;
11672 call = NULL_TREE;
11673 retval = NULL_TREE;
11675 if (!init_target_chars ())
11676 return NULL_TREE;
11678 /* If the format doesn't contain % args or %%, use strcpy. */
11679 if (strchr (fmt_str, target_percent) == NULL)
11681 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11683 if (!fn)
11684 return NULL_TREE;
11686 /* Don't optimize sprintf (buf, "abc", ptr++). */
11687 if (orig)
11688 return NULL_TREE;
11690 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11691 'format' is known to contain no % formats. */
11692 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11693 if (!ignored)
11694 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11697 /* If the format is "%s", use strcpy if the result isn't used. */
11698 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11700 tree fn;
11701 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11703 if (!fn)
11704 return NULL_TREE;
11706 /* Don't crash on sprintf (str1, "%s"). */
11707 if (!orig)
11708 return NULL_TREE;
11710 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11711 if (!ignored)
11713 retval = c_strlen (orig, 1);
11714 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11715 return NULL_TREE;
11717 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11720 if (call && retval)
11722 retval = fold_convert_loc
11723 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11724 retval);
11725 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11727 else
11728 return call;
11731 /* Expand a call EXP to __builtin_object_size. */
11734 expand_builtin_object_size (tree exp)
11736 tree ost;
11737 int object_size_type;
11738 tree fndecl = get_callee_fndecl (exp);
11740 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11742 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11743 exp, fndecl);
11744 expand_builtin_trap ();
11745 return const0_rtx;
11748 ost = CALL_EXPR_ARG (exp, 1);
11749 STRIP_NOPS (ost);
11751 if (TREE_CODE (ost) != INTEGER_CST
11752 || tree_int_cst_sgn (ost) < 0
11753 || compare_tree_int (ost, 3) > 0)
11755 error ("%Klast argument of %D is not integer constant between 0 and 3",
11756 exp, fndecl);
11757 expand_builtin_trap ();
11758 return const0_rtx;
11761 object_size_type = tree_low_cst (ost, 0);
11763 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11766 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11767 FCODE is the BUILT_IN_* to use.
11768 Return NULL_RTX if we failed; the caller should emit a normal call,
11769 otherwise try to get the result in TARGET, if convenient (and in
11770 mode MODE if that's convenient). */
11772 static rtx
11773 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11774 enum built_in_function fcode)
11776 tree dest, src, len, size;
11778 if (!validate_arglist (exp,
11779 POINTER_TYPE,
11780 fcode == BUILT_IN_MEMSET_CHK
11781 ? INTEGER_TYPE : POINTER_TYPE,
11782 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11783 return NULL_RTX;
11785 dest = CALL_EXPR_ARG (exp, 0);
11786 src = CALL_EXPR_ARG (exp, 1);
11787 len = CALL_EXPR_ARG (exp, 2);
11788 size = CALL_EXPR_ARG (exp, 3);
11790 if (! host_integerp (size, 1))
11791 return NULL_RTX;
11793 if (host_integerp (len, 1) || integer_all_onesp (size))
11795 tree fn;
11797 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11799 warning_at (tree_nonartificial_location (exp),
11800 0, "%Kcall to %D will always overflow destination buffer",
11801 exp, get_callee_fndecl (exp));
11802 return NULL_RTX;
11805 fn = NULL_TREE;
11806 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11807 mem{cpy,pcpy,move,set} is available. */
11808 switch (fcode)
11810 case BUILT_IN_MEMCPY_CHK:
11811 fn = built_in_decls[BUILT_IN_MEMCPY];
11812 break;
11813 case BUILT_IN_MEMPCPY_CHK:
11814 fn = built_in_decls[BUILT_IN_MEMPCPY];
11815 break;
11816 case BUILT_IN_MEMMOVE_CHK:
11817 fn = built_in_decls[BUILT_IN_MEMMOVE];
11818 break;
11819 case BUILT_IN_MEMSET_CHK:
11820 fn = built_in_decls[BUILT_IN_MEMSET];
11821 break;
11822 default:
11823 break;
11826 if (! fn)
11827 return NULL_RTX;
11829 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11830 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11831 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11832 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11834 else if (fcode == BUILT_IN_MEMSET_CHK)
11835 return NULL_RTX;
11836 else
11838 unsigned int dest_align
11839 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11841 /* If DEST is not a pointer type, call the normal function. */
11842 if (dest_align == 0)
11843 return NULL_RTX;
11845 /* If SRC and DEST are the same (and not volatile), do nothing. */
11846 if (operand_equal_p (src, dest, 0))
11848 tree expr;
11850 if (fcode != BUILT_IN_MEMPCPY_CHK)
11852 /* Evaluate and ignore LEN in case it has side-effects. */
11853 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11854 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11857 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11858 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11861 /* __memmove_chk special case. */
11862 if (fcode == BUILT_IN_MEMMOVE_CHK)
11864 unsigned int src_align
11865 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11867 if (src_align == 0)
11868 return NULL_RTX;
11870 /* If src is categorized for a readonly section we can use
11871 normal __memcpy_chk. */
11872 if (readonly_data_expr (src))
11874 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11875 if (!fn)
11876 return NULL_RTX;
11877 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11878 dest, src, len, size);
11879 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11880 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11881 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11884 return NULL_RTX;
11888 /* Emit warning if a buffer overflow is detected at compile time. */
11890 static void
11891 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11893 int is_strlen = 0;
11894 tree len, size;
11895 location_t loc = tree_nonartificial_location (exp);
11897 switch (fcode)
11899 case BUILT_IN_STRCPY_CHK:
11900 case BUILT_IN_STPCPY_CHK:
11901 /* For __strcat_chk the warning will be emitted only if overflowing
11902 by at least strlen (dest) + 1 bytes. */
11903 case BUILT_IN_STRCAT_CHK:
11904 len = CALL_EXPR_ARG (exp, 1);
11905 size = CALL_EXPR_ARG (exp, 2);
11906 is_strlen = 1;
11907 break;
11908 case BUILT_IN_STRNCAT_CHK:
11909 case BUILT_IN_STRNCPY_CHK:
11910 len = CALL_EXPR_ARG (exp, 2);
11911 size = CALL_EXPR_ARG (exp, 3);
11912 break;
11913 case BUILT_IN_SNPRINTF_CHK:
11914 case BUILT_IN_VSNPRINTF_CHK:
11915 len = CALL_EXPR_ARG (exp, 1);
11916 size = CALL_EXPR_ARG (exp, 3);
11917 break;
11918 default:
11919 gcc_unreachable ();
11922 if (!len || !size)
11923 return;
11925 if (! host_integerp (size, 1) || integer_all_onesp (size))
11926 return;
11928 if (is_strlen)
11930 len = c_strlen (len, 1);
11931 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11932 return;
11934 else if (fcode == BUILT_IN_STRNCAT_CHK)
11936 tree src = CALL_EXPR_ARG (exp, 1);
11937 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11938 return;
11939 src = c_strlen (src, 1);
11940 if (! src || ! host_integerp (src, 1))
11942 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11943 exp, get_callee_fndecl (exp));
11944 return;
11946 else if (tree_int_cst_lt (src, size))
11947 return;
11949 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11950 return;
11952 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11953 exp, get_callee_fndecl (exp));
11956 /* Emit warning if a buffer overflow is detected at compile time
11957 in __sprintf_chk/__vsprintf_chk calls. */
11959 static void
11960 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11962 tree size, len, fmt;
11963 const char *fmt_str;
11964 int nargs = call_expr_nargs (exp);
11966 /* Verify the required arguments in the original call. */
11968 if (nargs < 4)
11969 return;
11970 size = CALL_EXPR_ARG (exp, 2);
11971 fmt = CALL_EXPR_ARG (exp, 3);
11973 if (! host_integerp (size, 1) || integer_all_onesp (size))
11974 return;
11976 /* Check whether the format is a literal string constant. */
11977 fmt_str = c_getstr (fmt);
11978 if (fmt_str == NULL)
11979 return;
11981 if (!init_target_chars ())
11982 return;
11984 /* If the format doesn't contain % args or %%, we know its size. */
11985 if (strchr (fmt_str, target_percent) == 0)
11986 len = build_int_cstu (size_type_node, strlen (fmt_str));
11987 /* If the format is "%s" and first ... argument is a string literal,
11988 we know it too. */
11989 else if (fcode == BUILT_IN_SPRINTF_CHK
11990 && strcmp (fmt_str, target_percent_s) == 0)
11992 tree arg;
11994 if (nargs < 5)
11995 return;
11996 arg = CALL_EXPR_ARG (exp, 4);
11997 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11998 return;
12000 len = c_strlen (arg, 1);
12001 if (!len || ! host_integerp (len, 1))
12002 return;
12004 else
12005 return;
12007 if (! tree_int_cst_lt (len, size))
12008 warning_at (tree_nonartificial_location (exp),
12009 0, "%Kcall to %D will always overflow destination buffer",
12010 exp, get_callee_fndecl (exp));
12013 /* Emit warning if a free is called with address of a variable. */
12015 static void
12016 maybe_emit_free_warning (tree exp)
12018 tree arg = CALL_EXPR_ARG (exp, 0);
12020 STRIP_NOPS (arg);
12021 if (TREE_CODE (arg) != ADDR_EXPR)
12022 return;
12024 arg = get_base_address (TREE_OPERAND (arg, 0));
12025 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12026 return;
12028 if (SSA_VAR_P (arg))
12029 warning_at (tree_nonartificial_location (exp),
12030 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12031 else
12032 warning_at (tree_nonartificial_location (exp),
12033 0, "%Kattempt to free a non-heap object", exp);
12036 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12037 if possible. */
12039 tree
12040 fold_builtin_object_size (tree ptr, tree ost)
12042 unsigned HOST_WIDE_INT bytes;
12043 int object_size_type;
12045 if (!validate_arg (ptr, POINTER_TYPE)
12046 || !validate_arg (ost, INTEGER_TYPE))
12047 return NULL_TREE;
12049 STRIP_NOPS (ost);
12051 if (TREE_CODE (ost) != INTEGER_CST
12052 || tree_int_cst_sgn (ost) < 0
12053 || compare_tree_int (ost, 3) > 0)
12054 return NULL_TREE;
12056 object_size_type = tree_low_cst (ost, 0);
12058 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12059 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12060 and (size_t) 0 for types 2 and 3. */
12061 if (TREE_SIDE_EFFECTS (ptr))
12062 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12064 if (TREE_CODE (ptr) == ADDR_EXPR)
12066 bytes = compute_builtin_object_size (ptr, object_size_type);
12067 if (double_int_fits_to_tree_p (size_type_node,
12068 uhwi_to_double_int (bytes)))
12069 return build_int_cstu (size_type_node, bytes);
12071 else if (TREE_CODE (ptr) == SSA_NAME)
12073 /* If object size is not known yet, delay folding until
12074 later. Maybe subsequent passes will help determining
12075 it. */
12076 bytes = compute_builtin_object_size (ptr, object_size_type);
12077 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12078 && double_int_fits_to_tree_p (size_type_node,
12079 uhwi_to_double_int (bytes)))
12080 return build_int_cstu (size_type_node, bytes);
12083 return NULL_TREE;
12086 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12087 DEST, SRC, LEN, and SIZE are the arguments to the call.
12088 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12089 code of the builtin. If MAXLEN is not NULL, it is maximum length
12090 passed as third argument. */
12092 tree
12093 fold_builtin_memory_chk (location_t loc, tree fndecl,
12094 tree dest, tree src, tree len, tree size,
12095 tree maxlen, bool ignore,
12096 enum built_in_function fcode)
12098 tree fn;
12100 if (!validate_arg (dest, POINTER_TYPE)
12101 || !validate_arg (src,
12102 (fcode == BUILT_IN_MEMSET_CHK
12103 ? INTEGER_TYPE : POINTER_TYPE))
12104 || !validate_arg (len, INTEGER_TYPE)
12105 || !validate_arg (size, INTEGER_TYPE))
12106 return NULL_TREE;
12108 /* If SRC and DEST are the same (and not volatile), return DEST
12109 (resp. DEST+LEN for __mempcpy_chk). */
12110 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12112 if (fcode != BUILT_IN_MEMPCPY_CHK)
12113 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12114 dest, len);
12115 else
12117 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
12118 dest, len);
12119 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12123 if (! host_integerp (size, 1))
12124 return NULL_TREE;
12126 if (! integer_all_onesp (size))
12128 if (! host_integerp (len, 1))
12130 /* If LEN is not constant, try MAXLEN too.
12131 For MAXLEN only allow optimizing into non-_ocs function
12132 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12133 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12135 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12137 /* (void) __mempcpy_chk () can be optimized into
12138 (void) __memcpy_chk (). */
12139 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12140 if (!fn)
12141 return NULL_TREE;
12143 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12145 return NULL_TREE;
12148 else
12149 maxlen = len;
12151 if (tree_int_cst_lt (size, maxlen))
12152 return NULL_TREE;
12155 fn = NULL_TREE;
12156 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12157 mem{cpy,pcpy,move,set} is available. */
12158 switch (fcode)
12160 case BUILT_IN_MEMCPY_CHK:
12161 fn = built_in_decls[BUILT_IN_MEMCPY];
12162 break;
12163 case BUILT_IN_MEMPCPY_CHK:
12164 fn = built_in_decls[BUILT_IN_MEMPCPY];
12165 break;
12166 case BUILT_IN_MEMMOVE_CHK:
12167 fn = built_in_decls[BUILT_IN_MEMMOVE];
12168 break;
12169 case BUILT_IN_MEMSET_CHK:
12170 fn = built_in_decls[BUILT_IN_MEMSET];
12171 break;
12172 default:
12173 break;
12176 if (!fn)
12177 return NULL_TREE;
12179 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12182 /* Fold a call to the __st[rp]cpy_chk builtin.
12183 DEST, SRC, and SIZE are the arguments to the call.
12184 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12185 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12186 strings passed as second argument. */
12188 tree
12189 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12190 tree src, tree size,
12191 tree maxlen, bool ignore,
12192 enum built_in_function fcode)
12194 tree len, fn;
12196 if (!validate_arg (dest, POINTER_TYPE)
12197 || !validate_arg (src, POINTER_TYPE)
12198 || !validate_arg (size, INTEGER_TYPE))
12199 return NULL_TREE;
12201 /* If SRC and DEST are the same (and not volatile), return DEST. */
12202 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12203 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12205 if (! host_integerp (size, 1))
12206 return NULL_TREE;
12208 if (! integer_all_onesp (size))
12210 len = c_strlen (src, 1);
12211 if (! len || ! host_integerp (len, 1))
12213 /* If LEN is not constant, try MAXLEN too.
12214 For MAXLEN only allow optimizing into non-_ocs function
12215 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12216 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12218 if (fcode == BUILT_IN_STPCPY_CHK)
12220 if (! ignore)
12221 return NULL_TREE;
12223 /* If return value of __stpcpy_chk is ignored,
12224 optimize into __strcpy_chk. */
12225 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12226 if (!fn)
12227 return NULL_TREE;
12229 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12232 if (! len || TREE_SIDE_EFFECTS (len))
12233 return NULL_TREE;
12235 /* If c_strlen returned something, but not a constant,
12236 transform __strcpy_chk into __memcpy_chk. */
12237 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12238 if (!fn)
12239 return NULL_TREE;
12241 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12242 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12243 build_call_expr_loc (loc, fn, 4,
12244 dest, src, len, size));
12247 else
12248 maxlen = len;
12250 if (! tree_int_cst_lt (maxlen, size))
12251 return NULL_TREE;
12254 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12255 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12256 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12257 if (!fn)
12258 return NULL_TREE;
12260 return build_call_expr_loc (loc, fn, 2, dest, src);
12263 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12264 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12265 length passed as third argument. */
12267 tree
12268 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12269 tree len, tree size, tree maxlen)
12271 tree fn;
12273 if (!validate_arg (dest, POINTER_TYPE)
12274 || !validate_arg (src, POINTER_TYPE)
12275 || !validate_arg (len, INTEGER_TYPE)
12276 || !validate_arg (size, INTEGER_TYPE))
12277 return NULL_TREE;
12279 if (! host_integerp (size, 1))
12280 return NULL_TREE;
12282 if (! integer_all_onesp (size))
12284 if (! host_integerp (len, 1))
12286 /* If LEN is not constant, try MAXLEN too.
12287 For MAXLEN only allow optimizing into non-_ocs function
12288 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12289 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12290 return NULL_TREE;
12292 else
12293 maxlen = len;
12295 if (tree_int_cst_lt (size, maxlen))
12296 return NULL_TREE;
12299 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12300 fn = built_in_decls[BUILT_IN_STRNCPY];
12301 if (!fn)
12302 return NULL_TREE;
12304 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12307 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12308 are the arguments to the call. */
12310 static tree
12311 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12312 tree src, tree size)
12314 tree fn;
12315 const char *p;
12317 if (!validate_arg (dest, POINTER_TYPE)
12318 || !validate_arg (src, POINTER_TYPE)
12319 || !validate_arg (size, INTEGER_TYPE))
12320 return NULL_TREE;
12322 p = c_getstr (src);
12323 /* If the SRC parameter is "", return DEST. */
12324 if (p && *p == '\0')
12325 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12327 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12328 return NULL_TREE;
12330 /* If __builtin_strcat_chk is used, assume strcat is available. */
12331 fn = built_in_decls[BUILT_IN_STRCAT];
12332 if (!fn)
12333 return NULL_TREE;
12335 return build_call_expr_loc (loc, fn, 2, dest, src);
12338 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12339 LEN, and SIZE. */
12341 static tree
12342 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12343 tree dest, tree src, tree len, tree size)
12345 tree fn;
12346 const char *p;
12348 if (!validate_arg (dest, POINTER_TYPE)
12349 || !validate_arg (src, POINTER_TYPE)
12350 || !validate_arg (size, INTEGER_TYPE)
12351 || !validate_arg (size, INTEGER_TYPE))
12352 return NULL_TREE;
12354 p = c_getstr (src);
12355 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12356 if (p && *p == '\0')
12357 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12358 else if (integer_zerop (len))
12359 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12361 if (! host_integerp (size, 1))
12362 return NULL_TREE;
12364 if (! integer_all_onesp (size))
12366 tree src_len = c_strlen (src, 1);
12367 if (src_len
12368 && host_integerp (src_len, 1)
12369 && host_integerp (len, 1)
12370 && ! tree_int_cst_lt (len, src_len))
12372 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12373 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12374 if (!fn)
12375 return NULL_TREE;
12377 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12379 return NULL_TREE;
12382 /* If __builtin_strncat_chk is used, assume strncat is available. */
12383 fn = built_in_decls[BUILT_IN_STRNCAT];
12384 if (!fn)
12385 return NULL_TREE;
12387 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12390 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12391 a normal call should be emitted rather than expanding the function
12392 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12394 static tree
12395 fold_builtin_sprintf_chk (location_t loc, tree exp,
12396 enum built_in_function fcode)
12398 tree dest, size, len, fn, fmt, flag;
12399 const char *fmt_str;
12400 int nargs = call_expr_nargs (exp);
12402 /* Verify the required arguments in the original call. */
12403 if (nargs < 4)
12404 return NULL_TREE;
12405 dest = CALL_EXPR_ARG (exp, 0);
12406 if (!validate_arg (dest, POINTER_TYPE))
12407 return NULL_TREE;
12408 flag = CALL_EXPR_ARG (exp, 1);
12409 if (!validate_arg (flag, INTEGER_TYPE))
12410 return NULL_TREE;
12411 size = CALL_EXPR_ARG (exp, 2);
12412 if (!validate_arg (size, INTEGER_TYPE))
12413 return NULL_TREE;
12414 fmt = CALL_EXPR_ARG (exp, 3);
12415 if (!validate_arg (fmt, POINTER_TYPE))
12416 return NULL_TREE;
12418 if (! host_integerp (size, 1))
12419 return NULL_TREE;
12421 len = NULL_TREE;
12423 if (!init_target_chars ())
12424 return NULL_TREE;
12426 /* Check whether the format is a literal string constant. */
12427 fmt_str = c_getstr (fmt);
12428 if (fmt_str != NULL)
12430 /* If the format doesn't contain % args or %%, we know the size. */
12431 if (strchr (fmt_str, target_percent) == 0)
12433 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12434 len = build_int_cstu (size_type_node, strlen (fmt_str));
12436 /* If the format is "%s" and first ... argument is a string literal,
12437 we know the size too. */
12438 else if (fcode == BUILT_IN_SPRINTF_CHK
12439 && strcmp (fmt_str, target_percent_s) == 0)
12441 tree arg;
12443 if (nargs == 5)
12445 arg = CALL_EXPR_ARG (exp, 4);
12446 if (validate_arg (arg, POINTER_TYPE))
12448 len = c_strlen (arg, 1);
12449 if (! len || ! host_integerp (len, 1))
12450 len = NULL_TREE;
12456 if (! integer_all_onesp (size))
12458 if (! len || ! tree_int_cst_lt (len, size))
12459 return NULL_TREE;
12462 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12463 or if format doesn't contain % chars or is "%s". */
12464 if (! integer_zerop (flag))
12466 if (fmt_str == NULL)
12467 return NULL_TREE;
12468 if (strchr (fmt_str, target_percent) != NULL
12469 && strcmp (fmt_str, target_percent_s))
12470 return NULL_TREE;
12473 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12474 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12475 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12476 if (!fn)
12477 return NULL_TREE;
12479 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
12482 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12483 a normal call should be emitted rather than expanding the function
12484 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12485 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12486 passed as second argument. */
12488 tree
12489 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12490 enum built_in_function fcode)
12492 tree dest, size, len, fn, fmt, flag;
12493 const char *fmt_str;
12495 /* Verify the required arguments in the original call. */
12496 if (call_expr_nargs (exp) < 5)
12497 return NULL_TREE;
12498 dest = CALL_EXPR_ARG (exp, 0);
12499 if (!validate_arg (dest, POINTER_TYPE))
12500 return NULL_TREE;
12501 len = CALL_EXPR_ARG (exp, 1);
12502 if (!validate_arg (len, INTEGER_TYPE))
12503 return NULL_TREE;
12504 flag = CALL_EXPR_ARG (exp, 2);
12505 if (!validate_arg (flag, INTEGER_TYPE))
12506 return NULL_TREE;
12507 size = CALL_EXPR_ARG (exp, 3);
12508 if (!validate_arg (size, INTEGER_TYPE))
12509 return NULL_TREE;
12510 fmt = CALL_EXPR_ARG (exp, 4);
12511 if (!validate_arg (fmt, POINTER_TYPE))
12512 return NULL_TREE;
12514 if (! host_integerp (size, 1))
12515 return NULL_TREE;
12517 if (! integer_all_onesp (size))
12519 if (! host_integerp (len, 1))
12521 /* If LEN is not constant, try MAXLEN too.
12522 For MAXLEN only allow optimizing into non-_ocs function
12523 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12524 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12525 return NULL_TREE;
12527 else
12528 maxlen = len;
12530 if (tree_int_cst_lt (size, maxlen))
12531 return NULL_TREE;
12534 if (!init_target_chars ())
12535 return NULL_TREE;
12537 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12538 or if format doesn't contain % chars or is "%s". */
12539 if (! integer_zerop (flag))
12541 fmt_str = c_getstr (fmt);
12542 if (fmt_str == NULL)
12543 return NULL_TREE;
12544 if (strchr (fmt_str, target_percent) != NULL
12545 && strcmp (fmt_str, target_percent_s))
12546 return NULL_TREE;
12549 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12550 available. */
12551 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12552 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12553 if (!fn)
12554 return NULL_TREE;
12556 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
12559 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12560 FMT and ARG are the arguments to the call; we don't fold cases with
12561 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12563 Return NULL_TREE if no simplification was possible, otherwise return the
12564 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12565 code of the function to be simplified. */
12567 static tree
12568 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12569 tree arg, bool ignore,
12570 enum built_in_function fcode)
12572 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12573 const char *fmt_str = NULL;
12575 /* If the return value is used, don't do the transformation. */
12576 if (! ignore)
12577 return NULL_TREE;
12579 /* Verify the required arguments in the original call. */
12580 if (!validate_arg (fmt, POINTER_TYPE))
12581 return NULL_TREE;
12583 /* Check whether the format is a literal string constant. */
12584 fmt_str = c_getstr (fmt);
12585 if (fmt_str == NULL)
12586 return NULL_TREE;
12588 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12590 /* If we're using an unlocked function, assume the other
12591 unlocked functions exist explicitly. */
12592 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12593 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12595 else
12597 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12598 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12601 if (!init_target_chars ())
12602 return NULL_TREE;
12604 if (strcmp (fmt_str, target_percent_s) == 0
12605 || strchr (fmt_str, target_percent) == NULL)
12607 const char *str;
12609 if (strcmp (fmt_str, target_percent_s) == 0)
12611 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12612 return NULL_TREE;
12614 if (!arg || !validate_arg (arg, POINTER_TYPE))
12615 return NULL_TREE;
12617 str = c_getstr (arg);
12618 if (str == NULL)
12619 return NULL_TREE;
12621 else
12623 /* The format specifier doesn't contain any '%' characters. */
12624 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12625 && arg)
12626 return NULL_TREE;
12627 str = fmt_str;
12630 /* If the string was "", printf does nothing. */
12631 if (str[0] == '\0')
12632 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12634 /* If the string has length of 1, call putchar. */
12635 if (str[1] == '\0')
12637 /* Given printf("c"), (where c is any one character,)
12638 convert "c"[0] to an int and pass that to the replacement
12639 function. */
12640 newarg = build_int_cst (NULL_TREE, str[0]);
12641 if (fn_putchar)
12642 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12644 else
12646 /* If the string was "string\n", call puts("string"). */
12647 size_t len = strlen (str);
12648 if ((unsigned char)str[len - 1] == target_newline)
12650 /* Create a NUL-terminated string that's one char shorter
12651 than the original, stripping off the trailing '\n'. */
12652 char *newstr = XALLOCAVEC (char, len);
12653 memcpy (newstr, str, len - 1);
12654 newstr[len - 1] = 0;
12656 newarg = build_string_literal (len, newstr);
12657 if (fn_puts)
12658 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12660 else
12661 /* We'd like to arrange to call fputs(string,stdout) here,
12662 but we need stdout and don't have a way to get it yet. */
12663 return NULL_TREE;
12667 /* The other optimizations can be done only on the non-va_list variants. */
12668 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12669 return NULL_TREE;
12671 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12672 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12674 if (!arg || !validate_arg (arg, POINTER_TYPE))
12675 return NULL_TREE;
12676 if (fn_puts)
12677 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12680 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12681 else if (strcmp (fmt_str, target_percent_c) == 0)
12683 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12684 return NULL_TREE;
12685 if (fn_putchar)
12686 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12689 if (!call)
12690 return NULL_TREE;
12692 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12695 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12696 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12697 more than 3 arguments, and ARG may be null in the 2-argument case.
12699 Return NULL_TREE if no simplification was possible, otherwise return the
12700 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12701 code of the function to be simplified. */
12703 static tree
12704 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12705 tree fmt, tree arg, bool ignore,
12706 enum built_in_function fcode)
12708 tree fn_fputc, fn_fputs, call = NULL_TREE;
12709 const char *fmt_str = NULL;
12711 /* If the return value is used, don't do the transformation. */
12712 if (! ignore)
12713 return NULL_TREE;
12715 /* Verify the required arguments in the original call. */
12716 if (!validate_arg (fp, POINTER_TYPE))
12717 return NULL_TREE;
12718 if (!validate_arg (fmt, POINTER_TYPE))
12719 return NULL_TREE;
12721 /* Check whether the format is a literal string constant. */
12722 fmt_str = c_getstr (fmt);
12723 if (fmt_str == NULL)
12724 return NULL_TREE;
12726 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12728 /* If we're using an unlocked function, assume the other
12729 unlocked functions exist explicitly. */
12730 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12731 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12733 else
12735 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12736 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12739 if (!init_target_chars ())
12740 return NULL_TREE;
12742 /* If the format doesn't contain % args or %%, use strcpy. */
12743 if (strchr (fmt_str, target_percent) == NULL)
12745 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12746 && arg)
12747 return NULL_TREE;
12749 /* If the format specifier was "", fprintf does nothing. */
12750 if (fmt_str[0] == '\0')
12752 /* If FP has side-effects, just wait until gimplification is
12753 done. */
12754 if (TREE_SIDE_EFFECTS (fp))
12755 return NULL_TREE;
12757 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12760 /* When "string" doesn't contain %, replace all cases of
12761 fprintf (fp, string) with fputs (string, fp). The fputs
12762 builtin will take care of special cases like length == 1. */
12763 if (fn_fputs)
12764 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12767 /* The other optimizations can be done only on the non-va_list variants. */
12768 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12769 return NULL_TREE;
12771 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12772 else if (strcmp (fmt_str, target_percent_s) == 0)
12774 if (!arg || !validate_arg (arg, POINTER_TYPE))
12775 return NULL_TREE;
12776 if (fn_fputs)
12777 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12780 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12781 else if (strcmp (fmt_str, target_percent_c) == 0)
12783 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12784 return NULL_TREE;
12785 if (fn_fputc)
12786 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12789 if (!call)
12790 return NULL_TREE;
12791 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12794 /* Initialize format string characters in the target charset. */
12796 static bool
12797 init_target_chars (void)
12799 static bool init;
12800 if (!init)
12802 target_newline = lang_hooks.to_target_charset ('\n');
12803 target_percent = lang_hooks.to_target_charset ('%');
12804 target_c = lang_hooks.to_target_charset ('c');
12805 target_s = lang_hooks.to_target_charset ('s');
12806 if (target_newline == 0 || target_percent == 0 || target_c == 0
12807 || target_s == 0)
12808 return false;
12810 target_percent_c[0] = target_percent;
12811 target_percent_c[1] = target_c;
12812 target_percent_c[2] = '\0';
12814 target_percent_s[0] = target_percent;
12815 target_percent_s[1] = target_s;
12816 target_percent_s[2] = '\0';
12818 target_percent_s_newline[0] = target_percent;
12819 target_percent_s_newline[1] = target_s;
12820 target_percent_s_newline[2] = target_newline;
12821 target_percent_s_newline[3] = '\0';
12823 init = true;
12825 return true;
12828 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12829 and no overflow/underflow occurred. INEXACT is true if M was not
12830 exactly calculated. TYPE is the tree type for the result. This
12831 function assumes that you cleared the MPFR flags and then
12832 calculated M to see if anything subsequently set a flag prior to
12833 entering this function. Return NULL_TREE if any checks fail. */
12835 static tree
12836 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12838 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12839 overflow/underflow occurred. If -frounding-math, proceed iff the
12840 result of calling FUNC was exact. */
12841 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12842 && (!flag_rounding_math || !inexact))
12844 REAL_VALUE_TYPE rr;
12846 real_from_mpfr (&rr, m, type, GMP_RNDN);
12847 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12848 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12849 but the mpft_t is not, then we underflowed in the
12850 conversion. */
12851 if (real_isfinite (&rr)
12852 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12854 REAL_VALUE_TYPE rmode;
12856 real_convert (&rmode, TYPE_MODE (type), &rr);
12857 /* Proceed iff the specified mode can hold the value. */
12858 if (real_identical (&rmode, &rr))
12859 return build_real (type, rmode);
12862 return NULL_TREE;
12865 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12866 number and no overflow/underflow occurred. INEXACT is true if M
12867 was not exactly calculated. TYPE is the tree type for the result.
12868 This function assumes that you cleared the MPFR flags and then
12869 calculated M to see if anything subsequently set a flag prior to
12870 entering this function. Return NULL_TREE if any checks fail, if
12871 FORCE_CONVERT is true, then bypass the checks. */
12873 static tree
12874 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12876 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12877 overflow/underflow occurred. If -frounding-math, proceed iff the
12878 result of calling FUNC was exact. */
12879 if (force_convert
12880 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12881 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12882 && (!flag_rounding_math || !inexact)))
12884 REAL_VALUE_TYPE re, im;
12886 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12887 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12888 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12889 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12890 but the mpft_t is not, then we underflowed in the
12891 conversion. */
12892 if (force_convert
12893 || (real_isfinite (&re) && real_isfinite (&im)
12894 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12895 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12897 REAL_VALUE_TYPE re_mode, im_mode;
12899 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12900 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12901 /* Proceed iff the specified mode can hold the value. */
12902 if (force_convert
12903 || (real_identical (&re_mode, &re)
12904 && real_identical (&im_mode, &im)))
12905 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12906 build_real (TREE_TYPE (type), im_mode));
12909 return NULL_TREE;
12912 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12913 FUNC on it and return the resulting value as a tree with type TYPE.
12914 If MIN and/or MAX are not NULL, then the supplied ARG must be
12915 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12916 acceptable values, otherwise they are not. The mpfr precision is
12917 set to the precision of TYPE. We assume that function FUNC returns
12918 zero if the result could be calculated exactly within the requested
12919 precision. */
12921 static tree
12922 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12923 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12924 bool inclusive)
12926 tree result = NULL_TREE;
12928 STRIP_NOPS (arg);
12930 /* To proceed, MPFR must exactly represent the target floating point
12931 format, which only happens when the target base equals two. */
12932 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12933 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12935 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12937 if (real_isfinite (ra)
12938 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12939 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12941 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12942 const int prec = fmt->p;
12943 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12944 int inexact;
12945 mpfr_t m;
12947 mpfr_init2 (m, prec);
12948 mpfr_from_real (m, ra, GMP_RNDN);
12949 mpfr_clear_flags ();
12950 inexact = func (m, m, rnd);
12951 result = do_mpfr_ckconv (m, type, inexact);
12952 mpfr_clear (m);
12956 return result;
12959 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12960 FUNC on it and return the resulting value as a tree with type TYPE.
12961 The mpfr precision is set to the precision of TYPE. We assume that
12962 function FUNC returns zero if the result could be calculated
12963 exactly within the requested precision. */
12965 static tree
12966 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12967 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12969 tree result = NULL_TREE;
12971 STRIP_NOPS (arg1);
12972 STRIP_NOPS (arg2);
12974 /* To proceed, MPFR must exactly represent the target floating point
12975 format, which only happens when the target base equals two. */
12976 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12977 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12978 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12980 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12981 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12983 if (real_isfinite (ra1) && real_isfinite (ra2))
12985 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12986 const int prec = fmt->p;
12987 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12988 int inexact;
12989 mpfr_t m1, m2;
12991 mpfr_inits2 (prec, m1, m2, NULL);
12992 mpfr_from_real (m1, ra1, GMP_RNDN);
12993 mpfr_from_real (m2, ra2, GMP_RNDN);
12994 mpfr_clear_flags ();
12995 inexact = func (m1, m1, m2, rnd);
12996 result = do_mpfr_ckconv (m1, type, inexact);
12997 mpfr_clears (m1, m2, NULL);
13001 return result;
13004 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13005 FUNC on it and return the resulting value as a tree with type TYPE.
13006 The mpfr precision is set to the precision of TYPE. We assume that
13007 function FUNC returns zero if the result could be calculated
13008 exactly within the requested precision. */
13010 static tree
13011 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13012 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13014 tree result = NULL_TREE;
13016 STRIP_NOPS (arg1);
13017 STRIP_NOPS (arg2);
13018 STRIP_NOPS (arg3);
13020 /* To proceed, MPFR must exactly represent the target floating point
13021 format, which only happens when the target base equals two. */
13022 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13023 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13024 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13025 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13027 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13028 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13029 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13031 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13033 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13034 const int prec = fmt->p;
13035 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13036 int inexact;
13037 mpfr_t m1, m2, m3;
13039 mpfr_inits2 (prec, m1, m2, m3, NULL);
13040 mpfr_from_real (m1, ra1, GMP_RNDN);
13041 mpfr_from_real (m2, ra2, GMP_RNDN);
13042 mpfr_from_real (m3, ra3, GMP_RNDN);
13043 mpfr_clear_flags ();
13044 inexact = func (m1, m1, m2, m3, rnd);
13045 result = do_mpfr_ckconv (m1, type, inexact);
13046 mpfr_clears (m1, m2, m3, NULL);
13050 return result;
13053 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13054 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13055 If ARG_SINP and ARG_COSP are NULL then the result is returned
13056 as a complex value.
13057 The type is taken from the type of ARG and is used for setting the
13058 precision of the calculation and results. */
13060 static tree
13061 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13063 tree const type = TREE_TYPE (arg);
13064 tree result = NULL_TREE;
13066 STRIP_NOPS (arg);
13068 /* To proceed, MPFR must exactly represent the target floating point
13069 format, which only happens when the target base equals two. */
13070 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13071 && TREE_CODE (arg) == REAL_CST
13072 && !TREE_OVERFLOW (arg))
13074 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13076 if (real_isfinite (ra))
13078 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13079 const int prec = fmt->p;
13080 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13081 tree result_s, result_c;
13082 int inexact;
13083 mpfr_t m, ms, mc;
13085 mpfr_inits2 (prec, m, ms, mc, NULL);
13086 mpfr_from_real (m, ra, GMP_RNDN);
13087 mpfr_clear_flags ();
13088 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13089 result_s = do_mpfr_ckconv (ms, type, inexact);
13090 result_c = do_mpfr_ckconv (mc, type, inexact);
13091 mpfr_clears (m, ms, mc, NULL);
13092 if (result_s && result_c)
13094 /* If we are to return in a complex value do so. */
13095 if (!arg_sinp && !arg_cosp)
13096 return build_complex (build_complex_type (type),
13097 result_c, result_s);
13099 /* Dereference the sin/cos pointer arguments. */
13100 arg_sinp = build_fold_indirect_ref (arg_sinp);
13101 arg_cosp = build_fold_indirect_ref (arg_cosp);
13102 /* Proceed if valid pointer type were passed in. */
13103 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13104 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13106 /* Set the values. */
13107 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13108 result_s);
13109 TREE_SIDE_EFFECTS (result_s) = 1;
13110 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13111 result_c);
13112 TREE_SIDE_EFFECTS (result_c) = 1;
13113 /* Combine the assignments into a compound expr. */
13114 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13115 result_s, result_c));
13120 return result;
13123 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13124 two-argument mpfr order N Bessel function FUNC on them and return
13125 the resulting value as a tree with type TYPE. The mpfr precision
13126 is set to the precision of TYPE. We assume that function FUNC
13127 returns zero if the result could be calculated exactly within the
13128 requested precision. */
13129 static tree
13130 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13131 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13132 const REAL_VALUE_TYPE *min, bool inclusive)
13134 tree result = NULL_TREE;
13136 STRIP_NOPS (arg1);
13137 STRIP_NOPS (arg2);
13139 /* To proceed, MPFR must exactly represent the target floating point
13140 format, which only happens when the target base equals two. */
13141 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13142 && host_integerp (arg1, 0)
13143 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13145 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13146 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13148 if (n == (long)n
13149 && real_isfinite (ra)
13150 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13152 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13153 const int prec = fmt->p;
13154 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13155 int inexact;
13156 mpfr_t m;
13158 mpfr_init2 (m, prec);
13159 mpfr_from_real (m, ra, GMP_RNDN);
13160 mpfr_clear_flags ();
13161 inexact = func (m, n, m, rnd);
13162 result = do_mpfr_ckconv (m, type, inexact);
13163 mpfr_clear (m);
13167 return result;
13170 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13171 the pointer *(ARG_QUO) and return the result. The type is taken
13172 from the type of ARG0 and is used for setting the precision of the
13173 calculation and results. */
13175 static tree
13176 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13178 tree const type = TREE_TYPE (arg0);
13179 tree result = NULL_TREE;
13181 STRIP_NOPS (arg0);
13182 STRIP_NOPS (arg1);
13184 /* To proceed, MPFR must exactly represent the target floating point
13185 format, which only happens when the target base equals two. */
13186 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13187 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13188 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13190 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13191 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13193 if (real_isfinite (ra0) && real_isfinite (ra1))
13195 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13196 const int prec = fmt->p;
13197 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13198 tree result_rem;
13199 long integer_quo;
13200 mpfr_t m0, m1;
13202 mpfr_inits2 (prec, m0, m1, NULL);
13203 mpfr_from_real (m0, ra0, GMP_RNDN);
13204 mpfr_from_real (m1, ra1, GMP_RNDN);
13205 mpfr_clear_flags ();
13206 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13207 /* Remquo is independent of the rounding mode, so pass
13208 inexact=0 to do_mpfr_ckconv(). */
13209 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13210 mpfr_clears (m0, m1, NULL);
13211 if (result_rem)
13213 /* MPFR calculates quo in the host's long so it may
13214 return more bits in quo than the target int can hold
13215 if sizeof(host long) > sizeof(target int). This can
13216 happen even for native compilers in LP64 mode. In
13217 these cases, modulo the quo value with the largest
13218 number that the target int can hold while leaving one
13219 bit for the sign. */
13220 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13221 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13223 /* Dereference the quo pointer argument. */
13224 arg_quo = build_fold_indirect_ref (arg_quo);
13225 /* Proceed iff a valid pointer type was passed in. */
13226 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13228 /* Set the value. */
13229 tree result_quo = fold_build2 (MODIFY_EXPR,
13230 TREE_TYPE (arg_quo), arg_quo,
13231 build_int_cst (NULL, integer_quo));
13232 TREE_SIDE_EFFECTS (result_quo) = 1;
13233 /* Combine the quo assignment with the rem. */
13234 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13235 result_quo, result_rem));
13240 return result;
13243 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13244 resulting value as a tree with type TYPE. The mpfr precision is
13245 set to the precision of TYPE. We assume that this mpfr function
13246 returns zero if the result could be calculated exactly within the
13247 requested precision. In addition, the integer pointer represented
13248 by ARG_SG will be dereferenced and set to the appropriate signgam
13249 (-1,1) value. */
13251 static tree
13252 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13254 tree result = NULL_TREE;
13256 STRIP_NOPS (arg);
13258 /* To proceed, MPFR must exactly represent the target floating point
13259 format, which only happens when the target base equals two. Also
13260 verify ARG is a constant and that ARG_SG is an int pointer. */
13261 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13262 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13263 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13264 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13266 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13268 /* In addition to NaN and Inf, the argument cannot be zero or a
13269 negative integer. */
13270 if (real_isfinite (ra)
13271 && ra->cl != rvc_zero
13272 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13274 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13275 const int prec = fmt->p;
13276 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13277 int inexact, sg;
13278 mpfr_t m;
13279 tree result_lg;
13281 mpfr_init2 (m, prec);
13282 mpfr_from_real (m, ra, GMP_RNDN);
13283 mpfr_clear_flags ();
13284 inexact = mpfr_lgamma (m, &sg, m, rnd);
13285 result_lg = do_mpfr_ckconv (m, type, inexact);
13286 mpfr_clear (m);
13287 if (result_lg)
13289 tree result_sg;
13291 /* Dereference the arg_sg pointer argument. */
13292 arg_sg = build_fold_indirect_ref (arg_sg);
13293 /* Assign the signgam value into *arg_sg. */
13294 result_sg = fold_build2 (MODIFY_EXPR,
13295 TREE_TYPE (arg_sg), arg_sg,
13296 build_int_cst (NULL, sg));
13297 TREE_SIDE_EFFECTS (result_sg) = 1;
13298 /* Combine the signgam assignment with the lgamma result. */
13299 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13300 result_sg, result_lg));
13305 return result;
13308 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13309 function FUNC on it and return the resulting value as a tree with
13310 type TYPE. The mpfr precision is set to the precision of TYPE. We
13311 assume that function FUNC returns zero if the result could be
13312 calculated exactly within the requested precision. */
13314 static tree
13315 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13317 tree result = NULL_TREE;
13319 STRIP_NOPS (arg);
13321 /* To proceed, MPFR must exactly represent the target floating point
13322 format, which only happens when the target base equals two. */
13323 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13324 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13325 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13327 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13328 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13330 if (real_isfinite (re) && real_isfinite (im))
13332 const struct real_format *const fmt =
13333 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13334 const int prec = fmt->p;
13335 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13336 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13337 int inexact;
13338 mpc_t m;
13340 mpc_init2 (m, prec);
13341 mpfr_from_real (mpc_realref(m), re, rnd);
13342 mpfr_from_real (mpc_imagref(m), im, rnd);
13343 mpfr_clear_flags ();
13344 inexact = func (m, m, crnd);
13345 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13346 mpc_clear (m);
13350 return result;
13353 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13354 mpc function FUNC on it and return the resulting value as a tree
13355 with type TYPE. The mpfr precision is set to the precision of
13356 TYPE. We assume that function FUNC returns zero if the result
13357 could be calculated exactly within the requested precision. If
13358 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13359 in the arguments and/or results. */
13361 tree
13362 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13363 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13365 tree result = NULL_TREE;
13367 STRIP_NOPS (arg0);
13368 STRIP_NOPS (arg1);
13370 /* To proceed, MPFR must exactly represent the target floating point
13371 format, which only happens when the target base equals two. */
13372 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13373 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13374 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13375 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13376 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13378 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13379 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13380 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13381 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13383 if (do_nonfinite
13384 || (real_isfinite (re0) && real_isfinite (im0)
13385 && real_isfinite (re1) && real_isfinite (im1)))
13387 const struct real_format *const fmt =
13388 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13389 const int prec = fmt->p;
13390 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13391 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13392 int inexact;
13393 mpc_t m0, m1;
13395 mpc_init2 (m0, prec);
13396 mpc_init2 (m1, prec);
13397 mpfr_from_real (mpc_realref(m0), re0, rnd);
13398 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13399 mpfr_from_real (mpc_realref(m1), re1, rnd);
13400 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13401 mpfr_clear_flags ();
13402 inexact = func (m0, m0, m1, crnd);
13403 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13404 mpc_clear (m0);
13405 mpc_clear (m1);
13409 return result;
13412 /* FIXME tuples.
13413 The functions below provide an alternate interface for folding
13414 builtin function calls presented as GIMPLE_CALL statements rather
13415 than as CALL_EXPRs. The folded result is still expressed as a
13416 tree. There is too much code duplication in the handling of
13417 varargs functions, and a more intrusive re-factoring would permit
13418 better sharing of code between the tree and statement-based
13419 versions of these functions. */
13421 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13422 along with N new arguments specified as the "..." parameters. SKIP
13423 is the number of arguments in STMT to be omitted. This function is used
13424 to do varargs-to-varargs transformations. */
13426 static tree
13427 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13429 int oldnargs = gimple_call_num_args (stmt);
13430 int nargs = oldnargs - skip + n;
13431 tree fntype = TREE_TYPE (fndecl);
13432 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13433 tree *buffer;
13434 int i, j;
13435 va_list ap;
13436 location_t loc = gimple_location (stmt);
13438 buffer = XALLOCAVEC (tree, nargs);
13439 va_start (ap, n);
13440 for (i = 0; i < n; i++)
13441 buffer[i] = va_arg (ap, tree);
13442 va_end (ap);
13443 for (j = skip; j < oldnargs; j++, i++)
13444 buffer[i] = gimple_call_arg (stmt, j);
13446 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13449 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13450 a normal call should be emitted rather than expanding the function
13451 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13453 static tree
13454 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13456 tree dest, size, len, fn, fmt, flag;
13457 const char *fmt_str;
13458 int nargs = gimple_call_num_args (stmt);
13460 /* Verify the required arguments in the original call. */
13461 if (nargs < 4)
13462 return NULL_TREE;
13463 dest = gimple_call_arg (stmt, 0);
13464 if (!validate_arg (dest, POINTER_TYPE))
13465 return NULL_TREE;
13466 flag = gimple_call_arg (stmt, 1);
13467 if (!validate_arg (flag, INTEGER_TYPE))
13468 return NULL_TREE;
13469 size = gimple_call_arg (stmt, 2);
13470 if (!validate_arg (size, INTEGER_TYPE))
13471 return NULL_TREE;
13472 fmt = gimple_call_arg (stmt, 3);
13473 if (!validate_arg (fmt, POINTER_TYPE))
13474 return NULL_TREE;
13476 if (! host_integerp (size, 1))
13477 return NULL_TREE;
13479 len = NULL_TREE;
13481 if (!init_target_chars ())
13482 return NULL_TREE;
13484 /* Check whether the format is a literal string constant. */
13485 fmt_str = c_getstr (fmt);
13486 if (fmt_str != NULL)
13488 /* If the format doesn't contain % args or %%, we know the size. */
13489 if (strchr (fmt_str, target_percent) == 0)
13491 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13492 len = build_int_cstu (size_type_node, strlen (fmt_str));
13494 /* If the format is "%s" and first ... argument is a string literal,
13495 we know the size too. */
13496 else if (fcode == BUILT_IN_SPRINTF_CHK
13497 && strcmp (fmt_str, target_percent_s) == 0)
13499 tree arg;
13501 if (nargs == 5)
13503 arg = gimple_call_arg (stmt, 4);
13504 if (validate_arg (arg, POINTER_TYPE))
13506 len = c_strlen (arg, 1);
13507 if (! len || ! host_integerp (len, 1))
13508 len = NULL_TREE;
13514 if (! integer_all_onesp (size))
13516 if (! len || ! tree_int_cst_lt (len, size))
13517 return NULL_TREE;
13520 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13521 or if format doesn't contain % chars or is "%s". */
13522 if (! integer_zerop (flag))
13524 if (fmt_str == NULL)
13525 return NULL_TREE;
13526 if (strchr (fmt_str, target_percent) != NULL
13527 && strcmp (fmt_str, target_percent_s))
13528 return NULL_TREE;
13531 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13532 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13533 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13534 if (!fn)
13535 return NULL_TREE;
13537 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13540 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13541 a normal call should be emitted rather than expanding the function
13542 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13543 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13544 passed as second argument. */
13546 tree
13547 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13548 enum built_in_function fcode)
13550 tree dest, size, len, fn, fmt, flag;
13551 const char *fmt_str;
13553 /* Verify the required arguments in the original call. */
13554 if (gimple_call_num_args (stmt) < 5)
13555 return NULL_TREE;
13556 dest = gimple_call_arg (stmt, 0);
13557 if (!validate_arg (dest, POINTER_TYPE))
13558 return NULL_TREE;
13559 len = gimple_call_arg (stmt, 1);
13560 if (!validate_arg (len, INTEGER_TYPE))
13561 return NULL_TREE;
13562 flag = gimple_call_arg (stmt, 2);
13563 if (!validate_arg (flag, INTEGER_TYPE))
13564 return NULL_TREE;
13565 size = gimple_call_arg (stmt, 3);
13566 if (!validate_arg (size, INTEGER_TYPE))
13567 return NULL_TREE;
13568 fmt = gimple_call_arg (stmt, 4);
13569 if (!validate_arg (fmt, POINTER_TYPE))
13570 return NULL_TREE;
13572 if (! host_integerp (size, 1))
13573 return NULL_TREE;
13575 if (! integer_all_onesp (size))
13577 if (! host_integerp (len, 1))
13579 /* If LEN is not constant, try MAXLEN too.
13580 For MAXLEN only allow optimizing into non-_ocs function
13581 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13582 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13583 return NULL_TREE;
13585 else
13586 maxlen = len;
13588 if (tree_int_cst_lt (size, maxlen))
13589 return NULL_TREE;
13592 if (!init_target_chars ())
13593 return NULL_TREE;
13595 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13596 or if format doesn't contain % chars or is "%s". */
13597 if (! integer_zerop (flag))
13599 fmt_str = c_getstr (fmt);
13600 if (fmt_str == NULL)
13601 return NULL_TREE;
13602 if (strchr (fmt_str, target_percent) != NULL
13603 && strcmp (fmt_str, target_percent_s))
13604 return NULL_TREE;
13607 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13608 available. */
13609 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13610 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13611 if (!fn)
13612 return NULL_TREE;
13614 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13617 /* Builtins with folding operations that operate on "..." arguments
13618 need special handling; we need to store the arguments in a convenient
13619 data structure before attempting any folding. Fortunately there are
13620 only a few builtins that fall into this category. FNDECL is the
13621 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13622 result of the function call is ignored. */
13624 static tree
13625 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13626 bool ignore ATTRIBUTE_UNUSED)
13628 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13629 tree ret = NULL_TREE;
13631 switch (fcode)
13633 case BUILT_IN_SPRINTF_CHK:
13634 case BUILT_IN_VSPRINTF_CHK:
13635 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13636 break;
13638 case BUILT_IN_SNPRINTF_CHK:
13639 case BUILT_IN_VSNPRINTF_CHK:
13640 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13642 default:
13643 break;
13645 if (ret)
13647 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13648 TREE_NO_WARNING (ret) = 1;
13649 return ret;
13651 return NULL_TREE;
13654 /* A wrapper function for builtin folding that prevents warnings for
13655 "statement without effect" and the like, caused by removing the
13656 call node earlier than the warning is generated. */
13658 tree
13659 fold_call_stmt (gimple stmt, bool ignore)
13661 tree ret = NULL_TREE;
13662 tree fndecl = gimple_call_fndecl (stmt);
13663 location_t loc = gimple_location (stmt);
13664 if (fndecl
13665 && TREE_CODE (fndecl) == FUNCTION_DECL
13666 && DECL_BUILT_IN (fndecl)
13667 && !gimple_call_va_arg_pack_p (stmt))
13669 int nargs = gimple_call_num_args (stmt);
13671 if (avoid_folding_inline_builtin (fndecl))
13672 return NULL_TREE;
13673 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13675 return targetm.fold_builtin (fndecl, nargs,
13676 (nargs > 0
13677 ? gimple_call_arg_ptr (stmt, 0)
13678 : &error_mark_node), ignore);
13680 else
13682 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13684 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13685 int i;
13686 for (i = 0; i < nargs; i++)
13687 args[i] = gimple_call_arg (stmt, i);
13688 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13690 if (!ret)
13691 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13692 if (ret)
13694 /* Propagate location information from original call to
13695 expansion of builtin. Otherwise things like
13696 maybe_emit_chk_warning, that operate on the expansion
13697 of a builtin, will use the wrong location information. */
13698 if (gimple_has_location (stmt))
13700 tree realret = ret;
13701 if (TREE_CODE (ret) == NOP_EXPR)
13702 realret = TREE_OPERAND (ret, 0);
13703 if (CAN_HAVE_LOCATION_P (realret)
13704 && !EXPR_HAS_LOCATION (realret))
13705 SET_EXPR_LOCATION (realret, loc);
13706 return realret;
13708 return ret;
13712 return NULL_TREE;
13715 /* Look up the function in built_in_decls that corresponds to DECL
13716 and set ASMSPEC as its user assembler name. DECL must be a
13717 function decl that declares a builtin. */
13719 void
13720 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13722 tree builtin;
13723 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13724 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13725 && asmspec != 0);
13727 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13728 set_user_assembler_name (builtin, asmspec);
13729 switch (DECL_FUNCTION_CODE (decl))
13731 case BUILT_IN_MEMCPY:
13732 init_block_move_fn (asmspec);
13733 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13734 break;
13735 case BUILT_IN_MEMSET:
13736 init_block_clear_fn (asmspec);
13737 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13738 break;
13739 case BUILT_IN_MEMMOVE:
13740 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13741 break;
13742 case BUILT_IN_MEMCMP:
13743 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13744 break;
13745 case BUILT_IN_ABORT:
13746 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13747 break;
13748 case BUILT_IN_FFS:
13749 if (INT_TYPE_SIZE < BITS_PER_WORD)
13751 set_user_assembler_libfunc ("ffs", asmspec);
13752 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13753 MODE_INT, 0), "ffs");
13755 break;
13756 default:
13757 break;
13761 /* Return true if DECL is a builtin that expands to a constant or similarly
13762 simple code. */
13763 bool
13764 is_simple_builtin (tree decl)
13766 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13767 switch (DECL_FUNCTION_CODE (decl))
13769 /* Builtins that expand to constants. */
13770 case BUILT_IN_CONSTANT_P:
13771 case BUILT_IN_EXPECT:
13772 case BUILT_IN_OBJECT_SIZE:
13773 case BUILT_IN_UNREACHABLE:
13774 /* Simple register moves or loads from stack. */
13775 case BUILT_IN_RETURN_ADDRESS:
13776 case BUILT_IN_EXTRACT_RETURN_ADDR:
13777 case BUILT_IN_FROB_RETURN_ADDR:
13778 case BUILT_IN_RETURN:
13779 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
13780 case BUILT_IN_FRAME_ADDRESS:
13781 case BUILT_IN_VA_END:
13782 case BUILT_IN_STACK_SAVE:
13783 case BUILT_IN_STACK_RESTORE:
13784 /* Exception state returns or moves registers around. */
13785 case BUILT_IN_EH_FILTER:
13786 case BUILT_IN_EH_POINTER:
13787 case BUILT_IN_EH_COPY_VALUES:
13788 return true;
13790 default:
13791 return false;
13794 return false;
13797 /* Return true if DECL is a builtin that is not expensive, i.e., they are
13798 most probably expanded inline into reasonably simple code. This is a
13799 superset of is_simple_builtin. */
13800 bool
13801 is_inexpensive_builtin (tree decl)
13803 if (!decl)
13804 return false;
13805 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
13806 return true;
13807 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13808 switch (DECL_FUNCTION_CODE (decl))
13810 case BUILT_IN_ABS:
13811 case BUILT_IN_ALLOCA:
13812 case BUILT_IN_BSWAP32:
13813 case BUILT_IN_BSWAP64:
13814 case BUILT_IN_CLZ:
13815 case BUILT_IN_CLZIMAX:
13816 case BUILT_IN_CLZL:
13817 case BUILT_IN_CLZLL:
13818 case BUILT_IN_CTZ:
13819 case BUILT_IN_CTZIMAX:
13820 case BUILT_IN_CTZL:
13821 case BUILT_IN_CTZLL:
13822 case BUILT_IN_FFS:
13823 case BUILT_IN_FFSIMAX:
13824 case BUILT_IN_FFSL:
13825 case BUILT_IN_FFSLL:
13826 case BUILT_IN_IMAXABS:
13827 case BUILT_IN_FINITE:
13828 case BUILT_IN_FINITEF:
13829 case BUILT_IN_FINITEL:
13830 case BUILT_IN_FINITED32:
13831 case BUILT_IN_FINITED64:
13832 case BUILT_IN_FINITED128:
13833 case BUILT_IN_FPCLASSIFY:
13834 case BUILT_IN_ISFINITE:
13835 case BUILT_IN_ISINF_SIGN:
13836 case BUILT_IN_ISINF:
13837 case BUILT_IN_ISINFF:
13838 case BUILT_IN_ISINFL:
13839 case BUILT_IN_ISINFD32:
13840 case BUILT_IN_ISINFD64:
13841 case BUILT_IN_ISINFD128:
13842 case BUILT_IN_ISNAN:
13843 case BUILT_IN_ISNANF:
13844 case BUILT_IN_ISNANL:
13845 case BUILT_IN_ISNAND32:
13846 case BUILT_IN_ISNAND64:
13847 case BUILT_IN_ISNAND128:
13848 case BUILT_IN_ISNORMAL:
13849 case BUILT_IN_ISGREATER:
13850 case BUILT_IN_ISGREATEREQUAL:
13851 case BUILT_IN_ISLESS:
13852 case BUILT_IN_ISLESSEQUAL:
13853 case BUILT_IN_ISLESSGREATER:
13854 case BUILT_IN_ISUNORDERED:
13855 case BUILT_IN_VA_ARG_PACK:
13856 case BUILT_IN_VA_ARG_PACK_LEN:
13857 case BUILT_IN_VA_COPY:
13858 case BUILT_IN_TRAP:
13859 case BUILT_IN_SAVEREGS:
13860 case BUILT_IN_POPCOUNTL:
13861 case BUILT_IN_POPCOUNTLL:
13862 case BUILT_IN_POPCOUNTIMAX:
13863 case BUILT_IN_POPCOUNT:
13864 case BUILT_IN_PARITYL:
13865 case BUILT_IN_PARITYLL:
13866 case BUILT_IN_PARITYIMAX:
13867 case BUILT_IN_PARITY:
13868 case BUILT_IN_LABS:
13869 case BUILT_IN_LLABS:
13870 case BUILT_IN_PREFETCH:
13871 return true;
13873 default:
13874 return is_simple_builtin (decl);
13877 return false;