1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
56 #endif
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 #endif
61 #ifdef HAVE_mpc
62 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
63 #endif
65 /* Define the names of the builtin function types and codes. */
66 const char *const built_in_class_names[4]
67 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
69 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
70 const char * built_in_names[(int) END_BUILTINS] =
71 {
72 #include "builtins.def"
73 };
74 #undef DEF_BUILTIN
76 /* Set up an array of _DECL trees, making sure each element is
77 initialized to NULL_TREE. */
78 tree built_in_decls[(int) END_BUILTINS];
79 /* Declarations used when constructing the builtin implicitly in the compiler.
80 It may be NULL_TREE when this is invalid (for instance, the runtime is not
81 required to implement the function call in all cases). */
82 tree implicit_built_in_decls[(int) END_BUILTINS];
84 static const char *c_getstr (tree);
85 static rtx c_readstr (const char *, enum machine_mode);
86 static int target_char_cast (tree, char *);
87 static rtx get_memory_rtx (tree, tree);
88 static int apply_args_size (void);
89 static int apply_result_size (void);
90 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
91 static rtx result_vector (int, rtx);
92 #endif
93 static void expand_builtin_update_setjmp_buf (rtx);
94 static void expand_builtin_prefetch (tree);
95 static rtx expand_builtin_apply_args (void);
96 static rtx expand_builtin_apply_args_1 (void);
97 static rtx expand_builtin_apply (rtx, rtx, rtx);
98 static void expand_builtin_return (rtx);
99 static enum type_class type_to_class (tree);
100 static rtx expand_builtin_classify_type (tree);
101 static void expand_errno_check (tree, rtx);
102 static rtx expand_builtin_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
105 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
106 static rtx expand_builtin_sincos (tree);
107 static rtx expand_builtin_cexpi (tree, rtx, rtx);
108 static rtx expand_builtin_int_roundingfn (tree, rtx);
109 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
110 static rtx expand_builtin_args_info (tree);
111 static rtx expand_builtin_next_arg (void);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
119 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
120 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
121 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
127 enum machine_mode, int);
128 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
129 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
130 enum machine_mode, int);
131 static rtx expand_builtin_bcopy (tree, int);
132 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
133 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
134 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
136 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
137 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
139 static rtx expand_builtin_bzero (tree);
140 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
141 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
142 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
143 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
144 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
145 static rtx expand_builtin_alloca (tree, rtx);
146 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
147 static rtx expand_builtin_frame_address (tree, tree);
148 static rtx expand_builtin_fputs (tree, rtx, bool);
149 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
150 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
151 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
152 static tree stabilize_va_list (tree, int);
153 static rtx expand_builtin_expect (tree, rtx);
154 static tree fold_builtin_constant_p (tree);
155 static tree fold_builtin_expect (tree, tree);
156 static tree fold_builtin_classify_type (tree);
157 static tree fold_builtin_strlen (tree);
158 static tree fold_builtin_inf (tree, int);
159 static tree fold_builtin_nan (tree, tree, int);
160 static tree rewrite_call_expr (tree, int, tree, int, ...);
161 static bool validate_arg (const_tree, enum tree_code code);
162 static bool integer_valued_real_p (tree);
163 static tree fold_trunc_transparent_mathfn (tree, tree);
164 static bool readonly_data_expr (tree);
165 static rtx expand_builtin_fabs (tree, rtx, rtx);
166 static rtx expand_builtin_signbit (tree, rtx);
167 static tree fold_builtin_sqrt (tree, tree);
168 static tree fold_builtin_cbrt (tree, tree);
169 static tree fold_builtin_pow (tree, tree, tree, tree);
170 static tree fold_builtin_powi (tree, tree, tree, tree);
171 static tree fold_builtin_cos (tree, tree, tree);
172 static tree fold_builtin_cosh (tree, tree, tree);
173 static tree fold_builtin_tan (tree, tree);
174 static tree fold_builtin_trunc (tree, tree);
175 static tree fold_builtin_floor (tree, tree);
176 static tree fold_builtin_ceil (tree, tree);
177 static tree fold_builtin_round (tree, tree);
178 static tree fold_builtin_int_roundingfn (tree, tree);
179 static tree fold_builtin_bitop (tree, tree);
180 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
181 static tree fold_builtin_strchr (tree, tree, tree);
182 static tree fold_builtin_memchr (tree, tree, tree, tree);
183 static tree fold_builtin_memcmp (tree, tree, tree);
184 static tree fold_builtin_strcmp (tree, tree);
185 static tree fold_builtin_strncmp (tree, tree, tree);
186 static tree fold_builtin_signbit (tree, tree);
187 static tree fold_builtin_copysign (tree, tree, tree, tree);
188 static tree fold_builtin_isascii (tree);
189 static tree fold_builtin_toascii (tree);
190 static tree fold_builtin_isdigit (tree);
191 static tree fold_builtin_fabs (tree, tree);
192 static tree fold_builtin_abs (tree, tree);
193 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
194 enum tree_code);
195 static tree fold_builtin_n (tree, tree *, int, bool);
196 static tree fold_builtin_0 (tree, bool);
197 static tree fold_builtin_1 (tree, tree, bool);
198 static tree fold_builtin_2 (tree, tree, tree, bool);
199 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
200 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
201 static tree fold_builtin_varargs (tree, tree, bool);
203 static tree fold_builtin_strpbrk (tree, tree, tree);
204 static tree fold_builtin_strstr (tree, tree, tree);
205 static tree fold_builtin_strrchr (tree, tree, tree);
206 static tree fold_builtin_strcat (tree, tree);
207 static tree fold_builtin_strncat (tree, tree, tree);
208 static tree fold_builtin_strspn (tree, tree);
209 static tree fold_builtin_strcspn (tree, tree);
210 static tree fold_builtin_sprintf (tree, tree, tree, int);
212 static rtx expand_builtin_object_size (tree);
213 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
214 enum built_in_function);
215 static void maybe_emit_chk_warning (tree, enum built_in_function);
216 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
217 static void maybe_emit_free_warning (tree);
218 static tree fold_builtin_object_size (tree, tree);
219 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
220 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
221 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
222 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
223 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
224 enum built_in_function);
225 static bool init_target_chars (void);
227 static unsigned HOST_WIDE_INT target_newline;
228 static unsigned HOST_WIDE_INT target_percent;
229 static unsigned HOST_WIDE_INT target_c;
230 static unsigned HOST_WIDE_INT target_s;
231 static char target_percent_c[3];
232 static char target_percent_s[3];
233 static char target_percent_s_newline[4];
234 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
235 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
236 static tree do_mpfr_arg2 (tree, tree, tree,
237 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
238 static tree do_mpfr_arg3 (tree, tree, tree, tree,
239 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
240 static tree do_mpfr_sincos (tree, tree, tree);
241 static tree do_mpfr_bessel_n (tree, tree, tree,
242 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
243 const REAL_VALUE_TYPE *, bool);
244 static tree do_mpfr_remquo (tree, tree, tree);
245 static tree do_mpfr_lgamma_r (tree, tree, tree);
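/* Return true if NAME looks like the "internal" name of a builtin, i.e.
   starts with the "__builtin_" or "__sync_" prefix.  */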
247 bool
248 is_builtin_name (const char *name)
250 if (strncmp (name, "__builtin_", 10) == 0)
251 return true;
252 if (strncmp (name, "__sync_", 7) == 0)
253 return true;
254 return false;
257 /* Return true if NODE should be considered for inline expansion regardless
258 of the optimization level. This means whenever a function is invoked with
259 its "internal" name, which normally contains the prefix "__builtin". */
261 static bool
262 called_as_built_in (tree node)
264 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
265 we want the name used to call the function, not the name it
266 will have. */
267 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
268 return is_builtin_name (name);
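/* For example, is_builtin_name accepts "__builtin_memcpy" and
   "__sync_fetch_and_add" but rejects plain "memcpy"; consequently a call
   spelled __builtin_memcpy (d, s, n) is considered for inline expansion
   even when not optimizing, while memcpy (d, s, n) is not.  (Illustrative;
   the actual set of recognized builtins comes from builtins.def.)  */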
271 /* Return the alignment in bits of EXP, an object.
272 Don't return more than MAX_ALIGN no matter what; ALIGN is the initial
273 guessed alignment, e.g. from type alignment. */
275 unsigned int
276 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
278 unsigned int inner;
280 inner = max_align;
281 if (handled_component_p (exp))
283 HOST_WIDE_INT bitsize, bitpos;
284 tree offset;
285 enum machine_mode mode;
286 int unsignedp, volatilep;
288 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
289 &mode, &unsignedp, &volatilep, true);
290 if (bitpos)
291 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
292 while (offset)
294 tree next_offset;
296 if (TREE_CODE (offset) == PLUS_EXPR)
298 next_offset = TREE_OPERAND (offset, 0);
299 offset = TREE_OPERAND (offset, 1);
301 else
302 next_offset = NULL;
303 if (host_integerp (offset, 1))
305 /* Any overflow in calculating offset_bits won't change
306 the alignment. */
307 unsigned offset_bits
308 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
310 if (offset_bits)
311 inner = MIN (inner, (offset_bits & -offset_bits));
313 else if (TREE_CODE (offset) == MULT_EXPR
314 && host_integerp (TREE_OPERAND (offset, 1), 1))
316 /* Any overflow in calculating offset_factor won't change
317 the alignment. */
318 unsigned offset_factor
319 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
320 * BITS_PER_UNIT);
322 if (offset_factor)
323 inner = MIN (inner, (offset_factor & -offset_factor));
325 else
327 inner = MIN (inner, BITS_PER_UNIT);
328 break;
330 offset = next_offset;
333 if (DECL_P (exp))
334 align = MIN (inner, DECL_ALIGN (exp));
335 #ifdef CONSTANT_ALIGNMENT
336 else if (CONSTANT_CLASS_P (exp))
337 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
338 #endif
339 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
340 || TREE_CODE (exp) == INDIRECT_REF)
341 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
342 else
343 align = MIN (align, inner);
344 return MIN (align, max_align);
347 /* Return the alignment in bits of EXP, a pointer valued expression.
348 But don't return more than MAX_ALIGN no matter what.
349 The alignment returned is, by default, the alignment of the thing that
350 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
352 Otherwise, look at the expression to see if we can do better, i.e., if the
353 expression is actually pointing at an object whose alignment is tighter. */
355 unsigned int
356 get_pointer_alignment (tree exp, unsigned int max_align)
358 unsigned int align, inner;
360 /* We rely on TER to compute accurate alignment information. */
361 if (!(optimize && flag_tree_ter))
362 return 0;
364 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
365 return 0;
367 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
368 align = MIN (align, max_align);
370 while (1)
372 switch (TREE_CODE (exp))
374 CASE_CONVERT:
375 exp = TREE_OPERAND (exp, 0);
376 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
377 return align;
379 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
380 align = MIN (inner, max_align);
381 break;
383 case POINTER_PLUS_EXPR:
384 /* If sum of pointer + int, restrict our maximum alignment to that
385 imposed by the integer. If not, we can't do any better than
386 ALIGN. */
387 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
388 return align;
390 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
391 & (max_align / BITS_PER_UNIT - 1))
392 != 0)
393 max_align >>= 1;
395 exp = TREE_OPERAND (exp, 0);
396 break;
398 case ADDR_EXPR:
399 /* See what we are pointing at and look at its alignment. */
400 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
402 default:
403 return align;
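/* For example, if V is an int with 32-bit alignment, &V is reported as
   32-bit aligned through the ADDR_EXPR case, while (char *) &V + 1 first
   hits the POINTER_PLUS_EXPR case, which shrinks MAX_ALIGN down to 8 bits,
   so only byte alignment is reported for it.  (Illustrative; the exact
   numbers depend on the target's alignment rules.)  */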
408 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
409 way, because it could contain a zero byte in the middle.
410 TREE_STRING_LENGTH is the size of the character array, not the string.
412 ONLY_VALUE should be nonzero if the result is not going to be emitted
413 into the instruction stream and zero if it is going to be expanded.
414 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
415 is returned, otherwise NULL, since
416 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
417 evaluate the side-effects.
419 The value returned is of type `ssizetype'.
421 Unfortunately, string_constant can't access the values of const char
422 arrays with initializers, so neither can we do so here. */
424 tree
425 c_strlen (tree src, int only_value)
427 tree offset_node;
428 HOST_WIDE_INT offset;
429 int max;
430 const char *ptr;
432 STRIP_NOPS (src);
433 if (TREE_CODE (src) == COND_EXPR
434 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
436 tree len1, len2;
438 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
439 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
440 if (tree_int_cst_equal (len1, len2))
441 return len1;
444 if (TREE_CODE (src) == COMPOUND_EXPR
445 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
446 return c_strlen (TREE_OPERAND (src, 1), only_value);
448 src = string_constant (src, &offset_node);
449 if (src == 0)
450 return NULL_TREE;
452 max = TREE_STRING_LENGTH (src) - 1;
453 ptr = TREE_STRING_POINTER (src);
455 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
457 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
458 compute the offset to the following null if we don't know where to
459 start searching for it. */
460 int i;
462 for (i = 0; i < max; i++)
463 if (ptr[i] == 0)
464 return NULL_TREE;
466 /* We don't know the starting offset, but we do know that the string
467 has no internal zero bytes. We can assume that the offset falls
468 within the bounds of the string; otherwise, the programmer deserves
469 what he gets. Subtract the offset from the length of the string,
470 and return that. This would perhaps not be valid if we were dealing
471 with named arrays in addition to literal string constants. */
473 return size_diffop (size_int (max), offset_node);
476 /* We have a known offset into the string. Start searching there for
477 a null character if we can represent it as a single HOST_WIDE_INT. */
478 if (offset_node == 0)
479 offset = 0;
480 else if (! host_integerp (offset_node, 0))
481 offset = -1;
482 else
483 offset = tree_low_cst (offset_node, 0);
485 /* If the offset is known to be out of bounds, warn, and call strlen at
486 runtime. */
487 if (offset < 0 || offset > max)
489 /* Suppress multiple warnings for propagated constant strings. */
490 if (! TREE_NO_WARNING (src))
492 warning (0, "offset outside bounds of constant string");
493 TREE_NO_WARNING (src) = 1;
495 return NULL_TREE;
498 /* Use strlen to search for the first zero byte. Since any strings
499 constructed with build_string will have nulls appended, we win even
500 if we get handed something like (char[4])"abcd".
502 Since OFFSET is our starting index into the string, no further
503 calculation is needed. */
504 return ssize_int (strlen (ptr + offset));
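/* For example, for the argument of __builtin_strlen ("hello" + 2) this
   returns ssize_int (3): both the string constant and the offset are
   known, so the length folds at compile time.  With a variable offset,
   the scan above must first prove the string has no embedded zero bytes
   before returning MAX minus the offset.  (Illustrative example.)  */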
507 /* Return a char pointer for a C string if it is a string constant
508 or sum of string constant and integer constant. */
510 static const char *
511 c_getstr (tree src)
513 tree offset_node;
515 src = string_constant (src, &offset_node);
516 if (src == 0)
517 return 0;
519 if (offset_node == 0)
520 return TREE_STRING_POINTER (src);
521 else if (!host_integerp (offset_node, 1)
522 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
523 return 0;
525 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
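/* For example, for the argument "foobar" + 3 this returns a host pointer
   to "bar"; for a non-constant offset, or an offset past the end of the
   string, it returns zero.  (Illustrative example.)  */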
528 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
529 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
531 static rtx
532 c_readstr (const char *str, enum machine_mode mode)
534 HOST_WIDE_INT c[2];
535 HOST_WIDE_INT ch;
536 unsigned int i, j;
538 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
540 c[0] = 0;
541 c[1] = 0;
542 ch = 1;
543 for (i = 0; i < GET_MODE_SIZE (mode); i++)
545 j = i;
546 if (WORDS_BIG_ENDIAN)
547 j = GET_MODE_SIZE (mode) - i - 1;
548 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
549 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
550 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
551 j *= BITS_PER_UNIT;
552 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
554 if (ch)
555 ch = (unsigned char) str[i];
556 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
558 return immed_double_const (c[0], c[1], mode);
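/* For example, on a 32-bit little-endian target c_readstr ("abc", SImode)
   yields the CONST_INT 0x00636261: bytes are placed starting at the least
   significant position, and once the terminating NUL is reached CH stays
   zero, so all remaining bytes are zero.  (Illustrative; the exact layout
   follows BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN.)  */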
561 /* Cast a target constant CST to target CHAR, and if that value fits into
562 the host char type, return zero and store the value in the variable
563 pointed to by P. Otherwise return one. */
565 static int
566 target_char_cast (tree cst, char *p)
568 unsigned HOST_WIDE_INT val, hostval;
570 if (!host_integerp (cst, 1)
571 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
572 return 1;
574 val = tree_low_cst (cst, 1);
575 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
576 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
578 hostval = val;
579 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
580 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
582 if (val != hostval)
583 return 1;
585 *p = hostval;
586 return 0;
589 /* Similar to save_expr, but assumes that arbitrary code is not executed
590 in between the multiple evaluations. In particular, we assume that a
591 non-addressable local variable will not be modified. */
593 static tree
594 builtin_save_expr (tree exp)
596 if (TREE_ADDRESSABLE (exp) == 0
597 && (TREE_CODE (exp) == PARM_DECL
598 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
599 return exp;
601 return save_expr (exp);
604 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
605 times to get the address of either a higher stack frame, or a return
606 address located within it (depending on FNDECL_CODE). */
608 static rtx
609 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
611 int i;
613 #ifdef INITIAL_FRAME_ADDRESS_RTX
614 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
615 #else
616 rtx tem;
618 /* For a zero count with __builtin_return_address, we don't care what
619 frame address we return, because target-specific definitions will
620 override us. Therefore frame pointer elimination is OK, and using
621 the soft frame pointer is OK.
623 For a nonzero count, or a zero count with __builtin_frame_address,
624 we require a stable offset from the current frame pointer to the
625 previous one, so we must use the hard frame pointer, and
626 we must disable frame pointer elimination. */
627 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
628 tem = frame_pointer_rtx;
629 else
631 tem = hard_frame_pointer_rtx;
633 /* Tell reload not to eliminate the frame pointer. */
634 crtl->accesses_prior_frames = 1;
636 #endif
638 /* Some machines need special handling before we can access
639 arbitrary frames. For example, on the SPARC, we must first flush
640 all register windows to the stack. */
641 #ifdef SETUP_FRAME_ADDRESSES
642 if (count > 0)
643 SETUP_FRAME_ADDRESSES ();
644 #endif
646 /* On the SPARC, the return address is not in the frame, it is in a
647 register. There is no way to access it off of the current frame
648 pointer, but it can be accessed off the previous frame pointer by
649 reading the value from the register window save area. */
650 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
651 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
652 count--;
653 #endif
655 /* Scan back COUNT frames to the specified frame. */
656 for (i = 0; i < count; i++)
658 /* Assume the dynamic chain pointer is in the word that the
659 frame address points to, unless otherwise specified. */
660 #ifdef DYNAMIC_CHAIN_ADDRESS
661 tem = DYNAMIC_CHAIN_ADDRESS (tem);
662 #endif
663 tem = memory_address (Pmode, tem);
664 tem = gen_frame_mem (Pmode, tem);
665 tem = copy_to_reg (tem);
668 /* For __builtin_frame_address, return what we've got. But, on
669 the SPARC for example, we may have to add a bias. */
670 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
671 #ifdef FRAME_ADDR_RTX
672 return FRAME_ADDR_RTX (tem);
673 #else
674 return tem;
675 #endif
677 /* For __builtin_return_address, get the return address from that frame. */
678 #ifdef RETURN_ADDR_RTX
679 tem = RETURN_ADDR_RTX (count, tem);
680 #else
681 tem = memory_address (Pmode,
682 plus_constant (tem, GET_MODE_SIZE (Pmode)));
683 tem = gen_frame_mem (Pmode, tem);
684 #endif
685 return tem;
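/* For example, __builtin_return_address (0) reaches this function with
   COUNT == 0 and can use the soft frame pointer, while
   __builtin_frame_address (1) must use the hard frame pointer, sets
   crtl->accesses_prior_frames, and follows the dynamic chain one level
   before applying FRAME_ADDR_RTX, if the target defines it.
   (Illustrative summary of the code above.)  */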
688 /* Alias set used for setjmp buffer. */
689 static alias_set_type setjmp_alias_set = -1;
691 /* Construct the leading half of a __builtin_setjmp call. Control will
692 return to RECEIVER_LABEL. This is also called directly by the SJLJ
693 exception handling code. */
695 void
696 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
698 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
699 rtx stack_save;
700 rtx mem;
702 if (setjmp_alias_set == -1)
703 setjmp_alias_set = new_alias_set ();
705 buf_addr = convert_memory_address (Pmode, buf_addr);
707 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
709 /* We store the frame pointer and the address of receiver_label in
710 the buffer and use the rest of it for the stack save area, which
711 is machine-dependent. */
713 mem = gen_rtx_MEM (Pmode, buf_addr);
714 set_mem_alias_set (mem, setjmp_alias_set);
715 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
717 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
718 set_mem_alias_set (mem, setjmp_alias_set);
720 emit_move_insn (validize_mem (mem),
721 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
723 stack_save = gen_rtx_MEM (sa_mode,
724 plus_constant (buf_addr,
725 2 * GET_MODE_SIZE (Pmode)));
726 set_mem_alias_set (stack_save, setjmp_alias_set);
727 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
729 /* If there is further processing to do, do it. */
730 #ifdef HAVE_builtin_setjmp_setup
731 if (HAVE_builtin_setjmp_setup)
732 emit_insn (gen_builtin_setjmp_setup (buf_addr));
733 #endif
735 /* Tell optimize_save_area_alloca that extra work is going to
736 need to go on during alloca. */
737 cfun->calls_setjmp = 1;
739 /* We have a nonlocal label. */
740 cfun->has_nonlocal_label = 1;
743 /* Construct the trailing part of a __builtin_setjmp call. This is
744 also called directly by the SJLJ exception handling code. */
746 void
747 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
749 /* Clobber the FP when we get here, so we have to make sure it's
750 marked as used by this function. */
751 emit_use (hard_frame_pointer_rtx);
753 /* Mark the static chain as clobbered here so life information
754 doesn't get messed up for it. */
755 emit_clobber (static_chain_rtx);
757 /* Now put in the code to restore the frame pointer, and argument
758 pointer, if needed. */
759 #ifdef HAVE_nonlocal_goto
760 if (! HAVE_nonlocal_goto)
761 #endif
763 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
764 /* This might change the hard frame pointer in ways that aren't
765 apparent to early optimization passes, so force a clobber. */
766 emit_clobber (hard_frame_pointer_rtx);
769 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
770 if (fixed_regs[ARG_POINTER_REGNUM])
772 #ifdef ELIMINABLE_REGS
773 size_t i;
774 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
776 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
777 if (elim_regs[i].from == ARG_POINTER_REGNUM
778 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
779 break;
781 if (i == ARRAY_SIZE (elim_regs))
782 #endif
784 /* Now restore our arg pointer from the address at which it
785 was saved in our stack frame. */
786 emit_move_insn (crtl->args.internal_arg_pointer,
787 copy_to_reg (get_arg_pointer_save_area ()));
790 #endif
792 #ifdef HAVE_builtin_setjmp_receiver
793 if (HAVE_builtin_setjmp_receiver)
794 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
795 else
796 #endif
797 #ifdef HAVE_nonlocal_goto_receiver
798 if (HAVE_nonlocal_goto_receiver)
799 emit_insn (gen_nonlocal_goto_receiver ());
800 else
801 #endif
802 { /* Nothing */ }
804 /* We must not allow the code we just generated to be reordered by
805 scheduling. Specifically, the update of the frame pointer must
806 happen immediately, not later. */
807 emit_insn (gen_blockage ());
810 /* __builtin_longjmp is passed a pointer to an array of five words (not
811 all will be used on all machines). It operates similarly to the C
812 library function of the same name, but is more efficient. Much of
813 the code below is copied from the handling of non-local gotos. */
815 static void
816 expand_builtin_longjmp (rtx buf_addr, rtx value)
818 rtx fp, lab, stack, insn, last;
819 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
821 /* DRAP is needed for stack realign if longjmp is expanded to current
822 function */
823 if (SUPPORTS_STACK_ALIGNMENT)
824 crtl->need_drap = true;
826 if (setjmp_alias_set == -1)
827 setjmp_alias_set = new_alias_set ();
829 buf_addr = convert_memory_address (Pmode, buf_addr);
831 buf_addr = force_reg (Pmode, buf_addr);
833 /* We used to store value in static_chain_rtx, but that fails if pointers
834 are smaller than integers. We instead require that the user must pass
835 a second argument of 1, because that is what builtin_setjmp will
836 return. This also makes EH slightly more efficient, since we are no
837 longer copying around a value that we don't care about. */
838 gcc_assert (value == const1_rtx);
840 last = get_last_insn ();
841 #ifdef HAVE_builtin_longjmp
842 if (HAVE_builtin_longjmp)
843 emit_insn (gen_builtin_longjmp (buf_addr));
844 else
845 #endif
847 fp = gen_rtx_MEM (Pmode, buf_addr);
848 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
849 GET_MODE_SIZE (Pmode)));
851 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
852 2 * GET_MODE_SIZE (Pmode)));
853 set_mem_alias_set (fp, setjmp_alias_set);
854 set_mem_alias_set (lab, setjmp_alias_set);
855 set_mem_alias_set (stack, setjmp_alias_set);
857 /* Pick up FP, label, and SP from the block and jump. This code is
858 from expand_goto in stmt.c; see there for detailed comments. */
859 #ifdef HAVE_nonlocal_goto
860 if (HAVE_nonlocal_goto)
861 /* We have to pass a value to the nonlocal_goto pattern that will
862 get copied into the static_chain pointer, but it does not matter
863 what that value is, because builtin_setjmp does not use it. */
864 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
865 else
866 #endif
868 lab = copy_to_reg (lab);
870 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
871 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
873 emit_move_insn (hard_frame_pointer_rtx, fp);
874 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
876 emit_use (hard_frame_pointer_rtx);
877 emit_use (stack_pointer_rtx);
878 emit_indirect_jump (lab);
882 /* Search backwards and mark the jump insn as a non-local goto.
883 Note that this precludes the use of __builtin_longjmp to a
884 __builtin_setjmp target in the same function. However, we've
885 already cautioned the user that these functions are for
886 internal exception handling use only. */
887 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
889 gcc_assert (insn != last);
891 if (JUMP_P (insn))
893 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
894 break;
896 else if (CALL_P (insn))
897 break;
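/* Typical shape of the code this pair of builtins supports (illustrative;
   as noted above they are intended for internal exception-handling use,
   and the __builtin_longjmp normally occurs in a function called from the
   one that performed the __builtin_setjmp):

     void *buf[5];                    // five words, as described above

     if (__builtin_setjmp (buf) == 0)
       do_work ();                    // direct return: value 0
     else
       handle_unwind ();              // reached via __builtin_longjmp

     // ... and somewhere in a callee of do_work:
     __builtin_longjmp (buf, 1);      // second argument must be 1

   do_work and handle_unwind are placeholder names.  */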
901 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
902 and the address of the save area. */
904 static rtx
905 expand_builtin_nonlocal_goto (tree exp)
907 tree t_label, t_save_area;
908 rtx r_label, r_save_area, r_fp, r_sp, insn;
910 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
911 return NULL_RTX;
913 t_label = CALL_EXPR_ARG (exp, 0);
914 t_save_area = CALL_EXPR_ARG (exp, 1);
916 r_label = expand_normal (t_label);
917 r_label = convert_memory_address (Pmode, r_label);
918 r_save_area = expand_normal (t_save_area);
919 r_save_area = convert_memory_address (Pmode, r_save_area);
920 /* Copy the address of the save location to a register just in case it was based
921 on the frame pointer. */
922 r_save_area = copy_to_reg (r_save_area);
923 r_fp = gen_rtx_MEM (Pmode, r_save_area);
924 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
925 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
927 crtl->has_nonlocal_goto = 1;
929 #ifdef HAVE_nonlocal_goto
930 /* ??? We no longer need to pass the static chain value, afaik. */
931 if (HAVE_nonlocal_goto)
932 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
933 else
934 #endif
936 r_label = copy_to_reg (r_label);
938 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
939 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
941 /* Restore frame pointer for containing function.
942 This sets the actual hard register used for the frame pointer
943 to the location of the function's incoming static chain info.
944 The non-local goto handler will then adjust it to contain the
945 proper value and reload the argument pointer, if needed. */
946 emit_move_insn (hard_frame_pointer_rtx, r_fp);
947 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
949 /* USE of hard_frame_pointer_rtx added for consistency;
950 not clear if really needed. */
951 emit_use (hard_frame_pointer_rtx);
952 emit_use (stack_pointer_rtx);
954 /* If the architecture is using a GP register, we must
955 conservatively assume that the target function makes use of it.
956 The prologue of functions with nonlocal gotos must therefore
957 initialize the GP register to the appropriate value, and we
958 must then make sure that this value is live at the point
959 of the jump. (Note that this doesn't necessarily apply
960 to targets with a nonlocal_goto pattern; they are free
961 to implement it in their own way. Note also that this is
962 a no-op if the GP register is a global invariant.) */
963 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
964 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
965 emit_use (pic_offset_table_rtx);
967 emit_indirect_jump (r_label);
970 /* Search backwards to the jump insn and mark it as a
971 non-local goto. */
972 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
974 if (JUMP_P (insn))
976 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
977 break;
979 else if (CALL_P (insn))
980 break;
983 return const0_rtx;
986 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
987 (not all will be used on all machines) that was passed to __builtin_setjmp.
988 It updates the stack pointer in that block to correspond to the current
989 stack pointer. */
991 static void
992 expand_builtin_update_setjmp_buf (rtx buf_addr)
994 enum machine_mode sa_mode = Pmode;
995 rtx stack_save;
998 #ifdef HAVE_save_stack_nonlocal
999 if (HAVE_save_stack_nonlocal)
1000 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1001 #endif
1002 #ifdef STACK_SAVEAREA_MODE
1003 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1004 #endif
1006 stack_save
1007 = gen_rtx_MEM (sa_mode,
1008 memory_address
1009 (sa_mode,
1010 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1012 #ifdef HAVE_setjmp
1013 if (HAVE_setjmp)
1014 emit_insn (gen_setjmp ());
1015 #endif
1017 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1020 /* Expand a call to __builtin_prefetch. For a target that does not support
1021 data prefetch, evaluate the memory address argument in case it has side
1022 effects. */
1024 static void
1025 expand_builtin_prefetch (tree exp)
1027 tree arg0, arg1, arg2;
1028 int nargs;
1029 rtx op0, op1, op2;
1031 if (!validate_arglist (exp, POINTER_TYPE, 0))
1032 return;
1034 arg0 = CALL_EXPR_ARG (exp, 0);
1036 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1037 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1038 locality). */
1039 nargs = call_expr_nargs (exp);
1040 if (nargs > 1)
1041 arg1 = CALL_EXPR_ARG (exp, 1);
1042 else
1043 arg1 = integer_zero_node;
1044 if (nargs > 2)
1045 arg2 = CALL_EXPR_ARG (exp, 2);
1046 else
1047 arg2 = build_int_cst (NULL_TREE, 3);
1049 /* Argument 0 is an address. */
1050 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1052 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1053 if (TREE_CODE (arg1) != INTEGER_CST)
1055 error ("second argument to %<__builtin_prefetch%> must be a constant");
1056 arg1 = integer_zero_node;
1058 op1 = expand_normal (arg1);
1059 /* Argument 1 must be either zero or one. */
1060 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1062 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1063 " using zero");
1064 op1 = const0_rtx;
1067 /* Argument 2 (locality) must be a compile-time constant int. */
1068 if (TREE_CODE (arg2) != INTEGER_CST)
1070 error ("third argument to %<__builtin_prefetch%> must be a constant");
1071 arg2 = integer_zero_node;
1073 op2 = expand_normal (arg2);
1074 /* Argument 2 must be 0, 1, 2, or 3. */
1075 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1077 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1078 op2 = const0_rtx;
1081 #ifdef HAVE_prefetch
1082 if (HAVE_prefetch)
1084 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1085 (op0,
1086 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1087 || (GET_MODE (op0) != Pmode))
1089 op0 = convert_memory_address (Pmode, op0);
1090 op0 = force_reg (Pmode, op0);
1092 emit_insn (gen_prefetch (op0, op1, op2));
1094 #endif
1096 /* Don't do anything with direct references to volatile memory, but
1097 generate code to handle other side effects. */
1098 if (!MEM_P (op0) && side_effects_p (op0))
1099 emit_insn (op0);
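/* For example:

     __builtin_prefetch (&a[i + 8]);        // read, locality 3 (defaults)
     __builtin_prefetch (&a[i + 8], 1, 1);  // write, low temporal locality

   The second and third arguments must be integer constants; as shown
   above, out-of-range values are diagnosed and replaced with zero, and on
   targets without a prefetch pattern only side effects of the address are
   evaluated.  (Illustrative; `a' and `i' are placeholders.)  */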
1102 /* Get a MEM rtx for expression EXP which is the address of an operand
1103 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1104 the maximum length of the block of memory that might be accessed or
1105 NULL if unknown. */
1107 static rtx
1108 get_memory_rtx (tree exp, tree len)
1110 tree orig_exp = exp;
1111 rtx addr, mem;
1112 HOST_WIDE_INT off;
1114 /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
1115 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1116 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1117 exp = TREE_OPERAND (exp, 0);
1119 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1120 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1122 /* Get an expression we can use to find the attributes to assign to MEM.
1123 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1124 we can. First remove any nops. */
1125 while (CONVERT_EXPR_P (exp)
1126 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1127 exp = TREE_OPERAND (exp, 0);
1129 off = 0;
1130 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1131 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1132 && host_integerp (TREE_OPERAND (exp, 1), 0)
1133 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1134 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1135 else if (TREE_CODE (exp) == ADDR_EXPR)
1136 exp = TREE_OPERAND (exp, 0);
1137 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1138 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1139 else
1140 exp = NULL;
1142 /* Honor attributes derived from exp, except for the alias set
1143 (as builtin stringops may alias with anything) and the size
1144 (as stringops may access multiple array elements). */
1145 if (exp)
1147 set_mem_attributes (mem, exp, 0);
1149 if (off)
1150 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1152 /* Allow the string and memory builtins to overflow from one
1153 field into another, see http://gcc.gnu.org/PR23561.
1154 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1155 memory accessed by the string or memory builtin will fit
1156 within the field. */
1157 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1159 tree mem_expr = MEM_EXPR (mem);
1160 HOST_WIDE_INT offset = -1, length = -1;
1161 tree inner = exp;
1163 while (TREE_CODE (inner) == ARRAY_REF
1164 || CONVERT_EXPR_P (inner)
1165 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1166 || TREE_CODE (inner) == SAVE_EXPR)
1167 inner = TREE_OPERAND (inner, 0);
1169 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1171 if (MEM_OFFSET (mem)
1172 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1173 offset = INTVAL (MEM_OFFSET (mem));
1175 if (offset >= 0 && len && host_integerp (len, 0))
1176 length = tree_low_cst (len, 0);
1178 while (TREE_CODE (inner) == COMPONENT_REF)
1180 tree field = TREE_OPERAND (inner, 1);
1181 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1182 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1184 /* Bitfields are generally not byte-addressable. */
1185 gcc_assert (!DECL_BIT_FIELD (field)
1186 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1187 % BITS_PER_UNIT) == 0
1188 && host_integerp (DECL_SIZE (field), 0)
1189 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1190 % BITS_PER_UNIT) == 0));
1192 /* If we can prove that the memory starting at XEXP (mem, 0) and
1193 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1194 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1195 fields without DECL_SIZE_UNIT like flexible array members. */
1196 if (length >= 0
1197 && DECL_SIZE_UNIT (field)
1198 && host_integerp (DECL_SIZE_UNIT (field), 0))
1200 HOST_WIDE_INT size
1201 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1202 if (offset <= size
1203 && length <= size
1204 && offset + length <= size)
1205 break;
1208 if (offset >= 0
1209 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1210 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1211 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1212 / BITS_PER_UNIT;
1213 else
1215 offset = -1;
1216 length = -1;
1219 mem_expr = TREE_OPERAND (mem_expr, 0);
1220 inner = TREE_OPERAND (inner, 0);
1223 if (mem_expr == NULL)
1224 offset = -1;
1225 if (mem_expr != MEM_EXPR (mem))
1227 set_mem_expr (mem, mem_expr);
1228 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1231 set_mem_alias_set (mem, 0);
1232 set_mem_size (mem, NULL_RTX);
1235 return mem;
1238 /* Built-in functions to perform an untyped call and return. */
1240 /* For each register that may be used for calling a function, this
1241 gives a mode used to copy the register's value. VOIDmode indicates
1242 the register is not used for calling a function. If the machine
1243 has register windows, this gives only the outbound registers.
1244 INCOMING_REGNO gives the corresponding inbound register. */
1245 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1247 /* For each register that may be used for returning values, this gives
1248 a mode used to copy the register's value. VOIDmode indicates the
1249 register is not used for returning values. If the machine has
1250 register windows, this gives only the outbound registers.
1251 INCOMING_REGNO gives the corresponding inbound register. */
1252 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1254 /* For each register that may be used for calling a function, this
1255 gives the offset of that register into the block returned by
1256 __builtin_apply_args. 0 indicates that the register is not
1257 used for calling a function. */
1258 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1260 /* Return the size required for the block returned by __builtin_apply_args,
1261 and initialize apply_args_mode. */
1263 static int
1264 apply_args_size (void)
1266 static int size = -1;
1267 int align;
1268 unsigned int regno;
1269 enum machine_mode mode;
1271 /* The values computed by this function never change. */
1272 if (size < 0)
1274 /* The first value is the incoming arg-pointer. */
1275 size = GET_MODE_SIZE (Pmode);
1277 /* The second value is the structure value address unless this is
1278 passed as an "invisible" first argument. */
1279 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1280 size += GET_MODE_SIZE (Pmode);
1282 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1283 if (FUNCTION_ARG_REGNO_P (regno))
1285 mode = reg_raw_mode[regno];
1287 gcc_assert (mode != VOIDmode);
1289 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1290 if (size % align != 0)
1291 size = CEIL (size, align) * align;
1292 apply_args_reg_offset[regno] = size;
1293 size += GET_MODE_SIZE (mode);
1294 apply_args_mode[regno] = mode;
1296 else
1298 apply_args_mode[regno] = VOIDmode;
1299 apply_args_reg_offset[regno] = 0;
1302 return size;
1305 /* Return the size required for the block returned by __builtin_apply,
1306 and initialize apply_result_mode. */
1308 static int
1309 apply_result_size (void)
1311 static int size = -1;
1312 int align, regno;
1313 enum machine_mode mode;
1315 /* The values computed by this function never change. */
1316 if (size < 0)
1318 size = 0;
1320 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1321 if (FUNCTION_VALUE_REGNO_P (regno))
1323 mode = reg_raw_mode[regno];
1325 gcc_assert (mode != VOIDmode);
1327 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1328 if (size % align != 0)
1329 size = CEIL (size, align) * align;
1330 size += GET_MODE_SIZE (mode);
1331 apply_result_mode[regno] = mode;
1333 else
1334 apply_result_mode[regno] = VOIDmode;
1336 /* Allow targets that use untyped_call and untyped_return to override
1337 the size so that machine-specific information can be stored here. */
1338 #ifdef APPLY_RESULT_SIZE
1339 size = APPLY_RESULT_SIZE;
1340 #endif
1342 return size;
1345 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1346 /* Create a vector describing the result block RESULT. If SAVEP is true,
1347 the result block is used to save the values; otherwise it is used to
1348 restore the values. */
1350 static rtx
1351 result_vector (int savep, rtx result)
1353 int regno, size, align, nelts;
1354 enum machine_mode mode;
1355 rtx reg, mem;
1356 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1358 size = nelts = 0;
1359 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1360 if ((mode = apply_result_mode[regno]) != VOIDmode)
1362 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1363 if (size % align != 0)
1364 size = CEIL (size, align) * align;
1365 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1366 mem = adjust_address (result, mode, size);
1367 savevec[nelts++] = (savep
1368 ? gen_rtx_SET (VOIDmode, mem, reg)
1369 : gen_rtx_SET (VOIDmode, reg, mem));
1370 size += GET_MODE_SIZE (mode);
1372 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1374 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1376 /* Save the state required to perform an untyped call with the same
1377 arguments as were passed to the current function. */
1379 static rtx
1380 expand_builtin_apply_args_1 (void)
1382 rtx registers, tem;
1383 int size, align, regno;
1384 enum machine_mode mode;
1385 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1387 /* Create a block where the arg-pointer, structure value address,
1388 and argument registers can be saved. */
1389 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1391 /* Walk past the arg-pointer and structure value address. */
1392 size = GET_MODE_SIZE (Pmode);
1393 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1394 size += GET_MODE_SIZE (Pmode);
1396 /* Save each register used in calling a function to the block. */
1397 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1398 if ((mode = apply_args_mode[regno]) != VOIDmode)
1400 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1401 if (size % align != 0)
1402 size = CEIL (size, align) * align;
1404 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1406 emit_move_insn (adjust_address (registers, mode, size), tem);
1407 size += GET_MODE_SIZE (mode);
1410 /* Save the arg pointer to the block. */
1411 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1412 #ifdef STACK_GROWS_DOWNWARD
1413 /* We need the pointer as the caller actually passed them to us, not
1414 as we might have pretended they were passed. Make sure it's a valid
1415 operand, as emit_move_insn isn't expected to handle a PLUS. */
1416 tem
1417 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1418 NULL_RTX);
1419 #endif
1420 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1422 size = GET_MODE_SIZE (Pmode);
1424 /* Save the structure value address unless this is passed as an
1425 "invisible" first argument. */
1426 if (struct_incoming_value)
1428 emit_move_insn (adjust_address (registers, Pmode, size),
1429 copy_to_reg (struct_incoming_value));
1430 size += GET_MODE_SIZE (Pmode);
1433 /* Return the address of the block. */
1434 return copy_addr_to_reg (XEXP (registers, 0));
1437 /* __builtin_apply_args returns block of memory allocated on
1438 the stack into which is stored the arg pointer, structure
1439 value address, static chain, and all the registers that might
1440 possibly be used in performing a function call. The code is
1441 moved to the start of the function so the incoming values are
1442 saved. */
1444 static rtx
1445 expand_builtin_apply_args (void)
1447 /* Don't do __builtin_apply_args more than once in a function.
1448 Save the result of the first call and reuse it. */
1449 if (apply_args_value != 0)
1450 return apply_args_value;
1452 /* When this function is called, it means that registers must be
1453 saved on entry to this function. So we migrate the
1454 call to the first insn of this function. */
1455 rtx temp;
1456 rtx seq;
1458 start_sequence ();
1459 temp = expand_builtin_apply_args_1 ();
1460 seq = get_insns ();
1461 end_sequence ();
1463 apply_args_value = temp;
1465 /* Put the insns after the NOTE that starts the function.
1466 If this is inside a start_sequence, make the outer-level insn
1467 chain current, so the code is placed at the start of the
1468 function. If internal_arg_pointer is a non-virtual pseudo,
1469 it needs to be placed after the function that initializes
1470 that pseudo. */
1471 push_topmost_sequence ();
1472 if (REG_P (crtl->args.internal_arg_pointer)
1473 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1474 emit_insn_before (seq, parm_birth_insn);
1475 else
1476 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1477 pop_topmost_sequence ();
1478 return temp;
1482 /* Perform an untyped call and save the state required to perform an
1483 untyped return of whatever value was returned by the given function. */
1485 static rtx
1486 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1488 int size, align, regno;
1489 enum machine_mode mode;
1490 rtx incoming_args, result, reg, dest, src, call_insn;
1491 rtx old_stack_level = 0;
1492 rtx call_fusage = 0;
1493 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1495 arguments = convert_memory_address (Pmode, arguments);
1497 /* Create a block where the return registers can be saved. */
1498 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1500 /* Fetch the arg pointer from the ARGUMENTS block. */
1501 incoming_args = gen_reg_rtx (Pmode);
1502 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1503 #ifndef STACK_GROWS_DOWNWARD
1504 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1505 incoming_args, 0, OPTAB_LIB_WIDEN);
1506 #endif
1508 /* Push a new argument block and copy the arguments. Do not allow
1509 the (potential) memcpy call below to interfere with our stack
1510 manipulations. */
1511 do_pending_stack_adjust ();
1512 NO_DEFER_POP;
1514 /* Save the stack with nonlocal if available. */
1515 #ifdef HAVE_save_stack_nonlocal
1516 if (HAVE_save_stack_nonlocal)
1517 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1518 else
1519 #endif
1520 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1522 /* Allocate a block of memory onto the stack and copy the memory
1523 arguments to the outgoing arguments address. */
1524 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1526 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1527 may have already set current_function_calls_alloca to true.
1528 current_function_calls_alloca won't be set if argsize is zero,
1529 so we have to guarantee need_drap is true here. */
1530 if (SUPPORTS_STACK_ALIGNMENT)
1531 crtl->need_drap = true;
1533 dest = virtual_outgoing_args_rtx;
1534 #ifndef STACK_GROWS_DOWNWARD
1535 if (GET_CODE (argsize) == CONST_INT)
1536 dest = plus_constant (dest, -INTVAL (argsize));
1537 else
1538 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1539 #endif
1540 dest = gen_rtx_MEM (BLKmode, dest);
1541 set_mem_align (dest, PARM_BOUNDARY);
1542 src = gen_rtx_MEM (BLKmode, incoming_args);
1543 set_mem_align (src, PARM_BOUNDARY);
1544 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1546 /* Refer to the argument block. */
1547 apply_args_size ();
1548 arguments = gen_rtx_MEM (BLKmode, arguments);
1549 set_mem_align (arguments, PARM_BOUNDARY);
1551 /* Walk past the arg-pointer and structure value address. */
1552 size = GET_MODE_SIZE (Pmode);
1553 if (struct_value)
1554 size += GET_MODE_SIZE (Pmode);
1556 /* Restore each of the registers previously saved. Make USE insns
1557 for each of these registers for use in making the call. */
1558 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1559 if ((mode = apply_args_mode[regno]) != VOIDmode)
1561 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1562 if (size % align != 0)
1563 size = CEIL (size, align) * align;
1564 reg = gen_rtx_REG (mode, regno);
1565 emit_move_insn (reg, adjust_address (arguments, mode, size));
1566 use_reg (&call_fusage, reg);
1567 size += GET_MODE_SIZE (mode);
1570 /* Restore the structure value address unless this is passed as an
1571 "invisible" first argument. */
1572 size = GET_MODE_SIZE (Pmode);
1573 if (struct_value)
1575 rtx value = gen_reg_rtx (Pmode);
1576 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1577 emit_move_insn (struct_value, value);
1578 if (REG_P (struct_value))
1579 use_reg (&call_fusage, struct_value);
1580 size += GET_MODE_SIZE (Pmode);
1583 /* All arguments and registers used for the call are set up by now! */
1584 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1586 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1587 and we don't want to load it into a register as an optimization,
1588 because prepare_call_address already did it if it should be done. */
1589 if (GET_CODE (function) != SYMBOL_REF)
1590 function = memory_address (FUNCTION_MODE, function);
1592 /* Generate the actual call instruction and save the return value. */
1593 #ifdef HAVE_untyped_call
1594 if (HAVE_untyped_call)
1595 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1596 result, result_vector (1, result)));
1597 else
1598 #endif
1599 #ifdef HAVE_call_value
1600 if (HAVE_call_value)
1602 rtx valreg = 0;
1604 /* Locate the unique return register. It is not possible to
1605 express a call that sets more than one return register using
1606 call_value; use untyped_call for that. In fact, untyped_call
1607 only needs to save the return registers in the given block. */
1608 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1609 if ((mode = apply_result_mode[regno]) != VOIDmode)
1611 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1613 valreg = gen_rtx_REG (mode, regno);
1616 emit_call_insn (GEN_CALL_VALUE (valreg,
1617 gen_rtx_MEM (FUNCTION_MODE, function),
1618 const0_rtx, NULL_RTX, const0_rtx));
1620 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1622 else
1623 #endif
1624 gcc_unreachable ();
1626 /* Find the CALL insn we just emitted, and attach the register usage
1627 information. */
1628 call_insn = last_call_insn ();
1629 add_function_usage_to (call_insn, call_fusage);
1631 /* Restore the stack. */
1632 #ifdef HAVE_save_stack_nonlocal
1633 if (HAVE_save_stack_nonlocal)
1634 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1635 else
1636 #endif
1637 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1639 OK_DEFER_POP;
1641 /* Return the address of the result block. */
1642 result = copy_addr_to_reg (XEXP (result, 0));
1643 return convert_memory_address (ptr_mode, result);
1646 /* Perform an untyped return. */
1648 static void
1649 expand_builtin_return (rtx result)
1651 int size, align, regno;
1652 enum machine_mode mode;
1653 rtx reg;
1654 rtx call_fusage = 0;
1656 result = convert_memory_address (Pmode, result);
1658 apply_result_size ();
1659 result = gen_rtx_MEM (BLKmode, result);
1661 #ifdef HAVE_untyped_return
1662 if (HAVE_untyped_return)
1664 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1665 emit_barrier ();
1666 return;
1668 #endif
1670 /* Restore the return value and note that each value is used. */
1671 size = 0;
1672 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1673 if ((mode = apply_result_mode[regno]) != VOIDmode)
1675 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1676 if (size % align != 0)
1677 size = CEIL (size, align) * align;
1678 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1679 emit_move_insn (reg, adjust_address (result, mode, size));
1681 push_to_sequence (call_fusage);
1682 emit_use (reg);
1683 call_fusage = get_insns ();
1684 end_sequence ();
1685 size += GET_MODE_SIZE (mode);
1688 /* Put the USE insns before the return. */
1689 emit_insn (call_fusage);
1691 /* Return whatever values were restored by jumping directly to the end
1692 of the function. */
1693 expand_naked_return ();
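/* A rough usage sketch (the forwarding function and the name target_fn
   are hypothetical, not from this file): __builtin_apply_args,
   __builtin_apply and __builtin_return are typically combined to
   forward a call whose arguments are unknown at compile time, e.g.

     void *wrap (void)
     {
       void *args = __builtin_apply_args ();
       void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (res);
     }

   where 64 is a caller-chosen upper bound on the size of the pushed
   argument data.  expand_builtin_return above implements the final
   __builtin_return step.  */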
1696 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1698 static enum type_class
1699 type_to_class (tree type)
1701 switch (TREE_CODE (type))
1703 case VOID_TYPE: return void_type_class;
1704 case INTEGER_TYPE: return integer_type_class;
1705 case ENUMERAL_TYPE: return enumeral_type_class;
1706 case BOOLEAN_TYPE: return boolean_type_class;
1707 case POINTER_TYPE: return pointer_type_class;
1708 case REFERENCE_TYPE: return reference_type_class;
1709 case OFFSET_TYPE: return offset_type_class;
1710 case REAL_TYPE: return real_type_class;
1711 case COMPLEX_TYPE: return complex_type_class;
1712 case FUNCTION_TYPE: return function_type_class;
1713 case METHOD_TYPE: return method_type_class;
1714 case RECORD_TYPE: return record_type_class;
1715 case UNION_TYPE:
1716 case QUAL_UNION_TYPE: return union_type_class;
1717 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1718 ? string_type_class : array_type_class);
1719 case LANG_TYPE: return lang_type_class;
1720 default: return no_type_class;
1724 /* Expand a call EXP to __builtin_classify_type. */
1726 static rtx
1727 expand_builtin_classify_type (tree exp)
1729 if (call_expr_nargs (exp))
1730 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1731 return GEN_INT (no_type_class);
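/* For illustration (results shown symbolically; the numeric codes are
   the values of enum type_class in typeclass.h):

     __builtin_classify_type (42)          -> integer_type_class
     __builtin_classify_type (3.14)        -> real_type_class
     __builtin_classify_type ((char *) 0)  -> pointer_type_class

   The call reduces to a compile-time constant, so no code is emitted
   for it.  */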
1734 /* This helper macro, meant to be used in mathfn_built_in below,
1735 determines which among a set of three builtin math functions is
1736 appropriate for a given type mode. The `F' and `L' cases are
1737 automatically generated from the `double' case. */
1738 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1739 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1740 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1741 fcodel = BUILT_IN_MATHFN##L ; break;
1742 /* Similar to above, but appends _R after any F/L suffix. */
1743 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1744 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1745 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1746 fcodel = BUILT_IN_MATHFN##L_R ; break;
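/* As a concrete example, CASE_MATHFN (BUILT_IN_SIN) expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so a single switch label below covers the double, float and long
   double variants of each function.  */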
1748 /* Return a mathematical function equivalent to FN but operating directly
1749 on TYPE, if available. If IMPLICIT is true find the function in
1750 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1751 can't do the conversion, return zero. */
1753 static tree
1754 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1756 tree const *const fn_arr
1757 = implicit ? implicit_built_in_decls : built_in_decls;
1758 enum built_in_function fcode, fcodef, fcodel;
1760 switch (fn)
1762 CASE_MATHFN (BUILT_IN_ACOS)
1763 CASE_MATHFN (BUILT_IN_ACOSH)
1764 CASE_MATHFN (BUILT_IN_ASIN)
1765 CASE_MATHFN (BUILT_IN_ASINH)
1766 CASE_MATHFN (BUILT_IN_ATAN)
1767 CASE_MATHFN (BUILT_IN_ATAN2)
1768 CASE_MATHFN (BUILT_IN_ATANH)
1769 CASE_MATHFN (BUILT_IN_CBRT)
1770 CASE_MATHFN (BUILT_IN_CEIL)
1771 CASE_MATHFN (BUILT_IN_CEXPI)
1772 CASE_MATHFN (BUILT_IN_COPYSIGN)
1773 CASE_MATHFN (BUILT_IN_COS)
1774 CASE_MATHFN (BUILT_IN_COSH)
1775 CASE_MATHFN (BUILT_IN_DREM)
1776 CASE_MATHFN (BUILT_IN_ERF)
1777 CASE_MATHFN (BUILT_IN_ERFC)
1778 CASE_MATHFN (BUILT_IN_EXP)
1779 CASE_MATHFN (BUILT_IN_EXP10)
1780 CASE_MATHFN (BUILT_IN_EXP2)
1781 CASE_MATHFN (BUILT_IN_EXPM1)
1782 CASE_MATHFN (BUILT_IN_FABS)
1783 CASE_MATHFN (BUILT_IN_FDIM)
1784 CASE_MATHFN (BUILT_IN_FLOOR)
1785 CASE_MATHFN (BUILT_IN_FMA)
1786 CASE_MATHFN (BUILT_IN_FMAX)
1787 CASE_MATHFN (BUILT_IN_FMIN)
1788 CASE_MATHFN (BUILT_IN_FMOD)
1789 CASE_MATHFN (BUILT_IN_FREXP)
1790 CASE_MATHFN (BUILT_IN_GAMMA)
1791 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1792 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1793 CASE_MATHFN (BUILT_IN_HYPOT)
1794 CASE_MATHFN (BUILT_IN_ILOGB)
1795 CASE_MATHFN (BUILT_IN_INF)
1796 CASE_MATHFN (BUILT_IN_ISINF)
1797 CASE_MATHFN (BUILT_IN_J0)
1798 CASE_MATHFN (BUILT_IN_J1)
1799 CASE_MATHFN (BUILT_IN_JN)
1800 CASE_MATHFN (BUILT_IN_LCEIL)
1801 CASE_MATHFN (BUILT_IN_LDEXP)
1802 CASE_MATHFN (BUILT_IN_LFLOOR)
1803 CASE_MATHFN (BUILT_IN_LGAMMA)
1804 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1805 CASE_MATHFN (BUILT_IN_LLCEIL)
1806 CASE_MATHFN (BUILT_IN_LLFLOOR)
1807 CASE_MATHFN (BUILT_IN_LLRINT)
1808 CASE_MATHFN (BUILT_IN_LLROUND)
1809 CASE_MATHFN (BUILT_IN_LOG)
1810 CASE_MATHFN (BUILT_IN_LOG10)
1811 CASE_MATHFN (BUILT_IN_LOG1P)
1812 CASE_MATHFN (BUILT_IN_LOG2)
1813 CASE_MATHFN (BUILT_IN_LOGB)
1814 CASE_MATHFN (BUILT_IN_LRINT)
1815 CASE_MATHFN (BUILT_IN_LROUND)
1816 CASE_MATHFN (BUILT_IN_MODF)
1817 CASE_MATHFN (BUILT_IN_NAN)
1818 CASE_MATHFN (BUILT_IN_NANS)
1819 CASE_MATHFN (BUILT_IN_NEARBYINT)
1820 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1821 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1822 CASE_MATHFN (BUILT_IN_POW)
1823 CASE_MATHFN (BUILT_IN_POWI)
1824 CASE_MATHFN (BUILT_IN_POW10)
1825 CASE_MATHFN (BUILT_IN_REMAINDER)
1826 CASE_MATHFN (BUILT_IN_REMQUO)
1827 CASE_MATHFN (BUILT_IN_RINT)
1828 CASE_MATHFN (BUILT_IN_ROUND)
1829 CASE_MATHFN (BUILT_IN_SCALB)
1830 CASE_MATHFN (BUILT_IN_SCALBLN)
1831 CASE_MATHFN (BUILT_IN_SCALBN)
1832 CASE_MATHFN (BUILT_IN_SIGNBIT)
1833 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1834 CASE_MATHFN (BUILT_IN_SIN)
1835 CASE_MATHFN (BUILT_IN_SINCOS)
1836 CASE_MATHFN (BUILT_IN_SINH)
1837 CASE_MATHFN (BUILT_IN_SQRT)
1838 CASE_MATHFN (BUILT_IN_TAN)
1839 CASE_MATHFN (BUILT_IN_TANH)
1840 CASE_MATHFN (BUILT_IN_TGAMMA)
1841 CASE_MATHFN (BUILT_IN_TRUNC)
1842 CASE_MATHFN (BUILT_IN_Y0)
1843 CASE_MATHFN (BUILT_IN_Y1)
1844 CASE_MATHFN (BUILT_IN_YN)
1846 default:
1847 return NULL_TREE;
1850 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1851 return fn_arr[fcode];
1852 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1853 return fn_arr[fcodef];
1854 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1855 return fn_arr[fcodel];
1856 else
1857 return NULL_TREE;
1860 /* Like mathfn_built_in_1(), but always use the implicit array. */
1862 tree
1863 mathfn_built_in (tree type, enum built_in_function fn)
1865 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
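/* Typical use, e.g. when retargeting a double builtin to float (a
   sketch only; arg and call are placeholder names and the caller must
   handle a NULL_TREE result):

     tree sqrtf_fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);
     if (sqrtf_fn)
       call = build_call_expr (sqrtf_fn, 1, arg);

   Here sqrtf_fn is implicit_built_in_decls[BUILT_IN_SQRTF], or
   NULL_TREE if the runtime is not known to provide sqrtf.  */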
1868 /* If errno must be maintained, expand the RTL to check if the result,
1869 TARGET, of a built-in function call, EXP, is NaN, and if so set
1870 errno to EDOM. */
1872 static void
1873 expand_errno_check (tree exp, rtx target)
1875 rtx lab = gen_label_rtx ();
1877 /* Test the result; if it is NaN, set errno=EDOM because
1878 the argument was not in the domain. */
1879 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1880 0, lab);
1882 #ifdef TARGET_EDOM
1883 /* If this built-in doesn't throw an exception, set errno directly. */
1884 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1886 #ifdef GEN_ERRNO_RTX
1887 rtx errno_rtx = GEN_ERRNO_RTX;
1888 #else
1889 rtx errno_rtx
1890 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1891 #endif
1892 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1893 emit_label (lab);
1894 return;
1896 #endif
1898 /* Make sure the library call isn't expanded as a tail call. */
1899 CALL_EXPR_TAILCALL (exp) = 0;
1901 /* We can't set errno=EDOM directly; let the library call do it.
1902 Pop the arguments right away in case the call gets deleted. */
1903 NO_DEFER_POP;
1904 expand_call (exp, target, 0);
1905 OK_DEFER_POP;
1906 emit_label (lab);
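/* The RTL emitted above is roughly equivalent to this C sketch (for
   the TARGET_EDOM case):

     if (result == result)   (true for every value except NaN)
       goto done;
     errno = EDOM;
   done:;

   i.e. the only way to reach the errno store is when the result is a
   NaN, which signals a domain error.  */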
1909 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1910 Return NULL_RTX if a normal call should be emitted rather than expanding
1911 the function in-line. EXP is the expression that is a call to the builtin
1912 function; if convenient, the result should be placed in TARGET.
1913 SUBTARGET may be used as the target for computing one of EXP's operands. */
1915 static rtx
1916 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1918 optab builtin_optab;
1919 rtx op0, insns, before_call;
1920 tree fndecl = get_callee_fndecl (exp);
1921 enum machine_mode mode;
1922 bool errno_set = false;
1923 tree arg;
1925 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1926 return NULL_RTX;
1928 arg = CALL_EXPR_ARG (exp, 0);
1930 switch (DECL_FUNCTION_CODE (fndecl))
1932 CASE_FLT_FN (BUILT_IN_SQRT):
1933 errno_set = ! tree_expr_nonnegative_p (arg);
1934 builtin_optab = sqrt_optab;
1935 break;
1936 CASE_FLT_FN (BUILT_IN_EXP):
1937 errno_set = true; builtin_optab = exp_optab; break;
1938 CASE_FLT_FN (BUILT_IN_EXP10):
1939 CASE_FLT_FN (BUILT_IN_POW10):
1940 errno_set = true; builtin_optab = exp10_optab; break;
1941 CASE_FLT_FN (BUILT_IN_EXP2):
1942 errno_set = true; builtin_optab = exp2_optab; break;
1943 CASE_FLT_FN (BUILT_IN_EXPM1):
1944 errno_set = true; builtin_optab = expm1_optab; break;
1945 CASE_FLT_FN (BUILT_IN_LOGB):
1946 errno_set = true; builtin_optab = logb_optab; break;
1947 CASE_FLT_FN (BUILT_IN_LOG):
1948 errno_set = true; builtin_optab = log_optab; break;
1949 CASE_FLT_FN (BUILT_IN_LOG10):
1950 errno_set = true; builtin_optab = log10_optab; break;
1951 CASE_FLT_FN (BUILT_IN_LOG2):
1952 errno_set = true; builtin_optab = log2_optab; break;
1953 CASE_FLT_FN (BUILT_IN_LOG1P):
1954 errno_set = true; builtin_optab = log1p_optab; break;
1955 CASE_FLT_FN (BUILT_IN_ASIN):
1956 builtin_optab = asin_optab; break;
1957 CASE_FLT_FN (BUILT_IN_ACOS):
1958 builtin_optab = acos_optab; break;
1959 CASE_FLT_FN (BUILT_IN_TAN):
1960 builtin_optab = tan_optab; break;
1961 CASE_FLT_FN (BUILT_IN_ATAN):
1962 builtin_optab = atan_optab; break;
1963 CASE_FLT_FN (BUILT_IN_FLOOR):
1964 builtin_optab = floor_optab; break;
1965 CASE_FLT_FN (BUILT_IN_CEIL):
1966 builtin_optab = ceil_optab; break;
1967 CASE_FLT_FN (BUILT_IN_TRUNC):
1968 builtin_optab = btrunc_optab; break;
1969 CASE_FLT_FN (BUILT_IN_ROUND):
1970 builtin_optab = round_optab; break;
1971 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1972 builtin_optab = nearbyint_optab;
1973 if (flag_trapping_math)
1974 break;
1975 /* Else fall through and expand as rint. */
1976 CASE_FLT_FN (BUILT_IN_RINT):
1977 builtin_optab = rint_optab; break;
1978 default:
1979 gcc_unreachable ();
1982 /* Make a suitable register to place result in. */
1983 mode = TYPE_MODE (TREE_TYPE (exp));
1985 if (! flag_errno_math || ! HONOR_NANS (mode))
1986 errno_set = false;
1988 /* Before working hard, check whether the instruction is available. */
1989 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1991 target = gen_reg_rtx (mode);
1993 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1994 need to expand the argument again. This way, we will not perform
1995 side-effects more than once. */
1996 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1998 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2000 start_sequence ();
2002 /* Compute into TARGET.
2003 Set TARGET to wherever the result comes back. */
2004 target = expand_unop (mode, builtin_optab, op0, target, 0);
2006 if (target != 0)
2008 if (errno_set)
2009 expand_errno_check (exp, target);
2011 /* Output the entire sequence. */
2012 insns = get_insns ();
2013 end_sequence ();
2014 emit_insn (insns);
2015 return target;
2018 /* If we were unable to expand via the builtin, stop the sequence
2019 (without outputting the insns) and call to the library function
2020 with the stabilized argument list. */
2021 end_sequence ();
2024 before_call = get_last_insn ();
2026 return expand_call (exp, target, target == const0_rtx);
2029 /* Expand a call to the builtin binary math functions (pow and atan2).
2030 Return NULL_RTX if a normal call should be emitted rather than expanding the
2031 function in-line. EXP is the expression that is a call to the builtin
2032 function; if convenient, the result should be placed in TARGET.
2033 SUBTARGET may be used as the target for computing one of EXP's
2034 operands. */
2036 static rtx
2037 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2039 optab builtin_optab;
2040 rtx op0, op1, insns;
2041 int op1_type = REAL_TYPE;
2042 tree fndecl = get_callee_fndecl (exp);
2043 tree arg0, arg1;
2044 enum machine_mode mode;
2045 bool errno_set = true;
2047 switch (DECL_FUNCTION_CODE (fndecl))
2049 CASE_FLT_FN (BUILT_IN_SCALBN):
2050 CASE_FLT_FN (BUILT_IN_SCALBLN):
2051 CASE_FLT_FN (BUILT_IN_LDEXP):
2052 op1_type = INTEGER_TYPE;
2053 default:
2054 break;
2057 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2058 return NULL_RTX;
2060 arg0 = CALL_EXPR_ARG (exp, 0);
2061 arg1 = CALL_EXPR_ARG (exp, 1);
2063 switch (DECL_FUNCTION_CODE (fndecl))
2065 CASE_FLT_FN (BUILT_IN_POW):
2066 builtin_optab = pow_optab; break;
2067 CASE_FLT_FN (BUILT_IN_ATAN2):
2068 builtin_optab = atan2_optab; break;
2069 CASE_FLT_FN (BUILT_IN_SCALB):
2070 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2071 return 0;
2072 builtin_optab = scalb_optab; break;
2073 CASE_FLT_FN (BUILT_IN_SCALBN):
2074 CASE_FLT_FN (BUILT_IN_SCALBLN):
2075 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2076 return 0;
2077 /* Fall through... */
2078 CASE_FLT_FN (BUILT_IN_LDEXP):
2079 builtin_optab = ldexp_optab; break;
2080 CASE_FLT_FN (BUILT_IN_FMOD):
2081 builtin_optab = fmod_optab; break;
2082 CASE_FLT_FN (BUILT_IN_REMAINDER):
2083 CASE_FLT_FN (BUILT_IN_DREM):
2084 builtin_optab = remainder_optab; break;
2085 default:
2086 gcc_unreachable ();
2089 /* Make a suitable register to place result in. */
2090 mode = TYPE_MODE (TREE_TYPE (exp));
2092 /* Before working hard, check whether the instruction is available. */
2093 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2094 return NULL_RTX;
2096 target = gen_reg_rtx (mode);
2098 if (! flag_errno_math || ! HONOR_NANS (mode))
2099 errno_set = false;
2101 /* Always stabilize the argument list. */
2102 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2103 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2105 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2106 op1 = expand_normal (arg1);
2108 start_sequence ();
2110 /* Compute into TARGET.
2111 Set TARGET to wherever the result comes back. */
2112 target = expand_binop (mode, builtin_optab, op0, op1,
2113 target, 0, OPTAB_DIRECT);
2115 /* If we were unable to expand via the builtin, stop the sequence
2116 (without outputting the insns) and call to the library function
2117 with the stabilized argument list. */
2118 if (target == 0)
2120 end_sequence ();
2121 return expand_call (exp, target, target == const0_rtx);
2124 if (errno_set)
2125 expand_errno_check (exp, target);
2127 /* Output the entire sequence. */
2128 insns = get_insns ();
2129 end_sequence ();
2130 emit_insn (insns);
2132 return target;
2135 /* Expand a call to the builtin sin and cos math functions.
2136 Return NULL_RTX if a normal call should be emitted rather than expanding the
2137 function in-line. EXP is the expression that is a call to the builtin
2138 function; if convenient, the result should be placed in TARGET.
2139 SUBTARGET may be used as the target for computing one of EXP's
2140 operands. */
2142 static rtx
2143 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2145 optab builtin_optab;
2146 rtx op0, insns;
2147 tree fndecl = get_callee_fndecl (exp);
2148 enum machine_mode mode;
2149 tree arg;
2151 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2152 return NULL_RTX;
2154 arg = CALL_EXPR_ARG (exp, 0);
2156 switch (DECL_FUNCTION_CODE (fndecl))
2158 CASE_FLT_FN (BUILT_IN_SIN):
2159 CASE_FLT_FN (BUILT_IN_COS):
2160 builtin_optab = sincos_optab; break;
2161 default:
2162 gcc_unreachable ();
2165 /* Make a suitable register to place result in. */
2166 mode = TYPE_MODE (TREE_TYPE (exp));
2168 /* Check if the sincos insn is available; otherwise fall back
2169 to the sin or cos insn. */
2170 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2171 switch (DECL_FUNCTION_CODE (fndecl))
2173 CASE_FLT_FN (BUILT_IN_SIN):
2174 builtin_optab = sin_optab; break;
2175 CASE_FLT_FN (BUILT_IN_COS):
2176 builtin_optab = cos_optab; break;
2177 default:
2178 gcc_unreachable ();
2181 /* Before working hard, check whether the instruction is available. */
2182 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2184 target = gen_reg_rtx (mode);
2186 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2187 need to expand the argument again. This way, we will not perform
2188 side-effects more than once. */
2189 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2191 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2193 start_sequence ();
2195 /* Compute into TARGET.
2196 Set TARGET to wherever the result comes back. */
2197 if (builtin_optab == sincos_optab)
2199 int result;
2201 switch (DECL_FUNCTION_CODE (fndecl))
2203 CASE_FLT_FN (BUILT_IN_SIN):
2204 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2205 break;
2206 CASE_FLT_FN (BUILT_IN_COS):
2207 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2208 break;
2209 default:
2210 gcc_unreachable ();
2212 gcc_assert (result);
2214 else
2216 target = expand_unop (mode, builtin_optab, op0, target, 0);
2219 if (target != 0)
2221 /* Output the entire sequence. */
2222 insns = get_insns ();
2223 end_sequence ();
2224 emit_insn (insns);
2225 return target;
2228 /* If we were unable to expand via the builtin, stop the sequence
2229 (without outputting the insns) and call to the library function
2230 with the stabilized argument list. */
2231 end_sequence ();
2234 target = expand_call (exp, target, target == const0_rtx);
2236 return target;
2239 /* Expand a call to one of the builtin math functions that operate on
2240 a floating point argument and produce an integer result (ilogb, isinf,
2241 isnan, etc.).
2242 Return 0 if a normal call should be emitted rather than expanding the
2243 function in-line. EXP is the expression that is a call to the builtin
2244 function; if convenient, the result should be placed in TARGET.
2245 SUBTARGET may be used as the target for computing one of EXP's operands. */
2247 static rtx
2248 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2250 optab builtin_optab = 0;
2251 enum insn_code icode = CODE_FOR_nothing;
2252 rtx op0;
2253 tree fndecl = get_callee_fndecl (exp);
2254 enum machine_mode mode;
2255 bool errno_set = false;
2256 tree arg;
2258 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2259 return NULL_RTX;
2261 arg = CALL_EXPR_ARG (exp, 0);
2263 switch (DECL_FUNCTION_CODE (fndecl))
2265 CASE_FLT_FN (BUILT_IN_ILOGB):
2266 errno_set = true; builtin_optab = ilogb_optab; break;
2267 CASE_FLT_FN (BUILT_IN_ISINF):
2268 builtin_optab = isinf_optab; break;
2269 case BUILT_IN_ISNORMAL:
2270 case BUILT_IN_ISFINITE:
2271 CASE_FLT_FN (BUILT_IN_FINITE):
2272 /* These builtins have no optabs (yet). */
2273 break;
2274 default:
2275 gcc_unreachable ();
2278 /* There's no easy way to detect the case we need to set EDOM. */
2279 if (flag_errno_math && errno_set)
2280 return NULL_RTX;
2282 /* Optab mode depends on the mode of the input argument. */
2283 mode = TYPE_MODE (TREE_TYPE (arg));
2285 if (builtin_optab)
2286 icode = optab_handler (builtin_optab, mode)->insn_code;
2288 /* Before working hard, check whether the instruction is available. */
2289 if (icode != CODE_FOR_nothing)
2291 /* Make a suitable register to place result in. */
2292 if (!target
2293 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2294 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2296 gcc_assert (insn_data[icode].operand[0].predicate
2297 (target, GET_MODE (target)));
2299 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2300 need to expand the argument again. This way, we will not perform
2301 side-effects more than once. */
2302 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2304 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2306 if (mode != GET_MODE (op0))
2307 op0 = convert_to_mode (mode, op0, 0);
2309 /* Compute into TARGET.
2310 Set TARGET to wherever the result comes back. */
2311 emit_unop_insn (icode, target, op0, UNKNOWN);
2312 return target;
2315 /* If there is no optab, try generic code. */
2316 switch (DECL_FUNCTION_CODE (fndecl))
2318 tree result;
2320 CASE_FLT_FN (BUILT_IN_ISINF):
2322 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2323 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2324 tree const type = TREE_TYPE (arg);
2325 REAL_VALUE_TYPE r;
2326 char buf[128];
2328 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2329 real_from_string (&r, buf);
2330 result = build_call_expr (isgr_fn, 2,
2331 fold_build1 (ABS_EXPR, type, arg),
2332 build_real (type, r));
2333 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2335 CASE_FLT_FN (BUILT_IN_FINITE):
2336 case BUILT_IN_ISFINITE:
2338 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2339 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2340 tree const type = TREE_TYPE (arg);
2341 REAL_VALUE_TYPE r;
2342 char buf[128];
2344 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2345 real_from_string (&r, buf);
2346 result = build_call_expr (isle_fn, 2,
2347 fold_build1 (ABS_EXPR, type, arg),
2348 build_real (type, r));
2349 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2351 case BUILT_IN_ISNORMAL:
2353 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2354 islessequal(fabs(x),DBL_MAX). */
2355 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2356 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2357 tree const type = TREE_TYPE (arg);
2358 REAL_VALUE_TYPE rmax, rmin;
2359 char buf[128];
2361 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2362 real_from_string (&rmax, buf);
2363 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2364 real_from_string (&rmin, buf);
2365 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2366 result = build_call_expr (isle_fn, 2, arg,
2367 build_real (type, rmax));
2368 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2369 build_call_expr (isge_fn, 2, arg,
2370 build_real (type, rmin)));
2371 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2373 default:
2374 break;
2377 target = expand_call (exp, target, target == const0_rtx);
2379 return target;
2382 /* Expand a call to the builtin sincos math function.
2383 Return NULL_RTX if a normal call should be emitted rather than expanding the
2384 function in-line. EXP is the expression that is a call to the builtin
2385 function. */
2387 static rtx
2388 expand_builtin_sincos (tree exp)
2390 rtx op0, op1, op2, target1, target2;
2391 enum machine_mode mode;
2392 tree arg, sinp, cosp;
2393 int result;
2395 if (!validate_arglist (exp, REAL_TYPE,
2396 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2397 return NULL_RTX;
2399 arg = CALL_EXPR_ARG (exp, 0);
2400 sinp = CALL_EXPR_ARG (exp, 1);
2401 cosp = CALL_EXPR_ARG (exp, 2);
2403 /* Make a suitable register to place result in. */
2404 mode = TYPE_MODE (TREE_TYPE (arg));
2406 /* Check if sincos insn is available, otherwise emit the call. */
2407 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2408 return NULL_RTX;
2410 target1 = gen_reg_rtx (mode);
2411 target2 = gen_reg_rtx (mode);
2413 op0 = expand_normal (arg);
2414 op1 = expand_normal (build_fold_indirect_ref (sinp));
2415 op2 = expand_normal (build_fold_indirect_ref (cosp));
2417 /* Compute into target1 and target2.
2418 Set TARGET to wherever the result comes back. */
2419 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2420 gcc_assert (result);
2422 /* Move target1 and target2 to the memory locations indicated
2423 by op1 and op2. */
2424 emit_move_insn (op1, target1);
2425 emit_move_insn (op2, target2);
2427 return const0_rtx;
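/* In source terms, the expansion above turns

     sincos (x, &s, &c);

   into a single sincos instruction when the target provides one; the
   two register results are then stored through the sin and cos
   pointers.  Without a sincos insn we return NULL_RTX and the normal
   library call is emitted instead.  */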
2430 /* Expand a call to the internal cexpi builtin to the sincos math function.
2431 EXP is the expression that is a call to the builtin function; if convenient,
2432 the result should be placed in TARGET. SUBTARGET may be used as the target
2433 for computing one of EXP's operands. */
2435 static rtx
2436 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2438 tree fndecl = get_callee_fndecl (exp);
2439 tree arg, type;
2440 enum machine_mode mode;
2441 rtx op0, op1, op2;
2443 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2444 return NULL_RTX;
2446 arg = CALL_EXPR_ARG (exp, 0);
2447 type = TREE_TYPE (arg);
2448 mode = TYPE_MODE (TREE_TYPE (arg));
2450 /* Try expanding via a sincos optab, fall back to emitting a libcall
2451 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2452 is only generated from sincos or cexp, or when either of them is available. */
2453 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2455 op1 = gen_reg_rtx (mode);
2456 op2 = gen_reg_rtx (mode);
2458 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2460 /* Compute into op1 and op2. */
2461 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2463 else if (TARGET_HAS_SINCOS)
2465 tree call, fn = NULL_TREE;
2466 tree top1, top2;
2467 rtx op1a, op2a;
2469 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2470 fn = built_in_decls[BUILT_IN_SINCOSF];
2471 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2472 fn = built_in_decls[BUILT_IN_SINCOS];
2473 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2474 fn = built_in_decls[BUILT_IN_SINCOSL];
2475 else
2476 gcc_unreachable ();
2478 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2479 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2480 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2481 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2482 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2483 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2485 /* Make sure not to fold the sincos call again. */
2486 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2487 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2488 call, 3, arg, top1, top2));
2490 else
2492 tree call, fn = NULL_TREE, narg;
2493 tree ctype = build_complex_type (type);
2495 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2496 fn = built_in_decls[BUILT_IN_CEXPF];
2497 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2498 fn = built_in_decls[BUILT_IN_CEXP];
2499 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2500 fn = built_in_decls[BUILT_IN_CEXPL];
2501 else
2502 gcc_unreachable ();
2504 /* If we don't have a decl for cexp, create one. This is the
2505 friendliest fallback if the user calls __builtin_cexpi on a
2506 target without full C99 function support. */
2507 if (fn == NULL_TREE)
2509 tree fntype;
2510 const char *name = NULL;
2512 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2513 name = "cexpf";
2514 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2515 name = "cexp";
2516 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2517 name = "cexpl";
2519 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2520 fn = build_fn_decl (name, fntype);
2523 narg = fold_build2 (COMPLEX_EXPR, ctype,
2524 build_real (type, dconst0), arg);
2526 /* Make sure not to fold the cexp call again. */
2527 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2528 return expand_expr (build_call_nary (ctype, call, 1, narg),
2529 target, VOIDmode, EXPAND_NORMAL);
2532 /* Now build the proper return type. */
2533 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2534 make_tree (TREE_TYPE (arg), op2),
2535 make_tree (TREE_TYPE (arg), op1)),
2536 target, VOIDmode, EXPAND_NORMAL);
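/* Summary of the lowering above: __builtin_cexpi (x) computes
   cos (x) + i*sin (x) and is expanded, in order of preference, as

     1. a sincos insn, when the optab is available;
     2. a call to sincos() through two temporaries, when the target
        C library has it;
     3. a call to cexp (0.0 + x*i) otherwise.  */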
2539 /* Expand a call to one of the builtin rounding functions gcc defines
2540 as an extension (lfloor and lceil). As these are gcc extensions we
2541 do not need to worry about setting errno to EDOM.
2542 If expanding via optab fails, lower expression to (int)(floor(x)).
2543 EXP is the expression that is a call to the builtin function;
2544 if convenient, the result should be placed in TARGET. */
2546 static rtx
2547 expand_builtin_int_roundingfn (tree exp, rtx target)
2549 convert_optab builtin_optab;
2550 rtx op0, insns, tmp;
2551 tree fndecl = get_callee_fndecl (exp);
2552 enum built_in_function fallback_fn;
2553 tree fallback_fndecl;
2554 enum machine_mode mode;
2555 tree arg;
2557 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2558 gcc_unreachable ();
2560 arg = CALL_EXPR_ARG (exp, 0);
2562 switch (DECL_FUNCTION_CODE (fndecl))
2564 CASE_FLT_FN (BUILT_IN_LCEIL):
2565 CASE_FLT_FN (BUILT_IN_LLCEIL):
2566 builtin_optab = lceil_optab;
2567 fallback_fn = BUILT_IN_CEIL;
2568 break;
2570 CASE_FLT_FN (BUILT_IN_LFLOOR):
2571 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2572 builtin_optab = lfloor_optab;
2573 fallback_fn = BUILT_IN_FLOOR;
2574 break;
2576 default:
2577 gcc_unreachable ();
2580 /* Make a suitable register to place result in. */
2581 mode = TYPE_MODE (TREE_TYPE (exp));
2583 target = gen_reg_rtx (mode);
2585 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2586 need to expand the argument again. This way, we will not perform
2587 side-effects more than once. */
2588 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2590 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2592 start_sequence ();
2594 /* Compute into TARGET. */
2595 if (expand_sfix_optab (target, op0, builtin_optab))
2597 /* Output the entire sequence. */
2598 insns = get_insns ();
2599 end_sequence ();
2600 emit_insn (insns);
2601 return target;
2604 /* If we were unable to expand via the builtin, stop the sequence
2605 (without outputting the insns). */
2606 end_sequence ();
2608 /* Fall back to floating point rounding optab. */
2609 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2611 /* For non-C99 targets we may end up without a fallback fndecl here
2612 if the user called __builtin_lfloor directly. In this case emit
2613 a call to the floor/ceil variants nevertheless. This should result
2614 in the best user experience for targets without full C99 support. */
2615 if (fallback_fndecl == NULL_TREE)
2617 tree fntype;
2618 const char *name = NULL;
2620 switch (DECL_FUNCTION_CODE (fndecl))
2622 case BUILT_IN_LCEIL:
2623 case BUILT_IN_LLCEIL:
2624 name = "ceil";
2625 break;
2626 case BUILT_IN_LCEILF:
2627 case BUILT_IN_LLCEILF:
2628 name = "ceilf";
2629 break;
2630 case BUILT_IN_LCEILL:
2631 case BUILT_IN_LLCEILL:
2632 name = "ceill";
2633 break;
2634 case BUILT_IN_LFLOOR:
2635 case BUILT_IN_LLFLOOR:
2636 name = "floor";
2637 break;
2638 case BUILT_IN_LFLOORF:
2639 case BUILT_IN_LLFLOORF:
2640 name = "floorf";
2641 break;
2642 case BUILT_IN_LFLOORL:
2643 case BUILT_IN_LLFLOORL:
2644 name = "floorl";
2645 break;
2646 default:
2647 gcc_unreachable ();
2650 fntype = build_function_type_list (TREE_TYPE (arg),
2651 TREE_TYPE (arg), NULL_TREE);
2652 fallback_fndecl = build_fn_decl (name, fntype);
2655 exp = build_call_expr (fallback_fndecl, 1, arg);
2657 tmp = expand_normal (exp);
2659 /* Truncate the result of the floating point optab to integer
2660 via expand_fix (). */
2661 target = gen_reg_rtx (mode);
2662 expand_fix (target, tmp, 0);
2664 return target;
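/* For example, with no lfloor insn and no C99 runtime, the call

     long l = __builtin_lfloor (d);

   is lowered to roughly

     long l = (long) floor (d);

   where the final (long) conversion is produced by expand_fix.  */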
2667 /* Expand a call to one of the builtin math functions doing integer
2668 conversion (lrint).
2669 Return 0 if a normal call should be emitted rather than expanding the
2670 function in-line. EXP is the expression that is a call to the builtin
2671 function; if convenient, the result should be placed in TARGET. */
2673 static rtx
2674 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2676 convert_optab builtin_optab;
2677 rtx op0, insns;
2678 tree fndecl = get_callee_fndecl (exp);
2679 tree arg;
2680 enum machine_mode mode;
2682 /* There's no easy way to detect the case we need to set EDOM. */
2683 if (flag_errno_math)
2684 return NULL_RTX;
2686 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2687 gcc_unreachable ();
2689 arg = CALL_EXPR_ARG (exp, 0);
2691 switch (DECL_FUNCTION_CODE (fndecl))
2693 CASE_FLT_FN (BUILT_IN_LRINT):
2694 CASE_FLT_FN (BUILT_IN_LLRINT):
2695 builtin_optab = lrint_optab; break;
2696 CASE_FLT_FN (BUILT_IN_LROUND):
2697 CASE_FLT_FN (BUILT_IN_LLROUND):
2698 builtin_optab = lround_optab; break;
2699 default:
2700 gcc_unreachable ();
2703 /* Make a suitable register to place result in. */
2704 mode = TYPE_MODE (TREE_TYPE (exp));
2706 target = gen_reg_rtx (mode);
2708 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2709 need to expand the argument again. This way, we will not perform
2710 side-effects more than once. */
2711 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2713 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2715 start_sequence ();
2717 if (expand_sfix_optab (target, op0, builtin_optab))
2719 /* Output the entire sequence. */
2720 insns = get_insns ();
2721 end_sequence ();
2722 emit_insn (insns);
2723 return target;
2726 /* If we were unable to expand via the builtin, stop the sequence
2727 (without outputting the insns) and call to the library function
2728 with the stabilized argument list. */
2729 end_sequence ();
2731 target = expand_call (exp, target, target == const0_rtx);
2733 return target;
2736 /* To evaluate powi(x,n), the floating point value x raised to the
2737 constant integer exponent n, we use a hybrid algorithm that
2738 combines the "window method" with look-up tables. For an
2739 introduction to exponentiation algorithms and "addition chains",
2740 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2741 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2742 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2743 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2745 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2746 multiplications to inline before calling the system library's pow
2747 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2748 so this default never requires calling pow, powf or powl. */
2750 #ifndef POWI_MAX_MULTS
2751 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2752 #endif
2754 /* The size of the "optimal power tree" lookup table. All
2755 exponents less than this value are simply looked up in the
2756 powi_table below. This threshold is also used to size the
2757 cache of pseudo registers that hold intermediate results. */
2758 #define POWI_TABLE_SIZE 256
2760 /* The size, in bits of the window, used in the "window method"
2761 exponentiation algorithm. This is equivalent to a radix of
2762 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2763 #define POWI_WINDOW_SIZE 3
2765 /* The following table is an efficient representation of an
2766 "optimal power tree". For each value, i, the corresponding
2767 value, j, in the table states that an optimal evaluation
2768 sequence for calculating pow(x,i) can be found by evaluating
2769 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2770 100 integers is given in Knuth's "Seminumerical algorithms". */
2772 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2774 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2775 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2776 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2777 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2778 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2779 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2780 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2781 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2782 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2783 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2784 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2785 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2786 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2787 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2788 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2789 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2790 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2791 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2792 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2793 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2794 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2795 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2796 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2797 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2798 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2799 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2800 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2801 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2802 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2803 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2804 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2805 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
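/* Worked example: for n = 15 the table gives powi_table[15] = 9, so

     x**15 = x**9 * x**6
     x**9  = x**6 * x**3        (powi_table[9] = 6)
     x**6  = x**3 * x**3        (powi_table[6] = 3)
     x**3  = x**2 * x           (powi_table[3] = 2)
     x**2  = x * x              (powi_table[2] = 1)

   for a total of 5 multiplications, one fewer than the 6 required by
   plain binary (square-and-multiply) exponentiation.  */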
2809 /* Return the number of multiplications required to calculate
2810 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2811 subroutine of powi_cost. CACHE is an array indicating
2812 which exponents have already been calculated. */
2814 static int
2815 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2817 /* If we've already calculated this exponent, then this evaluation
2818 doesn't require any additional multiplications. */
2819 if (cache[n])
2820 return 0;
2822 cache[n] = true;
2823 return powi_lookup_cost (n - powi_table[n], cache)
2824 + powi_lookup_cost (powi_table[n], cache) + 1;
2827 /* Return the number of multiplications required to calculate
2828 powi(x,n) for an arbitrary x, given the exponent N. This
2829 function needs to be kept in sync with expand_powi below. */
2831 static int
2832 powi_cost (HOST_WIDE_INT n)
2834 bool cache[POWI_TABLE_SIZE];
2835 unsigned HOST_WIDE_INT digit;
2836 unsigned HOST_WIDE_INT val;
2837 int result;
2839 if (n == 0)
2840 return 0;
2842 /* Ignore the reciprocal when calculating the cost. */
2843 val = (n < 0) ? -n : n;
2845 /* Initialize the exponent cache. */
2846 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2847 cache[1] = true;
2849 result = 0;
2851 while (val >= POWI_TABLE_SIZE)
2853 if (val & 1)
2855 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2856 result += powi_lookup_cost (digit, cache)
2857 + POWI_WINDOW_SIZE + 1;
2858 val >>= POWI_WINDOW_SIZE;
2860 else
2862 val >>= 1;
2863 result++;
2867 return result + powi_lookup_cost (val, cache);
2870 /* Recursive subroutine of expand_powi. This function takes the array,
2871 CACHE, of already calculated exponents and an exponent N and returns
2872 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2874 static rtx
2875 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2877 unsigned HOST_WIDE_INT digit;
2878 rtx target, result;
2879 rtx op0, op1;
2881 if (n < POWI_TABLE_SIZE)
2883 if (cache[n])
2884 return cache[n];
2886 target = gen_reg_rtx (mode);
2887 cache[n] = target;
2889 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2890 op1 = expand_powi_1 (mode, powi_table[n], cache);
2892 else if (n & 1)
2894 target = gen_reg_rtx (mode);
2895 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2896 op0 = expand_powi_1 (mode, n - digit, cache);
2897 op1 = expand_powi_1 (mode, digit, cache);
2899 else
2901 target = gen_reg_rtx (mode);
2902 op0 = expand_powi_1 (mode, n >> 1, cache);
2903 op1 = op0;
2906 result = expand_mult (mode, op0, op1, target, 0);
2907 if (result != target)
2908 emit_move_insn (target, result);
2909 return target;
2912 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2913 floating point operand in mode MODE, and N is the exponent. This
2914 function needs to be kept in sync with powi_cost above. */
2916 static rtx
2917 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2919 unsigned HOST_WIDE_INT val;
2920 rtx cache[POWI_TABLE_SIZE];
2921 rtx result;
2923 if (n == 0)
2924 return CONST1_RTX (mode);
2926 val = (n < 0) ? -n : n;
2928 memset (cache, 0, sizeof (cache));
2929 cache[1] = x;
2931 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2933 /* If the original exponent was negative, reciprocate the result. */
2934 if (n < 0)
2935 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2936 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2938 return result;
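/* For instance, powi (x, -5) expands to roughly

     t2 = x * x;        (x**2)
     t3 = t2 * x;       (x**3)
     t5 = t3 * t2;      (x**5, since powi_table[5] = 3)
     result = 1.0 / t5;

   i.e. the positive power is built first and a single division
   supplies the reciprocal.  */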
2941 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2942 a normal call should be emitted rather than expanding the function
2943 in-line. EXP is the expression that is a call to the builtin
2944 function; if convenient, the result should be placed in TARGET. */
2946 static rtx
2947 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2949 tree arg0, arg1;
2950 tree fn, narg0;
2951 tree type = TREE_TYPE (exp);
2952 REAL_VALUE_TYPE cint, c, c2;
2953 HOST_WIDE_INT n;
2954 rtx op, op2;
2955 enum machine_mode mode = TYPE_MODE (type);
2957 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2958 return NULL_RTX;
2960 arg0 = CALL_EXPR_ARG (exp, 0);
2961 arg1 = CALL_EXPR_ARG (exp, 1);
2963 if (TREE_CODE (arg1) != REAL_CST
2964 || TREE_OVERFLOW (arg1))
2965 return expand_builtin_mathfn_2 (exp, target, subtarget);
2967 /* Handle constant exponents. */
2969 /* For integer valued exponents we can expand to an optimal multiplication
2970 sequence using expand_powi. */
2971 c = TREE_REAL_CST (arg1);
2972 n = real_to_integer (&c);
2973 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2974 if (real_identical (&c, &cint)
2975 && ((n >= -1 && n <= 2)
2976 || (flag_unsafe_math_optimizations
2977 && optimize_insn_for_speed_p ()
2978 && powi_cost (n) <= POWI_MAX_MULTS)))
2980 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2981 if (n != 1)
2983 op = force_reg (mode, op);
2984 op = expand_powi (op, mode, n);
2986 return op;
2989 narg0 = builtin_save_expr (arg0);
2991 /* If the exponent is not integer valued, check if it is half of an integer.
2992 In this case we can expand to sqrt (x) * x**(n/2). */
2993 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2994 if (fn != NULL_TREE)
2996 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2997 n = real_to_integer (&c2);
2998 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2999 if (real_identical (&c2, &cint)
3000 && ((flag_unsafe_math_optimizations
3001 && optimize_insn_for_speed_p ()
3002 && powi_cost (n/2) <= POWI_MAX_MULTS)
3003 || n == 1))
3005 tree call_expr = build_call_expr (fn, 1, narg0);
3006 /* Use expand_expr in case the newly built call expression
3007 was folded to a non-call. */
3008 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3009 if (n != 1)
3011 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3012 op2 = force_reg (mode, op2);
3013 op2 = expand_powi (op2, mode, abs (n / 2));
3014 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3015 0, OPTAB_LIB_WIDEN);
3016 /* If the original exponent was negative, reciprocate the
3017 result. */
3018 if (n < 0)
3019 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3020 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3022 return op;
3026 /* Check whether the exponent is a third of an integer. In this case
3027 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3028 different from pow (x, 1./3.) due to rounding and behavior
3029 with negative x we need to constrain this transformation to
3030 unsafe math and positive x or finite math. */
3031 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3032 if (fn != NULL_TREE
3033 && flag_unsafe_math_optimizations
3034 && (tree_expr_nonnegative_p (arg0)
3035 || !HONOR_NANS (mode)))
3037 REAL_VALUE_TYPE dconst3;
3038 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3039 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3040 real_round (&c2, mode, &c2);
3041 n = real_to_integer (&c2);
3042 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3043 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3044 real_convert (&c2, mode, &c2);
3045 if (real_identical (&c2, &c)
3046 && ((optimize_insn_for_speed_p ()
3047 && powi_cost (n/3) <= POWI_MAX_MULTS)
3048 || n == 1))
3050 tree call_expr = build_call_expr (fn, 1, narg0);
3051 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3052 if (abs (n) % 3 == 2)
3053 op = expand_simple_binop (mode, MULT, op, op, op,
3054 0, OPTAB_LIB_WIDEN);
3055 if (n != 1)
3057 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3058 op2 = force_reg (mode, op2);
3059 op2 = expand_powi (op2, mode, abs (n / 3));
3060 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3061 0, OPTAB_LIB_WIDEN);
3062 /* If the original exponent was negative, reciprocate the
3063 result. */
3064 if (n < 0)
3065 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3066 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3068 return op;
3072 /* Fall back to optab expansion. */
3073 return expand_builtin_mathfn_2 (exp, target, subtarget);
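/* Two examples of the constant-exponent cases above, assuming
   -funsafe-math-optimizations where noted:

     pow (x, 2.0)  ->  x * x                (integer exponent)
     pow (x, 2.5)  ->  sqrt (x) * (x * x)   (half-integer; unsafe math)

   Exponents that match none of the patterns fall through to the pow
   optab or an ordinary library call via expand_builtin_mathfn_2.  */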
3076 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3077 a normal call should be emitted rather than expanding the function
3078 in-line. EXP is the expression that is a call to the builtin
3079 function; if convenient, the result should be placed in TARGET. */
3081 static rtx
3082 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3084 tree arg0, arg1;
3085 rtx op0, op1;
3086 enum machine_mode mode;
3087 enum machine_mode mode2;
3089 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3090 return NULL_RTX;
3092 arg0 = CALL_EXPR_ARG (exp, 0);
3093 arg1 = CALL_EXPR_ARG (exp, 1);
3094 mode = TYPE_MODE (TREE_TYPE (exp));
3096 /* Handle constant power. */
3098 if (TREE_CODE (arg1) == INTEGER_CST
3099 && !TREE_OVERFLOW (arg1))
3101 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3103 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3104 Otherwise, check the number of multiplications required. */
3105 if ((TREE_INT_CST_HIGH (arg1) == 0
3106 || TREE_INT_CST_HIGH (arg1) == -1)
3107 && ((n >= -1 && n <= 2)
3108 || (optimize_insn_for_speed_p ()
3109 && powi_cost (n) <= POWI_MAX_MULTS)))
3111 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3112 op0 = force_reg (mode, op0);
3113 return expand_powi (op0, mode, n);
3117 /* Emit a libcall to libgcc. */
3119 /* Mode of the 2nd argument must match that of an int. */
3120 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3122 if (target == NULL_RTX)
3123 target = gen_reg_rtx (mode);
3125 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3126 if (GET_MODE (op0) != mode)
3127 op0 = convert_to_mode (mode, op0, 0);
3128 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3129 if (GET_MODE (op1) != mode2)
3130 op1 = convert_to_mode (mode2, op1, 0);
3132 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3133 target, LCT_CONST, mode, 2,
3134 op0, mode, op1, mode2);
3136 return target;
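/* For example, __builtin_powi (x, 4) with a constant exponent becomes
   two multiplications (x*x, then the square of that) when optimizing
   for speed, while a non-constant exponent such as
   __builtin_powi (x, k) falls back to the powi libcall provided by
   libgcc.  */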
3139 /* Expand expression EXP which is a call to the strlen builtin. Return
3140 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3141 try to get the result in TARGET, if convenient. */
3143 static rtx
3144 expand_builtin_strlen (tree exp, rtx target,
3145 enum machine_mode target_mode)
3147 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3148 return NULL_RTX;
3149 else
3151 rtx pat;
3152 tree len;
3153 tree src = CALL_EXPR_ARG (exp, 0);
3154 rtx result, src_reg, char_rtx, before_strlen;
3155 enum machine_mode insn_mode = target_mode, char_mode;
3156 enum insn_code icode = CODE_FOR_nothing;
3157 int align;
3159 /* If the length can be computed at compile-time, return it. */
3160 len = c_strlen (src, 0);
3161 if (len)
3162 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3164 /* If the length can be computed at compile-time and is a constant
3165 integer, but there are side-effects in src, evaluate
3166 src for side-effects, then return len.
3167 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3168 can be optimized into: i++; x = 3; */
3169 len = c_strlen (src, 1);
3170 if (len && TREE_CODE (len) == INTEGER_CST)
3172 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3173 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3176 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3178 /* If SRC is not a pointer type, don't do this operation inline. */
3179 if (align == 0)
3180 return NULL_RTX;
3182 /* Bail out if we can't compute strlen in the right mode. */
3183 while (insn_mode != VOIDmode)
3185 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3186 if (icode != CODE_FOR_nothing)
3187 break;
3189 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3191 if (insn_mode == VOIDmode)
3192 return NULL_RTX;
3194 /* Make a place to write the result of the instruction. */
3195 result = target;
3196 if (! (result != 0
3197 && REG_P (result)
3198 && GET_MODE (result) == insn_mode
3199 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3200 result = gen_reg_rtx (insn_mode);
3202 /* Make a place to hold the source address. We will not expand
3203 the actual source until we are sure that the expansion will
3204 not fail -- there are trees that cannot be expanded twice. */
3205 src_reg = gen_reg_rtx (Pmode);
3207 /* Mark the beginning of the strlen sequence so we can emit the
3208 source operand later. */
3209 before_strlen = get_last_insn ();
3211 char_rtx = const0_rtx;
3212 char_mode = insn_data[(int) icode].operand[2].mode;
3213 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3214 char_mode))
3215 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3217 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3218 char_rtx, GEN_INT (align));
3219 if (! pat)
3220 return NULL_RTX;
3221 emit_insn (pat);
3223 /* Now that we are assured of success, expand the source. */
3224 start_sequence ();
3225 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3226 if (pat != src_reg)
3227 emit_move_insn (src_reg, pat);
3228 pat = get_insns ();
3229 end_sequence ();
3231 if (before_strlen)
3232 emit_insn_after (pat, before_strlen);
3233 else
3234 emit_insn_before (pat, get_insns ());
3236 /* Return the value in the proper mode for this function. */
3237 if (GET_MODE (result) == target_mode)
3238 target = result;
3239 else if (target != 0)
3240 convert_move (target, result, 0);
3241 else
3242 target = convert_to_mode (target_mode, result, 0);
3244 return target;
3248 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed; the
3249 caller should emit a normal call, otherwise try to get the result
3250 in TARGET, if convenient (and in mode MODE if that's convenient). */
3252 static rtx
3253 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3255 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3257 tree type = TREE_TYPE (exp);
3258 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3259 CALL_EXPR_ARG (exp, 1), type);
3260 if (result)
3261 return expand_expr (result, target, mode, EXPAND_NORMAL);
3263 return NULL_RTX;
3266 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed; the
3267 caller should emit a normal call, otherwise try to get the result
3268 in TARGET, if convenient (and in mode MODE if that's convenient). */
3270 static rtx
3271 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3273 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3275 tree type = TREE_TYPE (exp);
3276 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3277 CALL_EXPR_ARG (exp, 1), type);
3278 if (result)
3279 return expand_expr (result, target, mode, EXPAND_NORMAL);
3281 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3283 return NULL_RTX;
3286 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed; the
3287 caller should emit a normal call, otherwise try to get the result
3288 in TARGET, if convenient (and in mode MODE if that's convenient). */
3290 static rtx
3291 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3293 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3295 tree type = TREE_TYPE (exp);
3296 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3297 CALL_EXPR_ARG (exp, 1), type);
3298 if (result)
3299 return expand_expr (result, target, mode, EXPAND_NORMAL);
3301 return NULL_RTX;
3304 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed; the
3305 caller should emit a normal call, otherwise try to get the result
3306 in TARGET, if convenient (and in mode MODE if that's convenient). */
3308 static rtx
3309 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3311 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3313 tree type = TREE_TYPE (exp);
3314 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3315 CALL_EXPR_ARG (exp, 1), type);
3316 if (result)
3317 return expand_expr (result, target, mode, EXPAND_NORMAL);
3319 return NULL_RTX;
3322 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3323 bytes from constant string DATA + OFFSET and return it as target
3324 constant. */
3326 static rtx
3327 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3328 enum machine_mode mode)
3330 const char *str = (const char *) data;
3332 gcc_assert (offset >= 0
3333 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3334 <= strlen (str) + 1));
3336 return c_readstr (str + offset, mode);
3339 /* Expand a call EXP to the memcpy builtin.
3340 Return NULL_RTX if we failed; the caller should emit a normal call,
3341 otherwise try to get the result in TARGET, if convenient (and in
3342 mode MODE if that's convenient). */
3344 static rtx
3345 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3347 tree fndecl = get_callee_fndecl (exp);
3349 if (!validate_arglist (exp,
3350 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3351 return NULL_RTX;
3352 else
3354 tree dest = CALL_EXPR_ARG (exp, 0);
3355 tree src = CALL_EXPR_ARG (exp, 1);
3356 tree len = CALL_EXPR_ARG (exp, 2);
3357 const char *src_str;
3358 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3359 unsigned int dest_align
3360 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3361 rtx dest_mem, src_mem, dest_addr, len_rtx;
3362 tree result = fold_builtin_memory_op (dest, src, len,
3363 TREE_TYPE (TREE_TYPE (fndecl)),
3364 false, /*endp=*/0);
3365 HOST_WIDE_INT expected_size = -1;
3366 unsigned int expected_align = 0;
3367 tree_ann_common_t ann;
3369 if (result)
3371 while (TREE_CODE (result) == COMPOUND_EXPR)
3373 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3374 EXPAND_NORMAL);
3375 result = TREE_OPERAND (result, 1);
3377 return expand_expr (result, target, mode, EXPAND_NORMAL);
3380 /* If DEST is not a pointer type, call the normal function. */
3381 if (dest_align == 0)
3382 return NULL_RTX;
3384 /* If either SRC is not a pointer type, don't do this
3385 operation in-line. */
3386 if (src_align == 0)
3387 return NULL_RTX;
3389 ann = tree_common_ann (exp);
3390 if (ann)
3391 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3393 if (expected_align < dest_align)
3394 expected_align = dest_align;
3395 dest_mem = get_memory_rtx (dest, len);
3396 set_mem_align (dest_mem, dest_align);
3397 len_rtx = expand_normal (len);
3398 src_str = c_getstr (src);
3400 /* If SRC is a string constant and block move would be done
3401 by pieces, we can avoid loading the string from memory
3402 and only store the computed constants. */
3403 if (src_str
3404 && GET_CODE (len_rtx) == CONST_INT
3405 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3406 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3407 CONST_CAST (char *, src_str),
3408 dest_align, false))
3410 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3411 builtin_memcpy_read_str,
3412 CONST_CAST (char *, src_str),
3413 dest_align, false, 0);
3414 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3415 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3416 return dest_mem;
3419 src_mem = get_memory_rtx (src, len);
3420 set_mem_align (src_mem, src_align);
3422 /* Copy word part most expediently. */
3423 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3424 CALL_EXPR_TAILCALL (exp)
3425 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3426 expected_align, expected_size);
3428 if (dest_addr == 0)
3430 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3431 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3433 return dest_addr;
3437 /* Expand a call EXP to the mempcpy builtin.
3438 Return NULL_RTX if we failed; the caller should emit a normal call,
3439 otherwise try to get the result in TARGET, if convenient (and in
3440 mode MODE if that's convenient). If ENDP is 0 return the
3441 destination pointer, if ENDP is 1 return the end pointer ala
3442 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3443 stpcpy. */
3445 static rtx
3446 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3448 if (!validate_arglist (exp,
3449 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3450 return NULL_RTX;
3451 else
3453 tree dest = CALL_EXPR_ARG (exp, 0);
3454 tree src = CALL_EXPR_ARG (exp, 1);
3455 tree len = CALL_EXPR_ARG (exp, 2);
3456 return expand_builtin_mempcpy_args (dest, src, len,
3457 TREE_TYPE (exp),
3458 target, mode, /*endp=*/ 1);
3462 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3463 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3464 so that this can also be called without constructing an actual CALL_EXPR.
3465 TYPE is the return type of the call. The other arguments and return value
3466 are the same as for expand_builtin_mempcpy. */
3468 static rtx
3469 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3470 rtx target, enum machine_mode mode, int endp)
3472 /* If return value is ignored, transform mempcpy into memcpy. */
3473 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3475 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3476 tree result = build_call_expr (fn, 3, dest, src, len);
3478 while (TREE_CODE (result) == COMPOUND_EXPR)
3480 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3481 EXPAND_NORMAL);
3482 result = TREE_OPERAND (result, 1);
3484 return expand_expr (result, target, mode, EXPAND_NORMAL);
3486 else
3488 const char *src_str;
3489 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3490 unsigned int dest_align
3491 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3492 rtx dest_mem, src_mem, len_rtx;
3493 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3495 if (result)
3497 while (TREE_CODE (result) == COMPOUND_EXPR)
3499 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3500 EXPAND_NORMAL);
3501 result = TREE_OPERAND (result, 1);
3503 return expand_expr (result, target, mode, EXPAND_NORMAL);
3506 /* If either SRC or DEST is not a pointer type, don't do this
3507 operation in-line. */
3508 if (dest_align == 0 || src_align == 0)
3509 return NULL_RTX;
3511 /* If LEN is not constant, call the normal function. */
3512 if (! host_integerp (len, 1))
3513 return NULL_RTX;
3515 len_rtx = expand_normal (len);
3516 src_str = c_getstr (src);
3518 /* If SRC is a string constant and block move would be done
3519 by pieces, we can avoid loading the string from memory
3520 and only store the computed constants. */
3521 if (src_str
3522 && GET_CODE (len_rtx) == CONST_INT
3523 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3524 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3525 CONST_CAST (char *, src_str),
3526 dest_align, false))
3528 dest_mem = get_memory_rtx (dest, len);
3529 set_mem_align (dest_mem, dest_align);
3530 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3531 builtin_memcpy_read_str,
3532 CONST_CAST (char *, src_str),
3533 dest_align, false, endp);
3534 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3535 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3536 return dest_mem;
3539 if (GET_CODE (len_rtx) == CONST_INT
3540 && can_move_by_pieces (INTVAL (len_rtx),
3541 MIN (dest_align, src_align)))
3543 dest_mem = get_memory_rtx (dest, len);
3544 set_mem_align (dest_mem, dest_align);
3545 src_mem = get_memory_rtx (src, len);
3546 set_mem_align (src_mem, src_align);
3547 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3548 MIN (dest_align, src_align), endp);
3549 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3550 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3551 return dest_mem;
3554 return NULL_RTX;
3558 /* Expand expression EXP, which is a call to the memmove builtin. Return
3559 NULL_RTX if we failed; the caller should emit a normal call. */
3561 static rtx
3562 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3564 if (!validate_arglist (exp,
3565 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3566 return NULL_RTX;
3567 else
3569 tree dest = CALL_EXPR_ARG (exp, 0);
3570 tree src = CALL_EXPR_ARG (exp, 1);
3571 tree len = CALL_EXPR_ARG (exp, 2);
3572 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3573 target, mode, ignore);
3577 /* Helper function to do the actual work for expand_builtin_memmove. The
3578 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3579 so that this can also be called without constructing an actual CALL_EXPR.
3580 TYPE is the return type of the call. The other arguments and return value
3581 are the same as for expand_builtin_memmove. */
3583 static rtx
3584 expand_builtin_memmove_args (tree dest, tree src, tree len,
3585 tree type, rtx target, enum machine_mode mode,
3586 int ignore)
3588 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3590 if (result)
3592 STRIP_TYPE_NOPS (result);
3593 while (TREE_CODE (result) == COMPOUND_EXPR)
3595 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3596 EXPAND_NORMAL);
3597 result = TREE_OPERAND (result, 1);
3599 return expand_expr (result, target, mode, EXPAND_NORMAL);
3602 /* Otherwise, call the normal function. */
3603 return NULL_RTX;
3606 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3607 NULL_RTX if we failed; the caller should emit a normal call. */
3609 static rtx
3610 expand_builtin_bcopy (tree exp, int ignore)
3612 tree type = TREE_TYPE (exp);
3613 tree src, dest, size;
3615 if (!validate_arglist (exp,
3616 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3617 return NULL_RTX;
3619 src = CALL_EXPR_ARG (exp, 0);
3620 dest = CALL_EXPR_ARG (exp, 1);
3621 size = CALL_EXPR_ARG (exp, 2);
3623 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3624 This is done this way so that if it isn't expanded inline, we fall
3625 back to calling bcopy instead of memmove. */
3626 return expand_builtin_memmove_args (dest, src,
3627 fold_convert (sizetype, size),
3628 type, const0_rtx, VOIDmode,
3629 ignore);
3632 #ifndef HAVE_movstr
3633 # define HAVE_movstr 0
3634 # define CODE_FOR_movstr CODE_FOR_nothing
3635 #endif
3637 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3638 we failed; the caller should emit a normal call, otherwise try to
3639 get the result in TARGET, if convenient. If ENDP is 0 return the
3640 destination pointer, if ENDP is 1 return the end pointer ala
3641 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3642 stpcpy. */
3644 static rtx
3645 expand_movstr (tree dest, tree src, rtx target, int endp)
3647 rtx end;
3648 rtx dest_mem;
3649 rtx src_mem;
3650 rtx insn;
3651 const struct insn_data * data;
3653 if (!HAVE_movstr)
3654 return NULL_RTX;
3656 dest_mem = get_memory_rtx (dest, NULL);
3657 src_mem = get_memory_rtx (src, NULL);
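/* When the result is not wanted (ENDP == 0), let the movstr pattern
   write the end pointer into a scratch register and return the
   destination address instead.  */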
3658 if (!endp)
3660 target = force_reg (Pmode, XEXP (dest_mem, 0));
3661 dest_mem = replace_equiv_address (dest_mem, target);
3662 end = gen_reg_rtx (Pmode);
3664 else
3666 if (target == 0 || target == const0_rtx)
3668 end = gen_reg_rtx (Pmode);
3669 if (target == 0)
3670 target = end;
3672 else
3673 end = target;
3676 data = insn_data + CODE_FOR_movstr;
3678 if (data->operand[0].mode != VOIDmode)
3679 end = gen_lowpart (data->operand[0].mode, end);
3681 insn = data->genfun (end, dest_mem, src_mem);
3683 gcc_assert (insn);
3685 emit_insn (insn);
3687 /* movstr is supposed to set end to the address of the NUL
3688 terminator. If the caller requested a mempcpy-like return value,
3689 adjust it. */
3690 if (endp == 1 && target != const0_rtx)
3692 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3693 emit_move_insn (target, force_operand (tem, NULL_RTX));
3696 return target;
3699 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3700 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3701 try to get the result in TARGET, if convenient (and in mode MODE if that's
3702 convenient). */
3704 static rtx
3705 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3707 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3709 tree dest = CALL_EXPR_ARG (exp, 0);
3710 tree src = CALL_EXPR_ARG (exp, 1);
3711 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3713 return NULL_RTX;
3716 /* Helper function to do the actual work for expand_builtin_strcpy. The
3717 arguments to the builtin_strcpy call DEST and SRC are broken out
3718 so that this can also be called without constructing an actual CALL_EXPR.
3719 The other arguments and return value are the same as for
3720 expand_builtin_strcpy. */
3722 static rtx
3723 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3724 rtx target, enum machine_mode mode)
3726 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3727 if (result)
3728 return expand_expr (result, target, mode, EXPAND_NORMAL);
3729 return expand_movstr (dest, src, target, /*endp=*/0);
3733 /* Expand a call EXP to the stpcpy builtin.
3734 Return NULL_RTX if we failed; the caller should emit a normal call,
3735 otherwise try to get the result in TARGET, if convenient (and in
3736 mode MODE if that's convenient). */
3738 static rtx
3739 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3741 tree dst, src;
3743 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3744 return NULL_RTX;
3746 dst = CALL_EXPR_ARG (exp, 0);
3747 src = CALL_EXPR_ARG (exp, 1);
3749 /* If return value is ignored, transform stpcpy into strcpy. */
3750 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3752 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3753 tree result = build_call_expr (fn, 2, dst, src);
3755 STRIP_NOPS (result);
3756 while (TREE_CODE (result) == COMPOUND_EXPR)
3758 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3759 EXPAND_NORMAL);
3760 result = TREE_OPERAND (result, 1);
3762 return expand_expr (result, target, mode, EXPAND_NORMAL);
3764 else
3766 tree len, lenp1;
3767 rtx ret;
3769 /* Ensure we get an actual string whose length can be evaluated at
3770 compile-time, not an expression containing a string. This is
3771 because the latter can produce pessimized code
3772 when used to compute the return value. */
3773 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3774 return expand_movstr (dst, src, target, /*endp=*/2);
3776 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3777 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3778 target, mode, /*endp=*/2);
3780 if (ret)
3781 return ret;
3783 if (TREE_CODE (len) == INTEGER_CST)
3785 rtx len_rtx = expand_normal (len);
3787 if (GET_CODE (len_rtx) == CONST_INT)
3789 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3790 dst, src, target, mode);
3792 if (ret)
3794 if (! target)
3796 if (mode != VOIDmode)
3797 target = gen_reg_rtx (mode);
3798 else
3799 target = gen_reg_rtx (GET_MODE (ret));
3801 if (GET_MODE (target) != GET_MODE (ret))
3802 ret = gen_lowpart (GET_MODE (target), ret);
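/* strcpy returned DST; add the known source length to form the
   stpcpy result, a pointer to the terminating NUL.  */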
3804 ret = plus_constant (ret, INTVAL (len_rtx));
3805 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3806 gcc_assert (ret);
3808 return target;
3813 return expand_movstr (dst, src, target, /*endp=*/2);
3817 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3818 bytes from constant string DATA + OFFSET and return it as target
3819 constant. */
3821 static rtx
3822 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3823 enum machine_mode mode)
3825 const char *str = (const char *) data;
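/* Past the end of STR, strncpy pads the destination with NULs,
   so reads beyond the source string must yield zero.  */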
3827 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3828 return const0_rtx;
3830 return c_readstr (str + offset, mode);
3833 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3834 NULL_RTX if we failed; the caller should emit a normal call. */
3836 static rtx
3837 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3839 tree fndecl = get_callee_fndecl (exp);
3841 if (validate_arglist (exp,
3842 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3844 tree dest = CALL_EXPR_ARG (exp, 0);
3845 tree src = CALL_EXPR_ARG (exp, 1);
3846 tree len = CALL_EXPR_ARG (exp, 2);
3847 tree slen = c_strlen (src, 1);
3848 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3850 if (result)
3852 while (TREE_CODE (result) == COMPOUND_EXPR)
3854 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3855 EXPAND_NORMAL);
3856 result = TREE_OPERAND (result, 1);
3858 return expand_expr (result, target, mode, EXPAND_NORMAL);
3861 /* We must be passed a constant len and src parameter. */
3862 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3863 return NULL_RTX;
3865 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3867 /* We're required to pad with trailing zeros if the requested
3868 len is greater than strlen(s2)+1. In that case try to
3869 use store_by_pieces; if that fails, punt. */
3870 if (tree_int_cst_lt (slen, len))
3872 unsigned int dest_align
3873 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3874 const char *p = c_getstr (src);
3875 rtx dest_mem;
3877 if (!p || dest_align == 0 || !host_integerp (len, 1)
3878 || !can_store_by_pieces (tree_low_cst (len, 1),
3879 builtin_strncpy_read_str,
3880 CONST_CAST (char *, p),
3881 dest_align, false))
3882 return NULL_RTX;
3884 dest_mem = get_memory_rtx (dest, len);
3885 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3886 builtin_strncpy_read_str,
3887 CONST_CAST (char *, p), dest_align, false, 0);
3888 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3889 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3890 return dest_mem;
3893 return NULL_RTX;
3896 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3897 bytes from constant string DATA + OFFSET and return it as target
3898 constant. */
3900 static rtx
3901 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3902 enum machine_mode mode)
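/* Unlike the other read_str callbacks, DATA here points to the single
   fill byte; replicate it across the whole mode.  */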
3904 const char *c = (const char *) data;
3905 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3907 memset (p, *c, GET_MODE_SIZE (mode));
3909 return c_readstr (p, mode);
3912 /* Callback routine for store_by_pieces. Return the RTL of a register
3913 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3914 char value given in the RTL register data. For example, if mode is
3915 4 bytes wide, return the RTL for 0x01010101*data. */
3917 static rtx
3918 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3919 enum machine_mode mode)
3921 rtx target, coeff;
3922 size_t size;
3923 char *p;
3925 size = GET_MODE_SIZE (mode);
3926 if (size == 1)
3927 return (rtx) data;
3929 p = XALLOCAVEC (char, size);
3930 memset (p, 1, size);
3931 coeff = c_readstr (p, mode);
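/* COEFF is 0x01...01 in MODE; multiplying the byte value by it copies
   that value into every byte of the result.  */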
3933 target = convert_to_mode (mode, (rtx) data, 1);
3934 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3935 return force_reg (mode, target);
3938 /* Expand expression EXP, which is a call to the memset builtin. Return
3939 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3940 try to get the result in TARGET, if convenient (and in mode MODE if that's
3941 convenient). */
3943 static rtx
3944 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3946 if (!validate_arglist (exp,
3947 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3948 return NULL_RTX;
3949 else
3951 tree dest = CALL_EXPR_ARG (exp, 0);
3952 tree val = CALL_EXPR_ARG (exp, 1);
3953 tree len = CALL_EXPR_ARG (exp, 2);
3954 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3958 /* Helper function to do the actual work for expand_builtin_memset. The
3959 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3960 so that this can also be called without constructing an actual CALL_EXPR.
3961 The other arguments and return value are the same as for
3962 expand_builtin_memset. */
3964 static rtx
3965 expand_builtin_memset_args (tree dest, tree val, tree len,
3966 rtx target, enum machine_mode mode, tree orig_exp)
3968 tree fndecl, fn;
3969 enum built_in_function fcode;
3970 char c;
3971 unsigned int dest_align;
3972 rtx dest_mem, dest_addr, len_rtx;
3973 HOST_WIDE_INT expected_size = -1;
3974 unsigned int expected_align = 0;
3975 tree_ann_common_t ann;
3977 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3979 /* If DEST is not a pointer type, don't do this operation in-line. */
3980 if (dest_align == 0)
3981 return NULL_RTX;
3983 ann = tree_common_ann (orig_exp);
3984 if (ann)
3985 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3987 if (expected_align < dest_align)
3988 expected_align = dest_align;
3990 /* If the LEN parameter is zero, return DEST. */
3991 if (integer_zerop (len))
3993 /* Evaluate and ignore VAL in case it has side-effects. */
3994 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3995 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3998 /* Stabilize the arguments in case we fail. */
3999 dest = builtin_save_expr (dest);
4000 val = builtin_save_expr (val);
4001 len = builtin_save_expr (len);
4003 len_rtx = expand_normal (len);
4004 dest_mem = get_memory_rtx (dest, len);
4006 if (TREE_CODE (val) != INTEGER_CST)
4008 rtx val_rtx;
4010 val_rtx = expand_normal (val);
4011 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
4012 val_rtx, 0);
4014 /* Assume that we can memset by pieces if we can store
4015 the coefficients by pieces (in the required modes).
4016 We can't pass builtin_memset_gen_str as that emits RTL. */
4017 c = 1;
4018 if (host_integerp (len, 1)
4019 && can_store_by_pieces (tree_low_cst (len, 1),
4020 builtin_memset_read_str, &c, dest_align,
4021 true))
4023 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
4024 val_rtx);
4025 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4026 builtin_memset_gen_str, val_rtx, dest_align,
4027 true, 0);
4029 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4030 dest_align, expected_align,
4031 expected_size))
4032 goto do_libcall;
4034 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4035 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4036 return dest_mem;
4039 if (target_char_cast (val, &c))
4040 goto do_libcall;
4042 if (c)
4044 if (host_integerp (len, 1)
4045 && can_store_by_pieces (tree_low_cst (len, 1),
4046 builtin_memset_read_str, &c, dest_align,
4047 true))
4048 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4049 builtin_memset_read_str, &c, dest_align, true, 0);
4050 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4051 dest_align, expected_align,
4052 expected_size))
4053 goto do_libcall;
4055 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4056 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4057 return dest_mem;
4060 set_mem_align (dest_mem, dest_align);
4061 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4062 CALL_EXPR_TAILCALL (orig_exp)
4063 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4064 expected_align, expected_size);
4066 if (dest_addr == 0)
4068 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4069 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4072 return dest_addr;
4074 do_libcall:
4075 fndecl = get_callee_fndecl (orig_exp);
4076 fcode = DECL_FUNCTION_CODE (fndecl);
4077 if (fcode == BUILT_IN_MEMSET)
4078 fn = build_call_expr (fndecl, 3, dest, val, len);
4079 else if (fcode == BUILT_IN_BZERO)
4080 fn = build_call_expr (fndecl, 2, dest, len);
4081 else
4082 gcc_unreachable ();
4083 if (TREE_CODE (fn) == CALL_EXPR)
4084 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4085 return expand_call (fn, target, target == const0_rtx);
4088 /* Expand expression EXP, which is a call to the bzero builtin. Return
4089 NULL_RTX if we failed; the caller should emit a normal call. */
4091 static rtx
4092 expand_builtin_bzero (tree exp)
4094 tree dest, size;
4096 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4097 return NULL_RTX;
4099 dest = CALL_EXPR_ARG (exp, 0);
4100 size = CALL_EXPR_ARG (exp, 1);
4102 /* New argument list transforming bzero(ptr x, int y) to
4103 memset(ptr x, int 0, size_t y). This is done this way
4104 so that if it isn't expanded inline, we fall back to
4105 calling bzero instead of memset. */
4107 return expand_builtin_memset_args (dest, integer_zero_node,
4108 fold_convert (sizetype, size),
4109 const0_rtx, VOIDmode, exp);
4112 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4113 caller should emit a normal call, otherwise try to get the result
4114 in TARGET, if convenient (and in mode MODE if that's convenient). */
4116 static rtx
4117 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4119 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4120 INTEGER_TYPE, VOID_TYPE))
4122 tree type = TREE_TYPE (exp);
4123 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4124 CALL_EXPR_ARG (exp, 1),
4125 CALL_EXPR_ARG (exp, 2), type);
4126 if (result)
4127 return expand_expr (result, target, mode, EXPAND_NORMAL);
4129 return NULL_RTX;
4132 /* Expand expression EXP, which is a call to the memcmp built-in function.
4133 Return NULL_RTX if we failed and the
4134 caller should emit a normal call, otherwise try to get the result in
4135 TARGET, if convenient (and in mode MODE, if that's convenient). */
4137 static rtx
4138 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4140 if (!validate_arglist (exp,
4141 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4142 return NULL_RTX;
4143 else
4145 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4146 CALL_EXPR_ARG (exp, 1),
4147 CALL_EXPR_ARG (exp, 2));
4148 if (result)
4149 return expand_expr (result, target, mode, EXPAND_NORMAL);
4152 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4154 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4155 rtx result;
4156 rtx insn;
4157 tree arg1 = CALL_EXPR_ARG (exp, 0);
4158 tree arg2 = CALL_EXPR_ARG (exp, 1);
4159 tree len = CALL_EXPR_ARG (exp, 2);
4161 int arg1_align
4162 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4163 int arg2_align
4164 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4165 enum machine_mode insn_mode;
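/* Prefer the target's cmpmem pattern; fall back to cmpstrn, which has
   the same interface when the length is known.  */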
4167 #ifdef HAVE_cmpmemsi
4168 if (HAVE_cmpmemsi)
4169 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4170 else
4171 #endif
4172 #ifdef HAVE_cmpstrnsi
4173 if (HAVE_cmpstrnsi)
4174 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4175 else
4176 #endif
4177 return NULL_RTX;
4179 /* If we don't have POINTER_TYPE, call the function. */
4180 if (arg1_align == 0 || arg2_align == 0)
4181 return NULL_RTX;
4183 /* Make a place to write the result of the instruction. */
4184 result = target;
4185 if (! (result != 0
4186 && REG_P (result) && GET_MODE (result) == insn_mode
4187 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4188 result = gen_reg_rtx (insn_mode);
4190 arg1_rtx = get_memory_rtx (arg1, len);
4191 arg2_rtx = get_memory_rtx (arg2, len);
4192 arg3_rtx = expand_normal (fold_convert (sizetype, len));
4194 /* Set MEM_SIZE as appropriate. */
4195 if (GET_CODE (arg3_rtx) == CONST_INT)
4197 set_mem_size (arg1_rtx, arg3_rtx);
4198 set_mem_size (arg2_rtx, arg3_rtx);
4201 #ifdef HAVE_cmpmemsi
4202 if (HAVE_cmpmemsi)
4203 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4204 GEN_INT (MIN (arg1_align, arg2_align)));
4205 else
4206 #endif
4207 #ifdef HAVE_cmpstrnsi
4208 if (HAVE_cmpstrnsi)
4209 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4210 GEN_INT (MIN (arg1_align, arg2_align)));
4211 else
4212 #endif
4213 gcc_unreachable ();
4215 if (insn)
4216 emit_insn (insn);
4217 else
4218 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4219 TYPE_MODE (integer_type_node), 3,
4220 XEXP (arg1_rtx, 0), Pmode,
4221 XEXP (arg2_rtx, 0), Pmode,
4222 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4223 TYPE_UNSIGNED (sizetype)),
4224 TYPE_MODE (sizetype));
4226 /* Return the value in the proper mode for this function. */
4227 mode = TYPE_MODE (TREE_TYPE (exp));
4228 if (GET_MODE (result) == mode)
4229 return result;
4230 else if (target != 0)
4232 convert_move (target, result, 0);
4233 return target;
4235 else
4236 return convert_to_mode (mode, result, 0);
4238 #endif
4240 return NULL_RTX;
4243 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4244 if we failed; the caller should emit a normal call, otherwise try to get
4245 the result in TARGET, if convenient. */
4247 static rtx
4248 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4250 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4251 return NULL_RTX;
4252 else
4254 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4255 CALL_EXPR_ARG (exp, 1));
4256 if (result)
4257 return expand_expr (result, target, mode, EXPAND_NORMAL);
4260 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4261 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4262 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4264 rtx arg1_rtx, arg2_rtx;
4265 rtx result, insn = NULL_RTX;
4266 tree fndecl, fn;
4267 tree arg1 = CALL_EXPR_ARG (exp, 0);
4268 tree arg2 = CALL_EXPR_ARG (exp, 1);
4270 int arg1_align
4271 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4272 int arg2_align
4273 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4275 /* If we don't have POINTER_TYPE, call the function. */
4276 if (arg1_align == 0 || arg2_align == 0)
4277 return NULL_RTX;
4279 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4280 arg1 = builtin_save_expr (arg1);
4281 arg2 = builtin_save_expr (arg2);
4283 arg1_rtx = get_memory_rtx (arg1, NULL);
4284 arg2_rtx = get_memory_rtx (arg2, NULL);
4286 #ifdef HAVE_cmpstrsi
4287 /* Try to call cmpstrsi. */
4288 if (HAVE_cmpstrsi)
4290 enum machine_mode insn_mode
4291 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4293 /* Make a place to write the result of the instruction. */
4294 result = target;
4295 if (! (result != 0
4296 && REG_P (result) && GET_MODE (result) == insn_mode
4297 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4298 result = gen_reg_rtx (insn_mode);
4300 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4301 GEN_INT (MIN (arg1_align, arg2_align)));
4303 #endif
4304 #ifdef HAVE_cmpstrnsi
4305 /* Try to determine at least one length and call cmpstrnsi. */
4306 if (!insn && HAVE_cmpstrnsi)
4308 tree len;
4309 rtx arg3_rtx;
4311 enum machine_mode insn_mode
4312 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4313 tree len1 = c_strlen (arg1, 1);
4314 tree len2 = c_strlen (arg2, 1);
4316 if (len1)
4317 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4318 if (len2)
4319 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4321 /* If we don't have a constant length for the first, use the length
4322 of the second, if we know it. We don't require a constant for
4323 this case; some cost analysis could be done if both are available
4324 but neither is constant. For now, assume they're equally cheap,
4325 unless one has side effects. If both strings have constant lengths,
4326 use the smaller. */
4328 if (!len1)
4329 len = len2;
4330 else if (!len2)
4331 len = len1;
4332 else if (TREE_SIDE_EFFECTS (len1))
4333 len = len2;
4334 else if (TREE_SIDE_EFFECTS (len2))
4335 len = len1;
4336 else if (TREE_CODE (len1) != INTEGER_CST)
4337 len = len2;
4338 else if (TREE_CODE (len2) != INTEGER_CST)
4339 len = len1;
4340 else if (tree_int_cst_lt (len1, len2))
4341 len = len1;
4342 else
4343 len = len2;
4345 /* If both arguments have side effects, we cannot optimize. */
4346 if (!len || TREE_SIDE_EFFECTS (len))
4347 goto do_libcall;
4349 arg3_rtx = expand_normal (len);
4351 /* Make a place to write the result of the instruction. */
4352 result = target;
4353 if (! (result != 0
4354 && REG_P (result) && GET_MODE (result) == insn_mode
4355 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4356 result = gen_reg_rtx (insn_mode);
4358 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4359 GEN_INT (MIN (arg1_align, arg2_align)));
4361 #endif
4363 if (insn)
4365 emit_insn (insn);
4367 /* Return the value in the proper mode for this function. */
4368 mode = TYPE_MODE (TREE_TYPE (exp));
4369 if (GET_MODE (result) == mode)
4370 return result;
4371 if (target == 0)
4372 return convert_to_mode (mode, result, 0);
4373 convert_move (target, result, 0);
4374 return target;
4377 /* Expand the library call ourselves using a stabilized argument
4378 list to avoid evaluating the function's arguments twice. */
4379 #ifdef HAVE_cmpstrnsi
4380 do_libcall:
4381 #endif
4382 fndecl = get_callee_fndecl (exp);
4383 fn = build_call_expr (fndecl, 2, arg1, arg2);
4384 if (TREE_CODE (fn) == CALL_EXPR)
4385 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4386 return expand_call (fn, target, target == const0_rtx);
4388 #endif
4389 return NULL_RTX;
4392 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4393 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4394 the result in TARGET, if convenient. */
4396 static rtx
4397 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4399 if (!validate_arglist (exp,
4400 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4401 return NULL_RTX;
4402 else
4404 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4405 CALL_EXPR_ARG (exp, 1),
4406 CALL_EXPR_ARG (exp, 2));
4407 if (result)
4408 return expand_expr (result, target, mode, EXPAND_NORMAL);
4411 /* If c_strlen can determine an expression for one of the string
4412 lengths, and it doesn't have side effects, then emit cmpstrnsi
4413 using length MIN(strlen(string)+1, arg3). */
4414 #ifdef HAVE_cmpstrnsi
4415 if (HAVE_cmpstrnsi)
4417 tree len, len1, len2;
4418 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4419 rtx result, insn;
4420 tree fndecl, fn;
4421 tree arg1 = CALL_EXPR_ARG (exp, 0);
4422 tree arg2 = CALL_EXPR_ARG (exp, 1);
4423 tree arg3 = CALL_EXPR_ARG (exp, 2);
4425 int arg1_align
4426 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4427 int arg2_align
4428 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4429 enum machine_mode insn_mode
4430 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4432 len1 = c_strlen (arg1, 1);
4433 len2 = c_strlen (arg2, 1);
4435 if (len1)
4436 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4437 if (len2)
4438 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4440 /* If we don't have a constant length for the first, use the length
4441 of the second, if we know it. We don't require a constant for
4442 this case; some cost analysis could be done if both are available
4443 but neither is constant. For now, assume they're equally cheap,
4444 unless one has side effects. If both strings have constant lengths,
4445 use the smaller. */
4447 if (!len1)
4448 len = len2;
4449 else if (!len2)
4450 len = len1;
4451 else if (TREE_SIDE_EFFECTS (len1))
4452 len = len2;
4453 else if (TREE_SIDE_EFFECTS (len2))
4454 len = len1;
4455 else if (TREE_CODE (len1) != INTEGER_CST)
4456 len = len2;
4457 else if (TREE_CODE (len2) != INTEGER_CST)
4458 len = len1;
4459 else if (tree_int_cst_lt (len1, len2))
4460 len = len1;
4461 else
4462 len = len2;
4464 /* If both arguments have side effects, we cannot optimize. */
4465 if (!len || TREE_SIDE_EFFECTS (len))
4466 return NULL_RTX;
4468 /* The actual new length parameter is MIN(len,arg3). */
4469 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4470 fold_convert (TREE_TYPE (len), arg3));
4472 /* If we don't have POINTER_TYPE, call the function. */
4473 if (arg1_align == 0 || arg2_align == 0)
4474 return NULL_RTX;
4476 /* Make a place to write the result of the instruction. */
4477 result = target;
4478 if (! (result != 0
4479 && REG_P (result) && GET_MODE (result) == insn_mode
4480 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4481 result = gen_reg_rtx (insn_mode);
4483 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4484 arg1 = builtin_save_expr (arg1);
4485 arg2 = builtin_save_expr (arg2);
4486 len = builtin_save_expr (len);
4488 arg1_rtx = get_memory_rtx (arg1, len);
4489 arg2_rtx = get_memory_rtx (arg2, len);
4490 arg3_rtx = expand_normal (len);
4491 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4492 GEN_INT (MIN (arg1_align, arg2_align)));
4493 if (insn)
4495 emit_insn (insn);
4497 /* Return the value in the proper mode for this function. */
4498 mode = TYPE_MODE (TREE_TYPE (exp));
4499 if (GET_MODE (result) == mode)
4500 return result;
4501 if (target == 0)
4502 return convert_to_mode (mode, result, 0);
4503 convert_move (target, result, 0);
4504 return target;
4507 /* Expand the library call ourselves using a stabilized argument
4508 list to avoid evaluating the function's arguments twice. */
4509 fndecl = get_callee_fndecl (exp);
4510 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4511 if (TREE_CODE (fn) == CALL_EXPR)
4512 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4513 return expand_call (fn, target, target == const0_rtx);
4515 #endif
4516 return NULL_RTX;
4519 /* Expand expression EXP, which is a call to the strcat builtin.
4520 Return NULL_RTX if we failed; the caller should emit a normal call,
4521 otherwise try to get the result in TARGET, if convenient. */
4523 static rtx
4524 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4526 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4527 return NULL_RTX;
4528 else
4530 tree dst = CALL_EXPR_ARG (exp, 0);
4531 tree src = CALL_EXPR_ARG (exp, 1);
4532 const char *p = c_getstr (src);
4534 /* If the string length is zero, return the dst parameter. */
4535 if (p && *p == '\0')
4536 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4538 if (optimize_insn_for_speed_p ())
4540 /* See if we can store by pieces into (dst + strlen(dst)). */
4541 tree newsrc, newdst,
4542 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4543 rtx insns;
4545 /* Stabilize the argument list. */
4546 newsrc = builtin_save_expr (src);
4547 dst = builtin_save_expr (dst);
4549 start_sequence ();
4551 /* Create strlen (dst). */
4552 newdst = build_call_expr (strlen_fn, 1, dst);
4553 /* Create (dst p+ strlen (dst)). */
4555 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4556 newdst = builtin_save_expr (newdst);
4558 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4560 end_sequence (); /* Stop sequence. */
4561 return NULL_RTX;
4564 /* Output the entire sequence. */
4565 insns = get_insns ();
4566 end_sequence ();
4567 emit_insn (insns);
4569 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4572 return NULL_RTX;
4576 /* Expand expression EXP, which is a call to the strncat builtin.
4577 Return NULL_RTX if we failed; the caller should emit a normal call,
4578 otherwise try to get the result in TARGET, if convenient. */
4580 static rtx
4581 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4583 if (validate_arglist (exp,
4584 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4586 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4587 CALL_EXPR_ARG (exp, 1),
4588 CALL_EXPR_ARG (exp, 2));
4589 if (result)
4590 return expand_expr (result, target, mode, EXPAND_NORMAL);
4592 return NULL_RTX;
4595 /* Expand expression EXP, which is a call to the strspn builtin.
4596 Return NULL_RTX if we failed; the caller should emit a normal call,
4597 otherwise try to get the result in TARGET, if convenient. */
4599 static rtx
4600 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4602 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4604 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4605 CALL_EXPR_ARG (exp, 1));
4606 if (result)
4607 return expand_expr (result, target, mode, EXPAND_NORMAL);
4609 return NULL_RTX;
4612 /* Expand expression EXP, which is a call to the strcspn builtin.
4613 Return NULL_RTX if we failed; the caller should emit a normal call,
4614 otherwise try to get the result in TARGET, if convenient. */
4616 static rtx
4617 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4619 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4621 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4622 CALL_EXPR_ARG (exp, 1));
4623 if (result)
4624 return expand_expr (result, target, mode, EXPAND_NORMAL);
4626 return NULL_RTX;
4629 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4630 if that's convenient. */
4632 rtx
4633 expand_builtin_saveregs (void)
4635 rtx val, seq;
4637 /* Don't do __builtin_saveregs more than once in a function.
4638 Save the result of the first call and reuse it. */
4639 if (saveregs_value != 0)
4640 return saveregs_value;
4642 /* When this function is called, it means that registers must be
4643 saved on entry to this function. So we migrate the call to the
4644 first insn of this function. */
4646 start_sequence ();
4648 /* Do whatever the machine needs done in this case. */
4649 val = targetm.calls.expand_builtin_saveregs ();
4651 seq = get_insns ();
4652 end_sequence ();
4654 saveregs_value = val;
4656 /* Put the insns after the NOTE that starts the function. If this
4657 is inside a start_sequence, make the outer-level insn chain current, so
4658 the code is placed at the start of the function. */
4659 push_topmost_sequence ();
4660 emit_insn_after (seq, entry_of_function ());
4661 pop_topmost_sequence ();
4663 return val;
4666 /* __builtin_args_info (N) returns word N of the arg space info
4667 for the current function. The number and meanings of words
4668 is controlled by the definition of CUMULATIVE_ARGS. */
4670 static rtx
4671 expand_builtin_args_info (tree exp)
4673 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4674 int *word_ptr = (int *) &crtl->args.info;
4676 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4678 if (call_expr_nargs (exp) != 0)
4680 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4681 error ("argument of %<__builtin_args_info%> must be constant");
4682 else
4684 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4686 if (wordnum < 0 || wordnum >= nwords)
4687 error ("argument of %<__builtin_args_info%> out of range");
4688 else
4689 return GEN_INT (word_ptr[wordnum]);
4692 else
4693 error ("missing argument in %<__builtin_args_info%>");
4695 return const0_rtx;
4698 /* Expand a call to __builtin_next_arg. */
4700 static rtx
4701 expand_builtin_next_arg (void)
4703 /* Checking arguments is already done in fold_builtin_next_arg
4704 that must be called before this function. */
4705 return expand_binop (ptr_mode, add_optab,
4706 crtl->args.internal_arg_pointer,
4707 crtl->args.arg_offset_rtx,
4708 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4711 /* Make it easier for the backends by protecting the valist argument
4712 from multiple evaluations. */
4714 static tree
4715 stabilize_va_list (tree valist, int needs_lvalue)
4717 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4719 gcc_assert (vatype != NULL_TREE);
4721 if (TREE_CODE (vatype) == ARRAY_TYPE)
4723 if (TREE_SIDE_EFFECTS (valist))
4724 valist = save_expr (valist);
4726 /* For this case, the backends will be expecting a pointer to
4727 vatype, but it's possible we've actually been given an array
4728 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4729 So fix it. */
4730 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4732 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4733 valist = build_fold_addr_expr_with_type (valist, p1);
4736 else
4738 tree pt;
4740 if (! needs_lvalue)
4742 if (! TREE_SIDE_EFFECTS (valist))
4743 return valist;
4745 pt = build_pointer_type (vatype);
4746 valist = fold_build1 (ADDR_EXPR, pt, valist);
4747 TREE_SIDE_EFFECTS (valist) = 1;
4750 if (TREE_SIDE_EFFECTS (valist))
4751 valist = save_expr (valist);
4752 valist = build_fold_indirect_ref (valist);
4755 return valist;
4758 /* The "standard" definition of va_list is void*. */
4760 tree
4761 std_build_builtin_va_list (void)
4763 return ptr_type_node;
4766 /* The "standard" abi va_list is va_list_type_node. */
4768 tree
4769 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4771 return va_list_type_node;
4774 /* The "standard" type of va_list is va_list_type_node. */
4776 tree
4777 std_canonical_va_list_type (tree type)
4779 tree wtype, htype;
4781 if (INDIRECT_REF_P (type))
4782 type = TREE_TYPE (type);
4783 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4784 type = TREE_TYPE (type);
4785 wtype = va_list_type_node;
4786 htype = type;
4787 /* Handle structure va_list types. */
4788 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4789 htype = TREE_TYPE (htype);
4790 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4792 /* If va_list is an array type, the argument may have decayed
4793 to a pointer type, e.g. by being passed to another function.
4794 In that case, unwrap both types so that we can compare the
4795 underlying records. */
4796 if (TREE_CODE (htype) == ARRAY_TYPE
4797 || POINTER_TYPE_P (htype))
4799 wtype = TREE_TYPE (wtype);
4800 htype = TREE_TYPE (htype);
4803 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4804 return va_list_type_node;
4806 return NULL_TREE;
4809 /* The "standard" implementation of va_start: just assign `nextarg' to
4810 the variable. */
4812 void
4813 std_expand_builtin_va_start (tree valist, rtx nextarg)
4815 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4816 convert_move (va_r, nextarg, 0);
4819 /* Expand EXP, a call to __builtin_va_start. */
4821 static rtx
4822 expand_builtin_va_start (tree exp)
4824 rtx nextarg;
4825 tree valist;
4827 if (call_expr_nargs (exp) < 2)
4829 error ("too few arguments to function %<va_start%>");
4830 return const0_rtx;
4833 if (fold_builtin_next_arg (exp, true))
4834 return const0_rtx;
4836 nextarg = expand_builtin_next_arg ();
4837 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4839 if (targetm.expand_builtin_va_start)
4840 targetm.expand_builtin_va_start (valist, nextarg);
4841 else
4842 std_expand_builtin_va_start (valist, nextarg);
4844 return const0_rtx;
4847 /* The "standard" implementation of va_arg: read the value from the
4848 current (padded) address and increment by the (padded) size. */
4850 tree
4851 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4852 gimple_seq *post_p)
4854 tree addr, t, type_size, rounded_size, valist_tmp;
4855 unsigned HOST_WIDE_INT align, boundary;
4856 bool indirect;
4858 #ifdef ARGS_GROW_DOWNWARD
4859 /* All of the alignment and movement below is for args-grow-up machines.
4860 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4861 implement their own specialized gimplify_va_arg_expr routines. */
4862 gcc_unreachable ();
4863 #endif
4865 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4866 if (indirect)
4867 type = build_pointer_type (type);
4869 align = PARM_BOUNDARY / BITS_PER_UNIT;
4870 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4872 /* When the caller aligns a parameter on the stack, an alignment
4873 beyond MAX_SUPPORTED_STACK_ALIGNMENT is clamped to
4874 MAX_SUPPORTED_STACK_ALIGNMENT. Match the caller's choice
4875 here in the callee. */
4876 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4877 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4879 boundary /= BITS_PER_UNIT;
4881 /* Hoist the valist value into a temporary for the moment. */
4882 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4884 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4885 requires greater alignment, we must perform dynamic alignment. */
4886 if (boundary > align
4887 && !integer_zerop (TYPE_SIZE (type)))
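/* Round VALIST up to the next multiple of BOUNDARY: add BOUNDARY - 1,
   then mask off the low-order bits.  */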
4889 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4890 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4891 valist_tmp, size_int (boundary - 1)));
4892 gimplify_and_add (t, pre_p);
4894 t = fold_convert (sizetype, valist_tmp);
4895 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4896 fold_convert (TREE_TYPE (valist),
4897 fold_build2 (BIT_AND_EXPR, sizetype, t,
4898 size_int (-boundary))));
4899 gimplify_and_add (t, pre_p);
4901 else
4902 boundary = align;
4904 /* If the actual alignment is less than the alignment of the type,
4905 adjust the type accordingly so that we don't assume strict alignment
4906 when dereferencing the pointer. */
4907 boundary *= BITS_PER_UNIT;
4908 if (boundary < TYPE_ALIGN (type))
4910 type = build_variant_type_copy (type);
4911 TYPE_ALIGN (type) = boundary;
4914 /* Compute the rounded size of the type. */
4915 type_size = size_in_bytes (type);
4916 rounded_size = round_up (type_size, align);
4918 /* Reduce rounded_size so it's sharable with the postqueue. */
4919 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4921 /* Get AP. */
4922 addr = valist_tmp;
4923 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4925 /* Small args are padded downward. */
4926 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4927 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4928 size_binop (MINUS_EXPR, rounded_size, type_size));
4929 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4932 /* Compute new value for AP. */
4933 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4934 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4935 gimplify_and_add (t, pre_p);
4937 addr = fold_convert (build_pointer_type (type), addr);
4939 if (indirect)
4940 addr = build_va_arg_indirect_ref (addr);
4942 return build_va_arg_indirect_ref (addr);
4945 /* Build an indirect-ref expression over the given TREE, which represents a
4946 piece of a va_arg() expansion. */
4947 tree
4948 build_va_arg_indirect_ref (tree addr)
4950 addr = build_fold_indirect_ref (addr);
4952 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4953 mf_mark (addr);
4955 return addr;
4958 /* Return a dummy expression of type TYPE in order to keep going after an
4959 error. */
4961 static tree
4962 dummy_object (tree type)
4964 tree t = build_int_cst (build_pointer_type (type), 0);
4965 return build1 (INDIRECT_REF, type, t);
4968 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4969 builtin function, but a very special sort of operator. */
4971 enum gimplify_status
4972 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4974 tree promoted_type, have_va_type;
4975 tree valist = TREE_OPERAND (*expr_p, 0);
4976 tree type = TREE_TYPE (*expr_p);
4977 tree t;
4979 /* Verify that valist is of the proper type. */
4980 have_va_type = TREE_TYPE (valist);
4981 if (have_va_type == error_mark_node)
4982 return GS_ERROR;
4983 have_va_type = targetm.canonical_va_list_type (have_va_type);
4985 if (have_va_type == NULL_TREE)
4987 error ("first argument to %<va_arg%> not of type %<va_list%>");
4988 return GS_ERROR;
4991 /* Generate a diagnostic for requesting data of a type that cannot
4992 be passed through `...' due to type promotion at the call site. */
4993 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4994 != type)
4996 static bool gave_help;
4997 bool warned;
4999 /* Unfortunately, this is merely undefined, rather than a constraint
5000 violation, so we cannot make this an error. If this call is never
5001 executed, the program is still strictly conforming. */
5002 warned = warning (0, "%qT is promoted to %qT when passed through %<...%>",
5003 type, promoted_type);
5004 if (!gave_help && warned)
5006 gave_help = true;
5007 inform (input_location, "(so you should pass %qT not %qT to %<va_arg%>)",
5008 promoted_type, type);
5011 /* We can, however, treat "undefined" any way we please.
5012 Call abort to encourage the user to fix the program. */
5013 if (warned)
5014 inform (input_location, "if this code is reached, the program will abort");
5015 /* Before the abort, allow the evaluation of the va_list
5016 expression to exit or longjmp. */
5017 gimplify_and_add (valist, pre_p);
5018 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
5019 gimplify_and_add (t, pre_p);
5021 /* This is dead code, but go ahead and finish so that the
5022 mode of the result comes out right. */
5023 *expr_p = dummy_object (type);
5024 return GS_ALL_DONE;
5026 else
5028 /* Make it easier for the backends by protecting the valist argument
5029 from multiple evaluations. */
5030 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
5032 /* For this case, the backends will be expecting a pointer to
5033 TREE_TYPE (abi), but it's possible we've
5034 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
5035 So fix it. */
5036 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5038 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
5039 valist = fold_convert (p1, build_fold_addr_expr (valist));
5042 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
5044 else
5045 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
5047 if (!targetm.gimplify_va_arg_expr)
5048 /* FIXME: Once most targets are converted we should merely
5049 assert this is non-null. */
5050 return GS_ALL_DONE;
5052 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
5053 return GS_OK;
5057 /* Expand EXP, a call to __builtin_va_end. */
5059 static rtx
5060 expand_builtin_va_end (tree exp)
5062 tree valist = CALL_EXPR_ARG (exp, 0);
5064 /* Evaluate for side effects, if needed. I hate macros that don't
5065 do that. */
5066 if (TREE_SIDE_EFFECTS (valist))
5067 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5069 return const0_rtx;
5072 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5073 builtin rather than just as an assignment in stdarg.h because of the
5074 nastiness of array-type va_list types. */
5076 static rtx
5077 expand_builtin_va_copy (tree exp)
5079 tree dst, src, t;
5081 dst = CALL_EXPR_ARG (exp, 0);
5082 src = CALL_EXPR_ARG (exp, 1);
5084 dst = stabilize_va_list (dst, 1);
5085 src = stabilize_va_list (src, 0);
5087 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5089 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5091 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5092 TREE_SIDE_EFFECTS (t) = 1;
5093 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5095 else
5097 rtx dstb, srcb, size;
5099 /* Evaluate to pointers. */
5100 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5101 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5102 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5103 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5105 dstb = convert_memory_address (Pmode, dstb);
5106 srcb = convert_memory_address (Pmode, srcb);
5108 /* "Dereference" to BLKmode memories. */
5109 dstb = gen_rtx_MEM (BLKmode, dstb);
5110 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5111 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5112 srcb = gen_rtx_MEM (BLKmode, srcb);
5113 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5114 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5116 /* Copy. */
5117 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5120 return const0_rtx;
5123 /* Expand a call to one of the builtin functions __builtin_frame_address or
5124 __builtin_return_address. */
5126 static rtx
5127 expand_builtin_frame_address (tree fndecl, tree exp)
5129 /* The argument must be a nonnegative integer constant.
5130 It counts the number of frames to scan up the stack.
5131 The value is the return address saved in that frame. */
5132 if (call_expr_nargs (exp) == 0)
5133 /* Warning about missing arg was already issued. */
5134 return const0_rtx;
5135 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5137 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5138 error ("invalid argument to %<__builtin_frame_address%>");
5139 else
5140 error ("invalid argument to %<__builtin_return_address%>");
5141 return const0_rtx;
5143 else
5145 rtx tem
5146 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5147 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5149 /* Some ports cannot access arbitrary stack frames. */
5150 if (tem == NULL)
5152 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5153 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5154 else
5155 warning (0, "unsupported argument to %<__builtin_return_address%>");
5156 return const0_rtx;
5159 /* For __builtin_frame_address, return what we've got. */
5160 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5161 return tem;
5163 if (!REG_P (tem)
5164 && ! CONSTANT_P (tem))
5165 tem = copy_to_mode_reg (Pmode, tem);
5166 return tem;
5170 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5171 we failed and the caller should emit a normal call, otherwise try to get
5172 the result in TARGET, if convenient. */
5174 static rtx
5175 expand_builtin_alloca (tree exp, rtx target)
5177 rtx op0;
5178 rtx result;
5180 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5181 should always expand to function calls. These can be intercepted
5182 in libmudflap. */
5183 if (flag_mudflap)
5184 return NULL_RTX;
5186 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5187 return NULL_RTX;
5189 /* Compute the argument. */
5190 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5192 /* Allocate the desired space. */
5193 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5194 result = convert_memory_address (ptr_mode, result);
5196 return result;
5199 /* Expand a call to a bswap builtin with argument ARG0. MODE
5200 is the mode to expand with. */
5202 static rtx
5203 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5205 enum machine_mode mode;
5206 tree arg;
5207 rtx op0;
5209 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5210 return NULL_RTX;
5212 arg = CALL_EXPR_ARG (exp, 0);
5213 mode = TYPE_MODE (TREE_TYPE (arg));
5214 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5216 target = expand_unop (mode, bswap_optab, op0, target, 1);
5218 gcc_assert (target);
5220 return convert_to_mode (mode, target, 0);
5223 /* Expand a call to a unary builtin in EXP.
5224 Return NULL_RTX if a normal call should be emitted rather than expanding the
5225 function in-line. If convenient, the result should be placed in TARGET.
5226 SUBTARGET may be used as the target for computing one of EXP's operands. */
5228 static rtx
5229 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5230 rtx subtarget, optab op_optab)
5232 rtx op0;
5234 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5235 return NULL_RTX;
5237 /* Compute the argument. */
5238 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5239 VOIDmode, EXPAND_NORMAL);
5240 /* Compute op, into TARGET if possible.
5241 Set TARGET to wherever the result comes back. */
5242 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5243 op_optab, op0, target, 1);
5244 gcc_assert (target);
5246 return convert_to_mode (target_mode, target, 0);
5249 /* If the string passed to fputs is a constant and is one character
5250 long, we attempt to transform this call into __builtin_fputc(). */
5252 static rtx
5253 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5255 /* Verify the arguments in the original call. */
5256 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5258 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5259 CALL_EXPR_ARG (exp, 1),
5260 (target == const0_rtx),
5261 unlocked, NULL_TREE);
5262 if (result)
5263 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5265 return NULL_RTX;
5268 /* Expand a call to __builtin_expect. We just return our argument,
5269 since the builtin_expect semantics should already have been handled
5270 by the tree branch prediction pass. */
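/* A hypothetical use (the function name below is invented for
   illustration):

     if (__builtin_expect (ptr == NULL, 0))
       handle_rare_error ();

   The hint itself was consumed earlier by tree branch prediction; by the
   time we get here only the value of the first argument remains to be
   expanded.  */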
5272 static rtx
5273 expand_builtin_expect (tree exp, rtx target)
5275 tree arg, c;
5277 if (call_expr_nargs (exp) < 2)
5278 return const0_rtx;
5279 arg = CALL_EXPR_ARG (exp, 0);
5280 c = CALL_EXPR_ARG (exp, 1);
5282 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5283 /* When guessing was done, the hints should already have been stripped away. */
5284 gcc_assert (!flag_guess_branch_prob
5285 || optimize == 0 || errorcount || sorrycount);
5286 return target;
5289 void
5290 expand_builtin_trap (void)
5292 #ifdef HAVE_trap
5293 if (HAVE_trap)
5294 emit_insn (gen_trap ());
5295 else
5296 #endif
5297 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5298 emit_barrier ();
5301 /* Expand EXP, a call to fabs, fabsf or fabsl.
5302 Return NULL_RTX if a normal call should be emitted rather than expanding
5303 the function inline. If convenient, the result should be placed
5304 in TARGET. SUBTARGET may be used as the target for computing
5305 the operand. */
5307 static rtx
5308 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5310 enum machine_mode mode;
5311 tree arg;
5312 rtx op0;
5314 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5315 return NULL_RTX;
5317 arg = CALL_EXPR_ARG (exp, 0);
5318 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5319 mode = TYPE_MODE (TREE_TYPE (arg));
5320 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5321 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5324 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5325 Return NULL_RTX if a normal call should be emitted rather than expanding the
5326 function inline. If convenient, the result should be placed in TARGET.
5327 SUBTARGET may be used as the target for computing the operand. */
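/* For illustration, copysign returns the magnitude of its first argument
   with the sign of its second:

     copysign (3.0, -0.5)  == -3.0
     copysign (-2.0, 1.0)  ==  2.0  */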
5329 static rtx
5330 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5332 rtx op0, op1;
5333 tree arg;
5335 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5336 return NULL_RTX;
5338 arg = CALL_EXPR_ARG (exp, 0);
5339 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5341 arg = CALL_EXPR_ARG (exp, 1);
5342 op1 = expand_normal (arg);
5344 return expand_copysign (op0, op1, target);
5347 /* Create a new constant string literal and return a char* pointer to it.
5348 The STRING_CST value is the LEN characters at STR. */
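/* Illustrative sketch:  a caller wanting a char* for the bytes "hi" plus
   the terminating NUL passes LEN == 3,

     t = build_string_literal (3, "hi");

   and T is then an ADDR_EXPR denoting &"hi"[0], suitable as a call
   argument (this mirrors the use in expand_builtin_printf below).  */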
5349 tree
5350 build_string_literal (int len, const char *str)
5352 tree t, elem, index, type;
5354 t = build_string (len, str);
5355 elem = build_type_variant (char_type_node, 1, 0);
5356 index = build_index_type (size_int (len - 1));
5357 type = build_array_type (elem, index);
5358 TREE_TYPE (t) = type;
5359 TREE_CONSTANT (t) = 1;
5360 TREE_READONLY (t) = 1;
5361 TREE_STATIC (t) = 1;
5363 type = build_pointer_type (elem);
5364 t = build1 (ADDR_EXPR, type,
5365 build4 (ARRAY_REF, elem,
5366 t, integer_zero_node, NULL_TREE, NULL_TREE));
5367 return t;
5370 /* Expand EXP, a call to printf or printf_unlocked.
5371 Return NULL_RTX if a normal call should be emitted rather than transforming
5372 the function inline. If convenient, the result should be placed in
5373 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5374 call. */
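/* The rewrites performed below correspond to source-level changes such as
   (hypothetical user code, return value unused; names invented):

     printf ("%s\n", s);   =>  puts (s);
     printf ("%c", c);     =>  putchar (c);
     printf ("x");         =>  putchar ('x');
     printf ("hello\n");   =>  puts ("hello");
     printf ("");          =>  (nothing)

   Any other format containing '%' is left alone and expands to a normal
   call.  */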
5375 static rtx
5376 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5377 bool unlocked)
5379 /* If we're using an unlocked function, assume the other unlocked
5380 functions exist explicitly. */
5381 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5382 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5383 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5384 : implicit_built_in_decls[BUILT_IN_PUTS];
5385 const char *fmt_str;
5386 tree fn = 0;
5387 tree fmt, arg;
5388 int nargs = call_expr_nargs (exp);
5390 /* If the return value is used, don't do the transformation. */
5391 if (target != const0_rtx)
5392 return NULL_RTX;
5394 /* Verify the required arguments in the original call. */
5395 if (nargs == 0)
5396 return NULL_RTX;
5397 fmt = CALL_EXPR_ARG (exp, 0);
5398 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5399 return NULL_RTX;
5401 /* Check whether the format is a literal string constant. */
5402 fmt_str = c_getstr (fmt);
5403 if (fmt_str == NULL)
5404 return NULL_RTX;
5406 if (!init_target_chars ())
5407 return NULL_RTX;
5409 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5410 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5412 if ((nargs != 2)
5413 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5414 return NULL_RTX;
5415 if (fn_puts)
5416 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5418 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5419 else if (strcmp (fmt_str, target_percent_c) == 0)
5421 if ((nargs != 2)
5422 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5423 return NULL_RTX;
5424 if (fn_putchar)
5425 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5427 else
5429 /* We can't handle anything else with % args or %% ... yet. */
5430 if (strchr (fmt_str, target_percent))
5431 return NULL_RTX;
5433 if (nargs > 1)
5434 return NULL_RTX;
5436 /* If the format specifier was "", printf does nothing. */
5437 if (fmt_str[0] == '\0')
5438 return const0_rtx;
5439 /* If the format specifier has length of 1, call putchar. */
5440 if (fmt_str[1] == '\0')
5442 /* Given printf ("c"), where c is any single character,
5443 convert "c"[0] to an int and pass that to the replacement
5444 function. */
5445 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5446 if (fn_putchar)
5447 fn = build_call_expr (fn_putchar, 1, arg);
5449 else
5451 /* If the format specifier was "string\n", call puts("string"). */
5452 size_t len = strlen (fmt_str);
5453 if ((unsigned char)fmt_str[len - 1] == target_newline)
5455 /* Create a NUL-terminated string that's one char shorter
5456 than the original, stripping off the trailing '\n'. */
5457 char *newstr = XALLOCAVEC (char, len);
5458 memcpy (newstr, fmt_str, len - 1);
5459 newstr[len - 1] = 0;
5460 arg = build_string_literal (len, newstr);
5461 if (fn_puts)
5462 fn = build_call_expr (fn_puts, 1, arg);
5464 else
5465 /* We'd like to arrange to call fputs(string,stdout) here,
5466 but we need stdout and don't have a way to get it yet. */
5467 return NULL_RTX;
5471 if (!fn)
5472 return NULL_RTX;
5473 if (TREE_CODE (fn) == CALL_EXPR)
5474 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5475 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5478 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5479 Return NULL_RTX if a normal call should be emitted rather than transforming
5480 the function inline. If convenient, the result should be placed in
5481 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5482 call. */
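/* Likewise, illustrative source-level equivalents of the rewrites below
   (return value unused; names invented):

     fprintf (fp, "%s", s);   =>  fputs (s, fp);
     fprintf (fp, "%c", c);   =>  fputc (c, fp);
     fprintf (fp, "hello");   =>  fputs ("hello", fp);
     fprintf (fp, "");        =>  FP evaluated for side effects only  */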
5483 static rtx
5484 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5485 bool unlocked)
5487 /* If we're using an unlocked function, assume the other unlocked
5488 functions exist explicitly. */
5489 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5490 : implicit_built_in_decls[BUILT_IN_FPUTC];
5491 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5492 : implicit_built_in_decls[BUILT_IN_FPUTS];
5493 const char *fmt_str;
5494 tree fn = 0;
5495 tree fmt, fp, arg;
5496 int nargs = call_expr_nargs (exp);
5498 /* If the return value is used, don't do the transformation. */
5499 if (target != const0_rtx)
5500 return NULL_RTX;
5502 /* Verify the required arguments in the original call. */
5503 if (nargs < 2)
5504 return NULL_RTX;
5505 fp = CALL_EXPR_ARG (exp, 0);
5506 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5507 return NULL_RTX;
5508 fmt = CALL_EXPR_ARG (exp, 1);
5509 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5510 return NULL_RTX;
5512 /* Check whether the format is a literal string constant. */
5513 fmt_str = c_getstr (fmt);
5514 if (fmt_str == NULL)
5515 return NULL_RTX;
5517 if (!init_target_chars ())
5518 return NULL_RTX;
5520 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5521 if (strcmp (fmt_str, target_percent_s) == 0)
5523 if ((nargs != 3)
5524 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5525 return NULL_RTX;
5526 arg = CALL_EXPR_ARG (exp, 2);
5527 if (fn_fputs)
5528 fn = build_call_expr (fn_fputs, 2, arg, fp);
5530 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5531 else if (strcmp (fmt_str, target_percent_c) == 0)
5533 if ((nargs != 3)
5534 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5535 return NULL_RTX;
5536 arg = CALL_EXPR_ARG (exp, 2);
5537 if (fn_fputc)
5538 fn = build_call_expr (fn_fputc, 2, arg, fp);
5540 else
5542 /* We can't handle anything else with % args or %% ... yet. */
5543 if (strchr (fmt_str, target_percent))
5544 return NULL_RTX;
5546 if (nargs > 2)
5547 return NULL_RTX;
5549 /* If the format specifier was "", fprintf does nothing. */
5550 if (fmt_str[0] == '\0')
5552 /* Evaluate and ignore FILE* argument for side-effects. */
5553 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5554 return const0_rtx;
5557 /* When "string" doesn't contain %, replace all cases of
5558 fprintf(stream,string) with fputs(string,stream). The fputs
5559 builtin will take care of special cases like length == 1. */
5560 if (fn_fputs)
5561 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5564 if (!fn)
5565 return NULL_RTX;
5566 if (TREE_CODE (fn) == CALL_EXPR)
5567 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5568 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5571 /* Expand a call EXP to sprintf. Return NULL_RTX if
5572 a normal call should be emitted rather than expanding the function
5573 inline. If convenient, the result should be placed in TARGET with
5574 mode MODE. */
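/* Illustrative source-level equivalents of the rewrites below (names
   invented):

     sprintf (d, "hello");   =>  strcpy (d, "hello"), result 5 if used
     sprintf (d, "%s", s);   =>  strcpy (d, s), result strlen (s) but only
                                 when that length is a compile-time constant

   Formats containing any other '%' directive are left for a normal call.  */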
5576 static rtx
5577 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5579 tree dest, fmt;
5580 const char *fmt_str;
5581 int nargs = call_expr_nargs (exp);
5583 /* Verify the required arguments in the original call. */
5584 if (nargs < 2)
5585 return NULL_RTX;
5586 dest = CALL_EXPR_ARG (exp, 0);
5587 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5588 return NULL_RTX;
5589 fmt = CALL_EXPR_ARG (exp, 1);
5590 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5591 return NULL_RTX;
5593 /* Check whether the format is a literal string constant. */
5594 fmt_str = c_getstr (fmt);
5595 if (fmt_str == NULL)
5596 return NULL_RTX;
5598 if (!init_target_chars ())
5599 return NULL_RTX;
5601 /* If the format doesn't contain % args or %%, use strcpy. */
5602 if (strchr (fmt_str, target_percent) == 0)
5604 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5605 tree exp;
5607 if ((nargs > 2) || ! fn)
5608 return NULL_RTX;
5609 expand_expr (build_call_expr (fn, 2, dest, fmt),
5610 const0_rtx, VOIDmode, EXPAND_NORMAL);
5611 if (target == const0_rtx)
5612 return const0_rtx;
5613 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5614 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5616 /* If the format is "%s", use strcpy if the result isn't used. */
5617 else if (strcmp (fmt_str, target_percent_s) == 0)
5619 tree fn, arg, len;
5620 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5622 if (! fn)
5623 return NULL_RTX;
5624 if (nargs != 3)
5625 return NULL_RTX;
5626 arg = CALL_EXPR_ARG (exp, 2);
5627 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5628 return NULL_RTX;
5630 if (target != const0_rtx)
5632 len = c_strlen (arg, 1);
5633 if (! len || TREE_CODE (len) != INTEGER_CST)
5634 return NULL_RTX;
5636 else
5637 len = NULL_TREE;
5639 expand_expr (build_call_expr (fn, 2, dest, arg),
5640 const0_rtx, VOIDmode, EXPAND_NORMAL);
5642 if (target == const0_rtx)
5643 return const0_rtx;
5644 return expand_expr (len, target, mode, EXPAND_NORMAL);
5647 return NULL_RTX;
5650 /* Expand a call to either the entry or exit function profiler. */
5652 static rtx
5653 expand_builtin_profile_func (bool exitp)
5655 rtx this_rtx, which;
5657 this_rtx = DECL_RTL (current_function_decl);
5658 gcc_assert (MEM_P (this_rtx));
5659 this_rtx = XEXP (this_rtx, 0);
5661 if (exitp)
5662 which = profile_function_exit_libfunc;
5663 else
5664 which = profile_function_entry_libfunc;
5666 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5667 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5668 0),
5669 Pmode);
5671 return const0_rtx;
5674 /* Expand a call to __builtin___clear_cache. */
5676 static rtx
5677 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5679 #ifndef HAVE_clear_cache
5680 #ifdef CLEAR_INSN_CACHE
5681 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5682 does something. Just do the default expansion to a call to
5683 __clear_cache(). */
5684 return NULL_RTX;
5685 #else
5686 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5687 does nothing. There is no need to call it. Do nothing. */
5688 return const0_rtx;
5689 #endif /* CLEAR_INSN_CACHE */
5690 #else
5691 /* We have a "clear_cache" insn, and it will handle everything. */
5692 tree begin, end;
5693 rtx begin_rtx, end_rtx;
5694 enum insn_code icode;
5696 /* We must not expand to a library call. If we did, any
5697 fallback library function in libgcc that might contain a call to
5698 __builtin___clear_cache() would recurse infinitely. */
5699 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5701 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5702 return const0_rtx;
5705 if (HAVE_clear_cache)
5707 icode = CODE_FOR_clear_cache;
5709 begin = CALL_EXPR_ARG (exp, 0);
5710 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5711 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5712 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5713 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5715 end = CALL_EXPR_ARG (exp, 1);
5716 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5717 end_rtx = convert_memory_address (Pmode, end_rtx);
5718 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5719 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5721 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5723 return const0_rtx;
5724 #endif /* HAVE_clear_cache */
5727 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
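/* The rounding below is the usual align-upward computation; e.g. with a
   16-byte trampoline alignment and an incoming address of 0x1003
   (illustrative numbers only):

     (0x1003 + 15) & -16  ==  0x1010  */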
5729 static rtx
5730 round_trampoline_addr (rtx tramp)
5732 rtx temp, addend, mask;
5734 /* If we don't need too much alignment, we'll have been guaranteed
5735 proper alignment by get_trampoline_type. */
5736 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5737 return tramp;
5739 /* Round address up to desired boundary. */
5740 temp = gen_reg_rtx (Pmode);
5741 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5742 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5744 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5745 temp, 0, OPTAB_LIB_WIDEN);
5746 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5747 temp, 0, OPTAB_LIB_WIDEN);
5749 return tramp;
5752 static rtx
5753 expand_builtin_init_trampoline (tree exp)
5755 tree t_tramp, t_func, t_chain;
5756 rtx r_tramp, r_func, r_chain;
5757 #ifdef TRAMPOLINE_TEMPLATE
5758 rtx blktramp;
5759 #endif
5761 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5762 POINTER_TYPE, VOID_TYPE))
5763 return NULL_RTX;
5765 t_tramp = CALL_EXPR_ARG (exp, 0);
5766 t_func = CALL_EXPR_ARG (exp, 1);
5767 t_chain = CALL_EXPR_ARG (exp, 2);
5769 r_tramp = expand_normal (t_tramp);
5770 r_func = expand_normal (t_func);
5771 r_chain = expand_normal (t_chain);
5773 /* Generate insns to initialize the trampoline. */
5774 r_tramp = round_trampoline_addr (r_tramp);
5775 #ifdef TRAMPOLINE_TEMPLATE
5776 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5777 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5778 emit_block_move (blktramp, assemble_trampoline_template (),
5779 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5780 #endif
5781 trampolines_created = 1;
5782 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5784 return const0_rtx;
5787 static rtx
5788 expand_builtin_adjust_trampoline (tree exp)
5790 rtx tramp;
5792 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5793 return NULL_RTX;
5795 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5796 tramp = round_trampoline_addr (tramp);
5797 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5798 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5799 #endif
5801 return tramp;
5804 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5805 function. The function first checks whether the back end provides
5806 an insn to implement signbit for the respective mode. If not, it
5807 checks whether the floating point format of the value is such that
5808 the sign bit can be extracted. If that is not the case, the
5809 function returns NULL_RTX to indicate that a normal call should be
5810 emitted rather than expanding the function in-line. EXP is the
5811 expression that is a call to the builtin function; if convenient,
5812 the result should be placed in TARGET. */
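/* For example, signbit (-0.0) and signbit (-3.5) are nonzero while
   signbit (3.5) is zero.  For an IEEE single-precision argument the code
   below amounts to extracting bit 31 of the representation, either with a
   single mask when that bit lies in the lowpart of the result mode, or
   with a right shift followed by an AND.  */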
5813 static rtx
5814 expand_builtin_signbit (tree exp, rtx target)
5816 const struct real_format *fmt;
5817 enum machine_mode fmode, imode, rmode;
5818 HOST_WIDE_INT hi, lo;
5819 tree arg;
5820 int word, bitpos;
5821 enum insn_code icode;
5822 rtx temp;
5824 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5825 return NULL_RTX;
5827 arg = CALL_EXPR_ARG (exp, 0);
5828 fmode = TYPE_MODE (TREE_TYPE (arg));
5829 rmode = TYPE_MODE (TREE_TYPE (exp));
5830 fmt = REAL_MODE_FORMAT (fmode);
5832 arg = builtin_save_expr (arg);
5834 /* Expand the argument yielding a RTX expression. */
5835 temp = expand_normal (arg);
5837 /* Check if the back end provides an insn that handles signbit for the
5838 argument's mode. */
5839 icode = signbit_optab->handlers [(int) fmode].insn_code;
5840 if (icode != CODE_FOR_nothing)
5842 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5843 emit_unop_insn (icode, target, temp, UNKNOWN);
5844 return target;
5847 /* For floating point formats without a sign bit, implement signbit
5848 as "ARG < 0.0". */
5849 bitpos = fmt->signbit_ro;
5850 if (bitpos < 0)
5852 /* But we can't do this if the format supports signed zero. */
5853 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5854 return NULL_RTX;
5856 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5857 build_real (TREE_TYPE (arg), dconst0));
5858 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5861 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5863 imode = int_mode_for_mode (fmode);
5864 if (imode == BLKmode)
5865 return NULL_RTX;
5866 temp = gen_lowpart (imode, temp);
5868 else
5870 imode = word_mode;
5871 /* Handle targets with different FP word orders. */
5872 if (FLOAT_WORDS_BIG_ENDIAN)
5873 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5874 else
5875 word = bitpos / BITS_PER_WORD;
5876 temp = operand_subword_force (temp, word, fmode);
5877 bitpos = bitpos % BITS_PER_WORD;
5880 /* Force the intermediate word_mode (or narrower) result into a
5881 register. This avoids attempting to create paradoxical SUBREGs
5882 of floating point modes below. */
5883 temp = force_reg (imode, temp);
5885 /* If the bitpos is within the "result mode" lowpart, the operation
5886 can be implemented with a single bitwise AND. Otherwise, we need
5887 a right shift and an AND. */
5889 if (bitpos < GET_MODE_BITSIZE (rmode))
5891 if (bitpos < HOST_BITS_PER_WIDE_INT)
5893 hi = 0;
5894 lo = (HOST_WIDE_INT) 1 << bitpos;
5896 else
5898 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5899 lo = 0;
5902 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5903 temp = gen_lowpart (rmode, temp);
5904 temp = expand_binop (rmode, and_optab, temp,
5905 immed_double_const (lo, hi, rmode),
5906 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5908 else
5910 /* Perform a logical right shift to place the signbit in the least
5911 significant bit, then truncate the result to the desired mode
5912 and mask just this bit. */
5913 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5914 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5915 temp = gen_lowpart (rmode, temp);
5916 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5917 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5920 return temp;
5923 /* Expand fork or exec calls. TARGET is the desired target of the
5924 call. EXP is the call. FN is the
5925 identifier of the actual function. IGNORE is nonzero if the
5926 value is to be ignored. */
5928 static rtx
5929 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5931 tree id, decl;
5932 tree call;
5934 /* If we are not profiling, just call the function. */
5935 if (!profile_arc_flag)
5936 return NULL_RTX;
5938 /* Otherwise call the wrapper. This should be equivalent as far as the rest
5939 of the compiler is concerned, so the generated code does not diverge, and
5940 the wrapper may run the code necessary to keep the profiling sane. */
5942 switch (DECL_FUNCTION_CODE (fn))
5944 case BUILT_IN_FORK:
5945 id = get_identifier ("__gcov_fork");
5946 break;
5948 case BUILT_IN_EXECL:
5949 id = get_identifier ("__gcov_execl");
5950 break;
5952 case BUILT_IN_EXECV:
5953 id = get_identifier ("__gcov_execv");
5954 break;
5956 case BUILT_IN_EXECLP:
5957 id = get_identifier ("__gcov_execlp");
5958 break;
5960 case BUILT_IN_EXECLE:
5961 id = get_identifier ("__gcov_execle");
5962 break;
5964 case BUILT_IN_EXECVP:
5965 id = get_identifier ("__gcov_execvp");
5966 break;
5968 case BUILT_IN_EXECVE:
5969 id = get_identifier ("__gcov_execve");
5970 break;
5972 default:
5973 gcc_unreachable ();
5976 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5977 DECL_EXTERNAL (decl) = 1;
5978 TREE_PUBLIC (decl) = 1;
5979 DECL_ARTIFICIAL (decl) = 1;
5980 TREE_NOTHROW (decl) = 1;
5981 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5982 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5983 call = rewrite_call_expr (exp, 0, decl, 0);
5984 return expand_call (call, target, ignore);
5989 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5990 the pointer in these functions is void*, the tree optimizers may remove
5991 casts. The mode computed in expand_builtin isn't reliable either, due
5992 to __sync_bool_compare_and_swap.
5994 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5995 group of builtins. This gives us log2 of the mode size. */
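/* For example (illustrative): for __sync_fetch_and_add_4 the difference
   from the _1 variant is 2, so we request a BITS_PER_UNIT << 2 == 32-bit
   integer mode, i.e. SImode on a typical 8-bit-byte target.  */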
5997 static inline enum machine_mode
5998 get_builtin_sync_mode (int fcode_diff)
6000 /* The size is not negotiable, so ask not to get BLKmode in return
6001 if the target indicates that a smaller size would be better. */
6002 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
6005 /* Expand the memory expression LOC and return the appropriate memory operand
6006 for the builtin_sync operations. */
6008 static rtx
6009 get_builtin_sync_mem (tree loc, enum machine_mode mode)
6011 rtx addr, mem;
6013 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
6015 /* Note that we explicitly do not want any alias information for this
6016 memory, so that we kill all other live memories. Otherwise we don't
6017 satisfy the full barrier semantics of the intrinsic. */
6018 mem = validize_mem (gen_rtx_MEM (mode, addr));
6020 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
6021 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6022 MEM_VOLATILE_P (mem) = 1;
6024 return mem;
6027 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6028 EXP is the CALL_EXPR. CODE is the rtx code
6029 that corresponds to the arithmetic or logical operation from the name;
6030 an exception here is that NOT actually means NAND. TARGET is an optional
6031 place for us to store the results; AFTER is true if this is the
6032 fetch_and_xxx form. IGNORE is true if we don't actually care about
6033 the result of the operation at all. */
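/* Illustrative semantics of the two families handled here, where X is the
   old memory contents and V the second argument (names invented):

     __sync_fetch_and_add (&m, v)   stores X + V, returns X
     __sync_add_and_fetch (&m, v)   stores X + V, returns X + V

   As the warning below notes, since GCC 4.4 the NAND forms compute
   ~(X & V) rather than ~X & V.  */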
6035 static rtx
6036 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
6037 enum rtx_code code, bool after,
6038 rtx target, bool ignore)
6040 rtx val, mem;
6041 enum machine_mode old_mode;
6043 if (code == NOT && warn_sync_nand)
6045 tree fndecl = get_callee_fndecl (exp);
6046 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6048 static bool warned_f_a_n, warned_n_a_f;
6050 switch (fcode)
6052 case BUILT_IN_FETCH_AND_NAND_1:
6053 case BUILT_IN_FETCH_AND_NAND_2:
6054 case BUILT_IN_FETCH_AND_NAND_4:
6055 case BUILT_IN_FETCH_AND_NAND_8:
6056 case BUILT_IN_FETCH_AND_NAND_16:
6058 if (warned_f_a_n)
6059 break;
6061 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
6062 inform (input_location,
6063 "%qD changed semantics in GCC 4.4", fndecl);
6064 warned_f_a_n = true;
6065 break;
6067 case BUILT_IN_NAND_AND_FETCH_1:
6068 case BUILT_IN_NAND_AND_FETCH_2:
6069 case BUILT_IN_NAND_AND_FETCH_4:
6070 case BUILT_IN_NAND_AND_FETCH_8:
6071 case BUILT_IN_NAND_AND_FETCH_16:
6073 if (warned_n_a_f)
6074 break;
6076 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
6077 inform (input_location,
6078 "%qD changed semantics in GCC 4.4", fndecl);
6079 warned_n_a_f = true;
6080 break;
6082 default:
6083 gcc_unreachable ();
6087 /* Expand the operands. */
6088 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6090 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6091 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6092 of CONST_INTs, where we know the old_mode only from the call argument. */
6093 old_mode = GET_MODE (val);
6094 if (old_mode == VOIDmode)
6095 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6096 val = convert_modes (mode, old_mode, val, 1);
6098 if (ignore)
6099 return expand_sync_operation (mem, val, code);
6100 else
6101 return expand_sync_fetch_operation (mem, val, code, after, target);
6104 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6105 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6106 true if this is the boolean form. TARGET is a place for us to store the
6107 results; this is NOT optional if IS_BOOL is true. */
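/* Illustrative semantics (not code from this file):

     __sync_val_compare_and_swap (&m, oldv, newv)
       writes NEWV when the current contents equal OLDV and returns the
       prior contents;
     __sync_bool_compare_and_swap (&m, oldv, newv)
       performs the same operation but returns whether the store
       happened.  */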
6109 static rtx
6110 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
6111 bool is_bool, rtx target)
6113 rtx old_val, new_val, mem;
6114 enum machine_mode old_mode;
6116 /* Expand the operands. */
6117 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6120 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6121 mode, EXPAND_NORMAL);
6122 /* If OLD_VAL is promoted to a wider mode, convert it back to MODE. Take care
6123 of CONST_INTs, where we know the old_mode only from the call argument. */
6124 old_mode = GET_MODE (old_val);
6125 if (old_mode == VOIDmode)
6126 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6127 old_val = convert_modes (mode, old_mode, old_val, 1);
6129 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6130 mode, EXPAND_NORMAL);
6131 /* If NEW_VAL is promoted to a wider mode, convert it back to MODE. Take care
6132 of CONST_INTs, where we know the old_mode only from the call argument. */
6133 old_mode = GET_MODE (new_val);
6134 if (old_mode == VOIDmode)
6135 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6136 new_val = convert_modes (mode, old_mode, new_val, 1);
6138 if (is_bool)
6139 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6140 else
6141 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6144 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6145 general form is actually an atomic exchange, and some targets only
6146 support a reduced form with the second argument being a constant 1.
6147 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6148 the results. */
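/* A hypothetical spinlock acquire using this intrinsic (names invented):

     while (__sync_lock_test_and_set (&lock, 1))
       continue;

   On targets with a full atomic exchange the previous contents are
   returned; targets with only the reduced form require the constant 1
   used here.  */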
6150 static rtx
6151 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6152 rtx target)
6154 rtx val, mem;
6155 enum machine_mode old_mode;
6157 /* Expand the operands. */
6158 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6159 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6160 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6161 of CONST_INTs, where we know the old_mode only from the call argument. */
6162 old_mode = GET_MODE (val);
6163 if (old_mode == VOIDmode)
6164 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6165 val = convert_modes (mode, old_mode, val, 1);
6167 return expand_sync_lock_test_and_set (mem, val, target);
6170 /* Expand the __sync_synchronize intrinsic. */
6172 static void
6173 expand_builtin_synchronize (void)
6175 tree x;
6177 #ifdef HAVE_memory_barrier
6178 if (HAVE_memory_barrier)
6180 emit_insn (gen_memory_barrier ());
6181 return;
6183 #endif
6185 if (synchronize_libfunc != NULL_RTX)
6187 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6188 return;
6191 /* If no explicit memory barrier instruction is available, create an
6192 empty asm stmt with a memory clobber. */
6193 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6194 tree_cons (NULL, build_string (6, "memory"), NULL));
6195 ASM_VOLATILE_P (x) = 1;
6196 expand_asm_expr (x);
6199 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6201 static void
6202 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6204 enum insn_code icode;
6205 rtx mem, insn;
6206 rtx val = const0_rtx;
6208 /* Expand the operands. */
6209 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6211 /* If there is an explicit operation in the md file, use it. */
6212 icode = sync_lock_release[mode];
6213 if (icode != CODE_FOR_nothing)
6215 if (!insn_data[icode].operand[1].predicate (val, mode))
6216 val = force_reg (mode, val);
6218 insn = GEN_FCN (icode) (mem, val);
6219 if (insn)
6221 emit_insn (insn);
6222 return;
6226 /* Otherwise we can implement this operation by emitting a barrier
6227 followed by a store of zero. */
6228 expand_builtin_synchronize ();
6229 emit_move_insn (mem, val);
6232 /* Expand an expression EXP that calls a built-in function,
6233 with result going to TARGET if that's convenient
6234 (and in mode MODE if that's convenient).
6235 SUBTARGET may be used as the target for computing one of EXP's operands.
6236 IGNORE is nonzero if the value is to be ignored. */
6239 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6240 int ignore)
6242 tree fndecl = get_callee_fndecl (exp);
6243 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6244 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6246 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6247 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6249 /* When not optimizing, generate calls to library functions for a certain
6250 set of builtins. */
6251 if (!optimize
6252 && !called_as_built_in (fndecl)
6253 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6254 && fcode != BUILT_IN_ALLOCA
6255 && fcode != BUILT_IN_FREE)
6256 return expand_call (exp, target, ignore);
6258 /* The built-in function expanders test for target == const0_rtx
6259 to determine whether the function's result will be ignored. */
6260 if (ignore)
6261 target = const0_rtx;
6263 /* If the result of a pure or const built-in function is ignored, and
6264 none of its arguments are volatile, we can avoid expanding the
6265 built-in call and just evaluate the arguments for side-effects. */
6266 if (target == const0_rtx
6267 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6269 bool volatilep = false;
6270 tree arg;
6271 call_expr_arg_iterator iter;
6273 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6274 if (TREE_THIS_VOLATILE (arg))
6276 volatilep = true;
6277 break;
6280 if (! volatilep)
6282 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6283 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6284 return const0_rtx;
6288 switch (fcode)
6290 CASE_FLT_FN (BUILT_IN_FABS):
6291 target = expand_builtin_fabs (exp, target, subtarget);
6292 if (target)
6293 return target;
6294 break;
6296 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6297 target = expand_builtin_copysign (exp, target, subtarget);
6298 if (target)
6299 return target;
6300 break;
6302 /* Just do a normal library call if we were unable to fold
6303 the values. */
6304 CASE_FLT_FN (BUILT_IN_CABS):
6305 break;
6307 CASE_FLT_FN (BUILT_IN_EXP):
6308 CASE_FLT_FN (BUILT_IN_EXP10):
6309 CASE_FLT_FN (BUILT_IN_POW10):
6310 CASE_FLT_FN (BUILT_IN_EXP2):
6311 CASE_FLT_FN (BUILT_IN_EXPM1):
6312 CASE_FLT_FN (BUILT_IN_LOGB):
6313 CASE_FLT_FN (BUILT_IN_LOG):
6314 CASE_FLT_FN (BUILT_IN_LOG10):
6315 CASE_FLT_FN (BUILT_IN_LOG2):
6316 CASE_FLT_FN (BUILT_IN_LOG1P):
6317 CASE_FLT_FN (BUILT_IN_TAN):
6318 CASE_FLT_FN (BUILT_IN_ASIN):
6319 CASE_FLT_FN (BUILT_IN_ACOS):
6320 CASE_FLT_FN (BUILT_IN_ATAN):
6321 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6322 because of possible accuracy problems. */
6323 if (! flag_unsafe_math_optimizations)
6324 break;
6325 CASE_FLT_FN (BUILT_IN_SQRT):
6326 CASE_FLT_FN (BUILT_IN_FLOOR):
6327 CASE_FLT_FN (BUILT_IN_CEIL):
6328 CASE_FLT_FN (BUILT_IN_TRUNC):
6329 CASE_FLT_FN (BUILT_IN_ROUND):
6330 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6331 CASE_FLT_FN (BUILT_IN_RINT):
6332 target = expand_builtin_mathfn (exp, target, subtarget);
6333 if (target)
6334 return target;
6335 break;
6337 CASE_FLT_FN (BUILT_IN_ILOGB):
6338 if (! flag_unsafe_math_optimizations)
6339 break;
6340 CASE_FLT_FN (BUILT_IN_ISINF):
6341 CASE_FLT_FN (BUILT_IN_FINITE):
6342 case BUILT_IN_ISFINITE:
6343 case BUILT_IN_ISNORMAL:
6344 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6345 if (target)
6346 return target;
6347 break;
6349 CASE_FLT_FN (BUILT_IN_LCEIL):
6350 CASE_FLT_FN (BUILT_IN_LLCEIL):
6351 CASE_FLT_FN (BUILT_IN_LFLOOR):
6352 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6353 target = expand_builtin_int_roundingfn (exp, target);
6354 if (target)
6355 return target;
6356 break;
6358 CASE_FLT_FN (BUILT_IN_LRINT):
6359 CASE_FLT_FN (BUILT_IN_LLRINT):
6360 CASE_FLT_FN (BUILT_IN_LROUND):
6361 CASE_FLT_FN (BUILT_IN_LLROUND):
6362 target = expand_builtin_int_roundingfn_2 (exp, target);
6363 if (target)
6364 return target;
6365 break;
6367 CASE_FLT_FN (BUILT_IN_POW):
6368 target = expand_builtin_pow (exp, target, subtarget);
6369 if (target)
6370 return target;
6371 break;
6373 CASE_FLT_FN (BUILT_IN_POWI):
6374 target = expand_builtin_powi (exp, target, subtarget);
6375 if (target)
6376 return target;
6377 break;
6379 CASE_FLT_FN (BUILT_IN_ATAN2):
6380 CASE_FLT_FN (BUILT_IN_LDEXP):
6381 CASE_FLT_FN (BUILT_IN_SCALB):
6382 CASE_FLT_FN (BUILT_IN_SCALBN):
6383 CASE_FLT_FN (BUILT_IN_SCALBLN):
6384 if (! flag_unsafe_math_optimizations)
6385 break;
6387 CASE_FLT_FN (BUILT_IN_FMOD):
6388 CASE_FLT_FN (BUILT_IN_REMAINDER):
6389 CASE_FLT_FN (BUILT_IN_DREM):
6390 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6391 if (target)
6392 return target;
6393 break;
6395 CASE_FLT_FN (BUILT_IN_CEXPI):
6396 target = expand_builtin_cexpi (exp, target, subtarget);
6397 gcc_assert (target);
6398 return target;
6400 CASE_FLT_FN (BUILT_IN_SIN):
6401 CASE_FLT_FN (BUILT_IN_COS):
6402 if (! flag_unsafe_math_optimizations)
6403 break;
6404 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6405 if (target)
6406 return target;
6407 break;
6409 CASE_FLT_FN (BUILT_IN_SINCOS):
6410 if (! flag_unsafe_math_optimizations)
6411 break;
6412 target = expand_builtin_sincos (exp);
6413 if (target)
6414 return target;
6415 break;
6417 case BUILT_IN_APPLY_ARGS:
6418 return expand_builtin_apply_args ();
6420 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6421 FUNCTION with a copy of the parameters described by
6422 ARGUMENTS, and ARGSIZE. It returns a block of memory
6423 allocated on the stack into which is stored all the registers
6424 that might possibly be used for returning the result of a
6425 function. ARGUMENTS is the value returned by
6426 __builtin_apply_args. ARGSIZE is the number of bytes of
6427 arguments that must be copied. ??? How should this value be
6428 computed? We'll also need a safe worst case value for varargs
6429 functions. */
6430 case BUILT_IN_APPLY:
6431 if (!validate_arglist (exp, POINTER_TYPE,
6432 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6433 && !validate_arglist (exp, REFERENCE_TYPE,
6434 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6435 return const0_rtx;
6436 else
6438 rtx ops[3];
6440 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6441 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6442 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6444 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6447 /* __builtin_return (RESULT) causes the function to return the
6448 value described by RESULT. RESULT is address of the block of
6449 memory returned by __builtin_apply. */
6450 case BUILT_IN_RETURN:
6451 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6452 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6453 return const0_rtx;
6455 case BUILT_IN_SAVEREGS:
6456 return expand_builtin_saveregs ();
6458 case BUILT_IN_ARGS_INFO:
6459 return expand_builtin_args_info (exp);
6461 case BUILT_IN_VA_ARG_PACK:
6462 /* All valid uses of __builtin_va_arg_pack () are removed during
6463 inlining. */
6464 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6465 return const0_rtx;
6467 case BUILT_IN_VA_ARG_PACK_LEN:
6468 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6469 inlining. */
6470 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6471 return const0_rtx;
6473 /* Return the address of the first anonymous stack arg. */
6474 case BUILT_IN_NEXT_ARG:
6475 if (fold_builtin_next_arg (exp, false))
6476 return const0_rtx;
6477 return expand_builtin_next_arg ();
6479 case BUILT_IN_CLEAR_CACHE:
6480 target = expand_builtin___clear_cache (exp);
6481 if (target)
6482 return target;
6483 break;
6485 case BUILT_IN_CLASSIFY_TYPE:
6486 return expand_builtin_classify_type (exp);
6488 case BUILT_IN_CONSTANT_P:
6489 return const0_rtx;
6491 case BUILT_IN_FRAME_ADDRESS:
6492 case BUILT_IN_RETURN_ADDRESS:
6493 return expand_builtin_frame_address (fndecl, exp);
6495 /* Returns the address of the area where the structure is returned.
6496 0 otherwise. */
6497 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6498 if (call_expr_nargs (exp) != 0
6499 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6500 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6501 return const0_rtx;
6502 else
6503 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6505 case BUILT_IN_ALLOCA:
6506 target = expand_builtin_alloca (exp, target);
6507 if (target)
6508 return target;
6509 break;
6511 case BUILT_IN_STACK_SAVE:
6512 return expand_stack_save ();
6514 case BUILT_IN_STACK_RESTORE:
6515 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6516 return const0_rtx;
6518 case BUILT_IN_BSWAP32:
6519 case BUILT_IN_BSWAP64:
6520 target = expand_builtin_bswap (exp, target, subtarget);
6522 if (target)
6523 return target;
6524 break;
6526 CASE_INT_FN (BUILT_IN_FFS):
6527 case BUILT_IN_FFSIMAX:
6528 target = expand_builtin_unop (target_mode, exp, target,
6529 subtarget, ffs_optab);
6530 if (target)
6531 return target;
6532 break;
6534 CASE_INT_FN (BUILT_IN_CLZ):
6535 case BUILT_IN_CLZIMAX:
6536 target = expand_builtin_unop (target_mode, exp, target,
6537 subtarget, clz_optab);
6538 if (target)
6539 return target;
6540 break;
6542 CASE_INT_FN (BUILT_IN_CTZ):
6543 case BUILT_IN_CTZIMAX:
6544 target = expand_builtin_unop (target_mode, exp, target,
6545 subtarget, ctz_optab);
6546 if (target)
6547 return target;
6548 break;
6550 CASE_INT_FN (BUILT_IN_POPCOUNT):
6551 case BUILT_IN_POPCOUNTIMAX:
6552 target = expand_builtin_unop (target_mode, exp, target,
6553 subtarget, popcount_optab);
6554 if (target)
6555 return target;
6556 break;
6558 CASE_INT_FN (BUILT_IN_PARITY):
6559 case BUILT_IN_PARITYIMAX:
6560 target = expand_builtin_unop (target_mode, exp, target,
6561 subtarget, parity_optab);
6562 if (target)
6563 return target;
6564 break;
6566 case BUILT_IN_STRLEN:
6567 target = expand_builtin_strlen (exp, target, target_mode);
6568 if (target)
6569 return target;
6570 break;
6572 case BUILT_IN_STRCPY:
6573 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6574 if (target)
6575 return target;
6576 break;
6578 case BUILT_IN_STRNCPY:
6579 target = expand_builtin_strncpy (exp, target, mode);
6580 if (target)
6581 return target;
6582 break;
6584 case BUILT_IN_STPCPY:
6585 target = expand_builtin_stpcpy (exp, target, mode);
6586 if (target)
6587 return target;
6588 break;
6590 case BUILT_IN_STRCAT:
6591 target = expand_builtin_strcat (fndecl, exp, target, mode);
6592 if (target)
6593 return target;
6594 break;
6596 case BUILT_IN_STRNCAT:
6597 target = expand_builtin_strncat (exp, target, mode);
6598 if (target)
6599 return target;
6600 break;
6602 case BUILT_IN_STRSPN:
6603 target = expand_builtin_strspn (exp, target, mode);
6604 if (target)
6605 return target;
6606 break;
6608 case BUILT_IN_STRCSPN:
6609 target = expand_builtin_strcspn (exp, target, mode);
6610 if (target)
6611 return target;
6612 break;
6614 case BUILT_IN_STRSTR:
6615 target = expand_builtin_strstr (exp, target, mode);
6616 if (target)
6617 return target;
6618 break;
6620 case BUILT_IN_STRPBRK:
6621 target = expand_builtin_strpbrk (exp, target, mode);
6622 if (target)
6623 return target;
6624 break;
6626 case BUILT_IN_INDEX:
6627 case BUILT_IN_STRCHR:
6628 target = expand_builtin_strchr (exp, target, mode);
6629 if (target)
6630 return target;
6631 break;
6633 case BUILT_IN_RINDEX:
6634 case BUILT_IN_STRRCHR:
6635 target = expand_builtin_strrchr (exp, target, mode);
6636 if (target)
6637 return target;
6638 break;
6640 case BUILT_IN_MEMCPY:
6641 target = expand_builtin_memcpy (exp, target, mode);
6642 if (target)
6643 return target;
6644 break;
6646 case BUILT_IN_MEMPCPY:
6647 target = expand_builtin_mempcpy (exp, target, mode);
6648 if (target)
6649 return target;
6650 break;
6652 case BUILT_IN_MEMMOVE:
6653 target = expand_builtin_memmove (exp, target, mode, ignore);
6654 if (target)
6655 return target;
6656 break;
6658 case BUILT_IN_BCOPY:
6659 target = expand_builtin_bcopy (exp, ignore);
6660 if (target)
6661 return target;
6662 break;
6664 case BUILT_IN_MEMSET:
6665 target = expand_builtin_memset (exp, target, mode);
6666 if (target)
6667 return target;
6668 break;
6670 case BUILT_IN_BZERO:
6671 target = expand_builtin_bzero (exp);
6672 if (target)
6673 return target;
6674 break;
6676 case BUILT_IN_STRCMP:
6677 target = expand_builtin_strcmp (exp, target, mode);
6678 if (target)
6679 return target;
6680 break;
6682 case BUILT_IN_STRNCMP:
6683 target = expand_builtin_strncmp (exp, target, mode);
6684 if (target)
6685 return target;
6686 break;
6688 case BUILT_IN_MEMCHR:
6689 target = expand_builtin_memchr (exp, target, mode);
6690 if (target)
6691 return target;
6692 break;
6694 case BUILT_IN_BCMP:
6695 case BUILT_IN_MEMCMP:
6696 target = expand_builtin_memcmp (exp, target, mode);
6697 if (target)
6698 return target;
6699 break;
6701 case BUILT_IN_SETJMP:
6702 /* This should have been lowered to the builtins below. */
6703 gcc_unreachable ();
6705 case BUILT_IN_SETJMP_SETUP:
6706 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6707 and the receiver label. */
6708 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6710 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6711 VOIDmode, EXPAND_NORMAL);
6712 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6713 rtx label_r = label_rtx (label);
6715 /* This is copied from the handling of non-local gotos. */
6716 expand_builtin_setjmp_setup (buf_addr, label_r);
6717 nonlocal_goto_handler_labels
6718 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6719 nonlocal_goto_handler_labels);
6720 /* ??? Do not let expand_label treat us as such since we would
6721 not want to be both on the list of non-local labels and on
6722 the list of forced labels. */
6723 FORCED_LABEL (label) = 0;
6724 return const0_rtx;
6726 break;
6728 case BUILT_IN_SETJMP_DISPATCHER:
6729 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6730 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6732 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6733 rtx label_r = label_rtx (label);
6735 /* Remove the dispatcher label from the list of non-local labels
6736 since the receiver labels have been added to it above. */
6737 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6738 return const0_rtx;
6740 break;
6742 case BUILT_IN_SETJMP_RECEIVER:
6743 /* __builtin_setjmp_receiver is passed the receiver label. */
6744 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6746 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6747 rtx label_r = label_rtx (label);
6749 expand_builtin_setjmp_receiver (label_r);
6750 return const0_rtx;
6752 break;
6754 /* __builtin_longjmp is passed a pointer to an array of five words.
6755 It's similar to the C library longjmp function but works with
6756 __builtin_setjmp above. */
6757 case BUILT_IN_LONGJMP:
6758 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6760 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6761 VOIDmode, EXPAND_NORMAL);
6762 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6764 if (value != const1_rtx)
6766 error ("%<__builtin_longjmp%> second argument must be 1");
6767 return const0_rtx;
6770 expand_builtin_longjmp (buf_addr, value);
6771 return const0_rtx;
6773 break;
6775 case BUILT_IN_NONLOCAL_GOTO:
6776 target = expand_builtin_nonlocal_goto (exp);
6777 if (target)
6778 return target;
6779 break;
6781 /* This updates the setjmp buffer that is its argument with the value
6782 of the current stack pointer. */
6783 case BUILT_IN_UPDATE_SETJMP_BUF:
6784 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6786 rtx buf_addr
6787 = expand_normal (CALL_EXPR_ARG (exp, 0));
6789 expand_builtin_update_setjmp_buf (buf_addr);
6790 return const0_rtx;
6792 break;
6794 case BUILT_IN_TRAP:
6795 expand_builtin_trap ();
6796 return const0_rtx;
6798 case BUILT_IN_PRINTF:
6799 target = expand_builtin_printf (exp, target, mode, false);
6800 if (target)
6801 return target;
6802 break;
6804 case BUILT_IN_PRINTF_UNLOCKED:
6805 target = expand_builtin_printf (exp, target, mode, true);
6806 if (target)
6807 return target;
6808 break;
6810 case BUILT_IN_FPUTS:
6811 target = expand_builtin_fputs (exp, target, false);
6812 if (target)
6813 return target;
6814 break;
6815 case BUILT_IN_FPUTS_UNLOCKED:
6816 target = expand_builtin_fputs (exp, target, true);
6817 if (target)
6818 return target;
6819 break;
6821 case BUILT_IN_FPRINTF:
6822 target = expand_builtin_fprintf (exp, target, mode, false);
6823 if (target)
6824 return target;
6825 break;
6827 case BUILT_IN_FPRINTF_UNLOCKED:
6828 target = expand_builtin_fprintf (exp, target, mode, true);
6829 if (target)
6830 return target;
6831 break;
6833 case BUILT_IN_SPRINTF:
6834 target = expand_builtin_sprintf (exp, target, mode);
6835 if (target)
6836 return target;
6837 break;
6839 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6840 case BUILT_IN_SIGNBITD32:
6841 case BUILT_IN_SIGNBITD64:
6842 case BUILT_IN_SIGNBITD128:
6843 target = expand_builtin_signbit (exp, target);
6844 if (target)
6845 return target;
6846 break;
6848 /* Various hooks for the DWARF 2 __throw routine. */
6849 case BUILT_IN_UNWIND_INIT:
6850 expand_builtin_unwind_init ();
6851 return const0_rtx;
6852 case BUILT_IN_DWARF_CFA:
6853 return virtual_cfa_rtx;
6854 #ifdef DWARF2_UNWIND_INFO
6855 case BUILT_IN_DWARF_SP_COLUMN:
6856 return expand_builtin_dwarf_sp_column ();
6857 case BUILT_IN_INIT_DWARF_REG_SIZES:
6858 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6859 return const0_rtx;
6860 #endif
6861 case BUILT_IN_FROB_RETURN_ADDR:
6862 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6863 case BUILT_IN_EXTRACT_RETURN_ADDR:
6864 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6865 case BUILT_IN_EH_RETURN:
6866 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6867 CALL_EXPR_ARG (exp, 1));
6868 return const0_rtx;
6869 #ifdef EH_RETURN_DATA_REGNO
6870 case BUILT_IN_EH_RETURN_DATA_REGNO:
6871 return expand_builtin_eh_return_data_regno (exp);
6872 #endif
6873 case BUILT_IN_EXTEND_POINTER:
6874 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6876 case BUILT_IN_VA_START:
6877 return expand_builtin_va_start (exp);
6878 case BUILT_IN_VA_END:
6879 return expand_builtin_va_end (exp);
6880 case BUILT_IN_VA_COPY:
6881 return expand_builtin_va_copy (exp);
6882 case BUILT_IN_EXPECT:
6883 return expand_builtin_expect (exp, target);
6884 case BUILT_IN_PREFETCH:
6885 expand_builtin_prefetch (exp);
6886 return const0_rtx;
6888 case BUILT_IN_PROFILE_FUNC_ENTER:
6889 return expand_builtin_profile_func (false);
6890 case BUILT_IN_PROFILE_FUNC_EXIT:
6891 return expand_builtin_profile_func (true);
6893 case BUILT_IN_INIT_TRAMPOLINE:
6894 return expand_builtin_init_trampoline (exp);
6895 case BUILT_IN_ADJUST_TRAMPOLINE:
6896 return expand_builtin_adjust_trampoline (exp);
6898 case BUILT_IN_FORK:
6899 case BUILT_IN_EXECL:
6900 case BUILT_IN_EXECV:
6901 case BUILT_IN_EXECLP:
6902 case BUILT_IN_EXECLE:
6903 case BUILT_IN_EXECVP:
6904 case BUILT_IN_EXECVE:
6905 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6906 if (target)
6907 return target;
6908 break;
6910 case BUILT_IN_FETCH_AND_ADD_1:
6911 case BUILT_IN_FETCH_AND_ADD_2:
6912 case BUILT_IN_FETCH_AND_ADD_4:
6913 case BUILT_IN_FETCH_AND_ADD_8:
6914 case BUILT_IN_FETCH_AND_ADD_16:
6915 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6916 target = expand_builtin_sync_operation (mode, exp, PLUS,
6917 false, target, ignore);
6918 if (target)
6919 return target;
6920 break;
6922 case BUILT_IN_FETCH_AND_SUB_1:
6923 case BUILT_IN_FETCH_AND_SUB_2:
6924 case BUILT_IN_FETCH_AND_SUB_4:
6925 case BUILT_IN_FETCH_AND_SUB_8:
6926 case BUILT_IN_FETCH_AND_SUB_16:
6927 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6928 target = expand_builtin_sync_operation (mode, exp, MINUS,
6929 false, target, ignore);
6930 if (target)
6931 return target;
6932 break;
6934 case BUILT_IN_FETCH_AND_OR_1:
6935 case BUILT_IN_FETCH_AND_OR_2:
6936 case BUILT_IN_FETCH_AND_OR_4:
6937 case BUILT_IN_FETCH_AND_OR_8:
6938 case BUILT_IN_FETCH_AND_OR_16:
6939 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6940 target = expand_builtin_sync_operation (mode, exp, IOR,
6941 false, target, ignore);
6942 if (target)
6943 return target;
6944 break;
6946 case BUILT_IN_FETCH_AND_AND_1:
6947 case BUILT_IN_FETCH_AND_AND_2:
6948 case BUILT_IN_FETCH_AND_AND_4:
6949 case BUILT_IN_FETCH_AND_AND_8:
6950 case BUILT_IN_FETCH_AND_AND_16:
6951 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6952 target = expand_builtin_sync_operation (mode, exp, AND,
6953 false, target, ignore);
6954 if (target)
6955 return target;
6956 break;
6958 case BUILT_IN_FETCH_AND_XOR_1:
6959 case BUILT_IN_FETCH_AND_XOR_2:
6960 case BUILT_IN_FETCH_AND_XOR_4:
6961 case BUILT_IN_FETCH_AND_XOR_8:
6962 case BUILT_IN_FETCH_AND_XOR_16:
6963 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6964 target = expand_builtin_sync_operation (mode, exp, XOR,
6965 false, target, ignore);
6966 if (target)
6967 return target;
6968 break;
6970 case BUILT_IN_FETCH_AND_NAND_1:
6971 case BUILT_IN_FETCH_AND_NAND_2:
6972 case BUILT_IN_FETCH_AND_NAND_4:
6973 case BUILT_IN_FETCH_AND_NAND_8:
6974 case BUILT_IN_FETCH_AND_NAND_16:
6975 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6976 target = expand_builtin_sync_operation (mode, exp, NOT,
6977 false, target, ignore);
6978 if (target)
6979 return target;
6980 break;
6982 case BUILT_IN_ADD_AND_FETCH_1:
6983 case BUILT_IN_ADD_AND_FETCH_2:
6984 case BUILT_IN_ADD_AND_FETCH_4:
6985 case BUILT_IN_ADD_AND_FETCH_8:
6986 case BUILT_IN_ADD_AND_FETCH_16:
6987 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6988 target = expand_builtin_sync_operation (mode, exp, PLUS,
6989 true, target, ignore);
6990 if (target)
6991 return target;
6992 break;
6994 case BUILT_IN_SUB_AND_FETCH_1:
6995 case BUILT_IN_SUB_AND_FETCH_2:
6996 case BUILT_IN_SUB_AND_FETCH_4:
6997 case BUILT_IN_SUB_AND_FETCH_8:
6998 case BUILT_IN_SUB_AND_FETCH_16:
6999 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
7000 target = expand_builtin_sync_operation (mode, exp, MINUS,
7001 true, target, ignore);
7002 if (target)
7003 return target;
7004 break;
7006 case BUILT_IN_OR_AND_FETCH_1:
7007 case BUILT_IN_OR_AND_FETCH_2:
7008 case BUILT_IN_OR_AND_FETCH_4:
7009 case BUILT_IN_OR_AND_FETCH_8:
7010 case BUILT_IN_OR_AND_FETCH_16:
7011 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
7012 target = expand_builtin_sync_operation (mode, exp, IOR,
7013 true, target, ignore);
7014 if (target)
7015 return target;
7016 break;
7018 case BUILT_IN_AND_AND_FETCH_1:
7019 case BUILT_IN_AND_AND_FETCH_2:
7020 case BUILT_IN_AND_AND_FETCH_4:
7021 case BUILT_IN_AND_AND_FETCH_8:
7022 case BUILT_IN_AND_AND_FETCH_16:
7023 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
7024 target = expand_builtin_sync_operation (mode, exp, AND,
7025 true, target, ignore);
7026 if (target)
7027 return target;
7028 break;
7030 case BUILT_IN_XOR_AND_FETCH_1:
7031 case BUILT_IN_XOR_AND_FETCH_2:
7032 case BUILT_IN_XOR_AND_FETCH_4:
7033 case BUILT_IN_XOR_AND_FETCH_8:
7034 case BUILT_IN_XOR_AND_FETCH_16:
7035 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
7036 target = expand_builtin_sync_operation (mode, exp, XOR,
7037 true, target, ignore);
7038 if (target)
7039 return target;
7040 break;
7042 case BUILT_IN_NAND_AND_FETCH_1:
7043 case BUILT_IN_NAND_AND_FETCH_2:
7044 case BUILT_IN_NAND_AND_FETCH_4:
7045 case BUILT_IN_NAND_AND_FETCH_8:
7046 case BUILT_IN_NAND_AND_FETCH_16:
7047 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
7048 target = expand_builtin_sync_operation (mode, exp, NOT,
7049 true, target, ignore);
7050 if (target)
7051 return target;
7052 break;
7054 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
7055 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
7056 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
7057 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
7058 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
7059 if (mode == VOIDmode)
7060 mode = TYPE_MODE (boolean_type_node);
7061 if (!target || !register_operand (target, mode))
7062 target = gen_reg_rtx (mode);
7064 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
7065 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7066 if (target)
7067 return target;
7068 break;
7070 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
7071 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
7072 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
7073 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
7074 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
7075 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
7076 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7077 if (target)
7078 return target;
7079 break;
7081 case BUILT_IN_LOCK_TEST_AND_SET_1:
7082 case BUILT_IN_LOCK_TEST_AND_SET_2:
7083 case BUILT_IN_LOCK_TEST_AND_SET_4:
7084 case BUILT_IN_LOCK_TEST_AND_SET_8:
7085 case BUILT_IN_LOCK_TEST_AND_SET_16:
7086 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
7087 target = expand_builtin_lock_test_and_set (mode, exp, target);
7088 if (target)
7089 return target;
7090 break;
7092 case BUILT_IN_LOCK_RELEASE_1:
7093 case BUILT_IN_LOCK_RELEASE_2:
7094 case BUILT_IN_LOCK_RELEASE_4:
7095 case BUILT_IN_LOCK_RELEASE_8:
7096 case BUILT_IN_LOCK_RELEASE_16:
7097 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
7098 expand_builtin_lock_release (mode, exp);
7099 return const0_rtx;
7101 case BUILT_IN_SYNCHRONIZE:
7102 expand_builtin_synchronize ();
7103 return const0_rtx;
7105 case BUILT_IN_OBJECT_SIZE:
7106 return expand_builtin_object_size (exp);
7108 case BUILT_IN_MEMCPY_CHK:
7109 case BUILT_IN_MEMPCPY_CHK:
7110 case BUILT_IN_MEMMOVE_CHK:
7111 case BUILT_IN_MEMSET_CHK:
7112 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7113 if (target)
7114 return target;
7115 break;
7117 case BUILT_IN_STRCPY_CHK:
7118 case BUILT_IN_STPCPY_CHK:
7119 case BUILT_IN_STRNCPY_CHK:
7120 case BUILT_IN_STRCAT_CHK:
7121 case BUILT_IN_STRNCAT_CHK:
7122 case BUILT_IN_SNPRINTF_CHK:
7123 case BUILT_IN_VSNPRINTF_CHK:
7124 maybe_emit_chk_warning (exp, fcode);
7125 break;
7127 case BUILT_IN_SPRINTF_CHK:
7128 case BUILT_IN_VSPRINTF_CHK:
7129 maybe_emit_sprintf_chk_warning (exp, fcode);
7130 break;
7132 case BUILT_IN_FREE:
7133 maybe_emit_free_warning (exp);
7134 break;
7136 default: /* just do library call, if unknown builtin */
7137 break;
7140 /* The switch statement above can drop through to cause the function
7141 to be called normally. */
7142 return expand_call (exp, target, ignore);
7145 /* Determine whether a tree node represents a call to a built-in
7146 function. If the tree T is a call to a built-in function with
7147 the right number of arguments of the appropriate types, return
7148 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7149 Otherwise the return value is END_BUILTINS. */
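/* Illustrative usage sketch (not taken from the original source;
   is_sqrt_call is a hypothetical helper):

     static bool
     is_sqrt_call (const_tree t)
     {
       switch (builtin_mathfn_code (t))
         {
         CASE_FLT_FN (BUILT_IN_SQRT):
           return true;
         default:
           return false;
         }
     }

   CASE_FLT_FN expands to the float, double and long double case labels,
   so the helper accepts well-formed calls to sqrtf, sqrt and sqrtl.  */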
7151 enum built_in_function
7152 builtin_mathfn_code (const_tree t)
7154 const_tree fndecl, arg, parmlist;
7155 const_tree argtype, parmtype;
7156 const_call_expr_arg_iterator iter;
7158 if (TREE_CODE (t) != CALL_EXPR
7159 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7160 return END_BUILTINS;
7162 fndecl = get_callee_fndecl (t);
7163 if (fndecl == NULL_TREE
7164 || TREE_CODE (fndecl) != FUNCTION_DECL
7165 || ! DECL_BUILT_IN (fndecl)
7166 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7167 return END_BUILTINS;
7169 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7170 init_const_call_expr_arg_iterator (t, &iter);
7171 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7173 /* If a function doesn't take a variable number of arguments,
7174 the last element in the list will have type `void'. */
7175 parmtype = TREE_VALUE (parmlist);
7176 if (VOID_TYPE_P (parmtype))
7178 if (more_const_call_expr_args_p (&iter))
7179 return END_BUILTINS;
7180 return DECL_FUNCTION_CODE (fndecl);
7183 if (! more_const_call_expr_args_p (&iter))
7184 return END_BUILTINS;
7186 arg = next_const_call_expr_arg (&iter);
7187 argtype = TREE_TYPE (arg);
7189 if (SCALAR_FLOAT_TYPE_P (parmtype))
7191 if (! SCALAR_FLOAT_TYPE_P (argtype))
7192 return END_BUILTINS;
7194 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7196 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7197 return END_BUILTINS;
7199 else if (POINTER_TYPE_P (parmtype))
7201 if (! POINTER_TYPE_P (argtype))
7202 return END_BUILTINS;
7204 else if (INTEGRAL_TYPE_P (parmtype))
7206 if (! INTEGRAL_TYPE_P (argtype))
7207 return END_BUILTINS;
7209 else
7210 return END_BUILTINS;
7213 /* Variable-length argument list. */
7214 return DECL_FUNCTION_CODE (fndecl);
7217 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7218 evaluate to a constant. */
7220 static tree
7221 fold_builtin_constant_p (tree arg)
7223 /* We return 1 for a numeric type that's known to be a constant
7224 value at compile-time or for an aggregate type that's a
7225 literal constant. */
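/* For illustration (a hypothetical source-level view, not part of the
   original code):

     __builtin_constant_p (42)     folds to 1   (CONSTANT_CLASS_P)
     __builtin_constant_p ("abc")  folds to 1   (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (p)      folds to 0 for a pointer variable P
                                   (only literals are accepted there)

   while a plain integer variable is left undecided (NULL_TREE) and only
   collapses to 0 later, at RTL expansion time.  */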
7226 STRIP_NOPS (arg);
7228 /* If we know this is a constant, return the constant one. */
7229 if (CONSTANT_CLASS_P (arg)
7230 || (TREE_CODE (arg) == CONSTRUCTOR
7231 && TREE_CONSTANT (arg)))
7232 return integer_one_node;
7233 if (TREE_CODE (arg) == ADDR_EXPR)
7235 tree op = TREE_OPERAND (arg, 0);
7236 if (TREE_CODE (op) == STRING_CST
7237 || (TREE_CODE (op) == ARRAY_REF
7238 && integer_zerop (TREE_OPERAND (op, 1))
7239 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7240 return integer_one_node;
7243 /* If this expression has side effects, show we don't know it to be a
7244 constant. Likewise if it's a pointer or aggregate type since in
7245 those cases we only want literals, since those are only optimized
7246 when generating RTL, not later.
7247 And finally, if we are compiling an initializer, not code, we
7248 need to return a definite result now; there's not going to be any
7249 more optimization done. */
7250 if (TREE_SIDE_EFFECTS (arg)
7251 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7252 || POINTER_TYPE_P (TREE_TYPE (arg))
7253 || cfun == 0
7254 || folding_initializer)
7255 return integer_zero_node;
7257 return NULL_TREE;
7260 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7261 return it as a truthvalue. */
7263 static tree
7264 build_builtin_expect_predicate (tree pred, tree expected)
7266 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7268 fn = built_in_decls[BUILT_IN_EXPECT];
7269 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7270 ret_type = TREE_TYPE (TREE_TYPE (fn));
7271 pred_type = TREE_VALUE (arg_types);
7272 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7274 pred = fold_convert (pred_type, pred);
7275 expected = fold_convert (expected_type, expected);
7276 call_expr = build_call_expr (fn, 2, pred, expected);
7278 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7279 build_int_cst (ret_type, 0));
7282 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7283 NULL_TREE if no simplification is possible. */
7285 static tree
7286 fold_builtin_expect (tree arg0, tree arg1)
7288 tree inner, fndecl;
7289 enum tree_code code;
7291 /* If this is a builtin_expect within a builtin_expect keep the
7292 inner one. See through a comparison against a constant. It
7293 might have been added to create a truthvalue. */
7294 inner = arg0;
7295 if (COMPARISON_CLASS_P (inner)
7296 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7297 inner = TREE_OPERAND (inner, 0);
7299 if (TREE_CODE (inner) == CALL_EXPR
7300 && (fndecl = get_callee_fndecl (inner))
7301 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7302 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7303 return arg0;
7305 /* Distribute the expected value over short-circuiting operators.
7306 See through the cast from truthvalue_type_node to long. */
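/* For example (illustrative), a source-level

     __builtin_expect (a && b, 1)

   is rewritten roughly as

     (__builtin_expect ((long) (a), 1) != 0)
       && (__builtin_expect ((long) (b), 1) != 0)

   so that each short-circuit arm carries the expectation.  */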
7307 inner = arg0;
7308 while (TREE_CODE (inner) == NOP_EXPR
7309 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7310 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7311 inner = TREE_OPERAND (inner, 0);
7313 code = TREE_CODE (inner);
7314 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7316 tree op0 = TREE_OPERAND (inner, 0);
7317 tree op1 = TREE_OPERAND (inner, 1);
7319 op0 = build_builtin_expect_predicate (op0, arg1);
7320 op1 = build_builtin_expect_predicate (op1, arg1);
7321 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7323 return fold_convert (TREE_TYPE (arg0), inner);
7326 /* If the argument isn't invariant then there's nothing else we can do. */
7327 if (!TREE_CONSTANT (arg0))
7328 return NULL_TREE;
7330 /* If we expect that a comparison against the argument will fold to
7331 a constant return the constant. In practice, this means a true
7332 constant or the address of a non-weak symbol. */
7333 inner = arg0;
7334 STRIP_NOPS (inner);
7335 if (TREE_CODE (inner) == ADDR_EXPR)
7339 inner = TREE_OPERAND (inner, 0);
7341 while (TREE_CODE (inner) == COMPONENT_REF
7342 || TREE_CODE (inner) == ARRAY_REF);
7343 if ((TREE_CODE (inner) == VAR_DECL
7344 || TREE_CODE (inner) == FUNCTION_DECL)
7345 && DECL_WEAK (inner))
7346 return NULL_TREE;
7349 /* Otherwise, ARG0 already has the proper type for the return value. */
7350 return arg0;
7353 /* Fold a call to __builtin_classify_type with argument ARG. */
7355 static tree
7356 fold_builtin_classify_type (tree arg)
7358 if (arg == 0)
7359 return build_int_cst (NULL_TREE, no_type_class);
7361 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7364 /* Fold a call to __builtin_strlen with argument ARG. */
7366 static tree
7367 fold_builtin_strlen (tree arg)
7369 if (!validate_arg (arg, POINTER_TYPE))
7370 return NULL_TREE;
7371 else
7373 tree len = c_strlen (arg, 0);
7375 if (len)
7377 /* Convert from the internal "sizetype" type to "size_t". */
7378 if (size_type_node)
7379 len = fold_convert (size_type_node, len);
7380 return len;
7383 return NULL_TREE;
7387 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7389 static tree
7390 fold_builtin_inf (tree type, int warn)
7392 REAL_VALUE_TYPE real;
7394 /* __builtin_inff is intended to be usable to define INFINITY on all
7395 targets. If an infinity is not available, INFINITY expands "to a
7396 positive constant of type float that overflows at translation
7397 time", footnote "In this case, using INFINITY will violate the
7398 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7399 Thus we pedwarn to ensure this constraint violation is
7400 diagnosed. */
7401 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7402 pedwarn (input_location, 0, "target format does not support infinity");
7404 real_inf (&real);
7405 return build_real (type, real);
7408 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7410 static tree
7411 fold_builtin_nan (tree arg, tree type, int quiet)
7413 REAL_VALUE_TYPE real;
7414 const char *str;
7416 if (!validate_arg (arg, POINTER_TYPE))
7417 return NULL_TREE;
7418 str = c_getstr (arg);
7419 if (!str)
7420 return NULL_TREE;
7422 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7423 return NULL_TREE;
7425 return build_real (type, real);
7428 /* Return true if the floating point expression T has an integer value.
7429 We also allow +Inf, -Inf and NaN to be considered integer values. */
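/* For instance (illustrative), each of

     (double) i                  for an integer I   (FLOAT_EXPR)
     floor (x), trunc (x)
     fmin (ceil (a), round (b))

   is recognized as integer valued, while a bare REAL_CST is checked
   with real_isinteger below.  */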
7431 static bool
7432 integer_valued_real_p (tree t)
7434 switch (TREE_CODE (t))
7436 case FLOAT_EXPR:
7437 return true;
7439 case ABS_EXPR:
7440 case SAVE_EXPR:
7441 return integer_valued_real_p (TREE_OPERAND (t, 0));
7443 case COMPOUND_EXPR:
7444 case MODIFY_EXPR:
7445 case BIND_EXPR:
7446 return integer_valued_real_p (TREE_OPERAND (t, 1));
7448 case PLUS_EXPR:
7449 case MINUS_EXPR:
7450 case MULT_EXPR:
7451 case MIN_EXPR:
7452 case MAX_EXPR:
7453 return integer_valued_real_p (TREE_OPERAND (t, 0))
7454 && integer_valued_real_p (TREE_OPERAND (t, 1));
7456 case COND_EXPR:
7457 return integer_valued_real_p (TREE_OPERAND (t, 1))
7458 && integer_valued_real_p (TREE_OPERAND (t, 2));
7460 case REAL_CST:
7461 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7463 case NOP_EXPR:
7465 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7466 if (TREE_CODE (type) == INTEGER_TYPE)
7467 return true;
7468 if (TREE_CODE (type) == REAL_TYPE)
7469 return integer_valued_real_p (TREE_OPERAND (t, 0));
7470 break;
7473 case CALL_EXPR:
7474 switch (builtin_mathfn_code (t))
7476 CASE_FLT_FN (BUILT_IN_CEIL):
7477 CASE_FLT_FN (BUILT_IN_FLOOR):
7478 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7479 CASE_FLT_FN (BUILT_IN_RINT):
7480 CASE_FLT_FN (BUILT_IN_ROUND):
7481 CASE_FLT_FN (BUILT_IN_TRUNC):
7482 return true;
7484 CASE_FLT_FN (BUILT_IN_FMIN):
7485 CASE_FLT_FN (BUILT_IN_FMAX):
7486 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7487 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7489 default:
7490 break;
7492 break;
7494 default:
7495 break;
7497 return false;
7500 /* FNDECL is assumed to be a builtin where truncation can be propagated
7501 across (for instance floor((double)f) == (double)floorf (f)).
7502 Do the transformation for a call with argument ARG. */
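/* Illustrative example, assuming float is narrower than double:

     floor ((double) f)   becomes   (double) floorf (f)

   i.e. the rounding is done in the narrower type and the widening
   conversion is applied to the already-integral result.  */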
7504 static tree
7505 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7507 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7509 if (!validate_arg (arg, REAL_TYPE))
7510 return NULL_TREE;
7512 /* Integer rounding functions are idempotent. */
7513 if (fcode == builtin_mathfn_code (arg))
7514 return arg;
7516 /* If argument is already integer valued, and we don't need to worry
7517 about setting errno, there's no need to perform rounding. */
7518 if (! flag_errno_math && integer_valued_real_p (arg))
7519 return arg;
7521 if (optimize)
7523 tree arg0 = strip_float_extensions (arg);
7524 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7525 tree newtype = TREE_TYPE (arg0);
7526 tree decl;
7528 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7529 && (decl = mathfn_built_in (newtype, fcode)))
7530 return fold_convert (ftype,
7531 build_call_expr (decl, 1,
7532 fold_convert (newtype, arg0)));
7534 return NULL_TREE;
7537 /* FNDECL is assumed to be builtin which can narrow the FP type of
7538 the argument, for instance lround((double)f) -> lroundf (f).
7539 Do the transformation for a call with argument ARG. */
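/* Illustrative examples (not from the original code):

     lround ((double) f)   becomes   lroundf (f)
     llround (x)           becomes   lround (x)

   the latter only on targets where long long and long have the same
   precision, as checked below.  */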
7541 static tree
7542 fold_fixed_mathfn (tree fndecl, tree arg)
7544 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7546 if (!validate_arg (arg, REAL_TYPE))
7547 return NULL_TREE;
7549 /* If argument is already integer valued, and we don't need to worry
7550 about setting errno, there's no need to perform rounding. */
7551 if (! flag_errno_math && integer_valued_real_p (arg))
7552 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7554 if (optimize)
7556 tree ftype = TREE_TYPE (arg);
7557 tree arg0 = strip_float_extensions (arg);
7558 tree newtype = TREE_TYPE (arg0);
7559 tree decl;
7561 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7562 && (decl = mathfn_built_in (newtype, fcode)))
7563 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7566 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7567 sizeof (long long) == sizeof (long). */
7568 if (TYPE_PRECISION (long_long_integer_type_node)
7569 == TYPE_PRECISION (long_integer_type_node))
7571 tree newfn = NULL_TREE;
7572 switch (fcode)
7574 CASE_FLT_FN (BUILT_IN_LLCEIL):
7575 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7576 break;
7578 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7579 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7580 break;
7582 CASE_FLT_FN (BUILT_IN_LLROUND):
7583 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7584 break;
7586 CASE_FLT_FN (BUILT_IN_LLRINT):
7587 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7588 break;
7590 default:
7591 break;
7594 if (newfn)
7596 tree newcall = build_call_expr (newfn, 1, arg);
7597 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7601 return NULL_TREE;
7604 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7605 return type. Return NULL_TREE if no simplification can be made. */
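/* A rough source-level sketch of the rewrites below (illustrative only;
   the unsafe-math cases need -funsafe-math-optimizations):

     cabs (x + 0i)                ->  fabs (x)
     cabs (x + xi)                ->  fabs (x) * sqrt (2)
     cabs (-z), cabs (conj (z))   ->  cabs (z)
     cabs (z)                     ->  sqrt (r*r + i*i)   when optimizing
                                      for speed, with R and I the saved
                                      real and imaginary parts of Z.  */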
7607 static tree
7608 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7610 tree res;
7612 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7613 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7614 return NULL_TREE;
7616 /* Calculate the result when the argument is a constant. */
7617 if (TREE_CODE (arg) == COMPLEX_CST
7618 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7619 type, mpfr_hypot)))
7620 return res;
7622 if (TREE_CODE (arg) == COMPLEX_EXPR)
7624 tree real = TREE_OPERAND (arg, 0);
7625 tree imag = TREE_OPERAND (arg, 1);
7627 /* If either part is zero, cabs is fabs of the other. */
7628 if (real_zerop (real))
7629 return fold_build1 (ABS_EXPR, type, imag);
7630 if (real_zerop (imag))
7631 return fold_build1 (ABS_EXPR, type, real);
7633 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7634 if (flag_unsafe_math_optimizations
7635 && operand_equal_p (real, imag, OEP_PURE_SAME))
7637 const REAL_VALUE_TYPE sqrt2_trunc
7638 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7639 STRIP_NOPS (real);
7640 return fold_build2 (MULT_EXPR, type,
7641 fold_build1 (ABS_EXPR, type, real),
7642 build_real (type, sqrt2_trunc));
7646 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7647 if (TREE_CODE (arg) == NEGATE_EXPR
7648 || TREE_CODE (arg) == CONJ_EXPR)
7649 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7651 /* Don't do this when optimizing for size. */
7652 if (flag_unsafe_math_optimizations
7653 && optimize && optimize_function_for_speed_p (cfun))
7655 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7657 if (sqrtfn != NULL_TREE)
7659 tree rpart, ipart, result;
7661 arg = builtin_save_expr (arg);
7663 rpart = fold_build1 (REALPART_EXPR, type, arg);
7664 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7666 rpart = builtin_save_expr (rpart);
7667 ipart = builtin_save_expr (ipart);
7669 result = fold_build2 (PLUS_EXPR, type,
7670 fold_build2 (MULT_EXPR, type,
7671 rpart, rpart),
7672 fold_build2 (MULT_EXPR, type,
7673 ipart, ipart));
7675 return build_call_expr (sqrtfn, 1, result);
7679 return NULL_TREE;
7682 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7683 Return NULL_TREE if no simplification can be made. */
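/* The unsafe-math rewrites below rely on the identities (illustrative):

     sqrt (expN (x))    =  expN (x * 0.5)
     sqrt (sqrt (x))    =  pow (x, 1/4),   sqrt (cbrt (x)) = pow (x, 1/6)
     sqrt (pow (x, y))  =  pow (fabs (x), y * 0.5)

   each taken on the domains where both sides are defined.  */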
7685 static tree
7686 fold_builtin_sqrt (tree arg, tree type)
7689 enum built_in_function fcode;
7690 tree res;
7692 if (!validate_arg (arg, REAL_TYPE))
7693 return NULL_TREE;
7695 /* Calculate the result when the argument is a constant. */
7696 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7697 return res;
7699 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7700 fcode = builtin_mathfn_code (arg);
7701 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7703 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7704 arg = fold_build2 (MULT_EXPR, type,
7705 CALL_EXPR_ARG (arg, 0),
7706 build_real (type, dconsthalf));
7707 return build_call_expr (expfn, 1, arg);
7710 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7711 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7713 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7715 if (powfn)
7717 tree arg0 = CALL_EXPR_ARG (arg, 0);
7718 tree tree_root;
7719 /* The inner root was either sqrt or cbrt. */
7720 /* This was a conditional expression but it triggered a bug
7721 in Sun C 5.5. */
7722 REAL_VALUE_TYPE dconstroot;
7723 if (BUILTIN_SQRT_P (fcode))
7724 dconstroot = dconsthalf;
7725 else
7726 dconstroot = dconst_third ();
7728 /* Adjust for the outer root. */
7729 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7730 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7731 tree_root = build_real (type, dconstroot);
7732 return build_call_expr (powfn, 2, arg0, tree_root);
7736 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7737 if (flag_unsafe_math_optimizations
7738 && (fcode == BUILT_IN_POW
7739 || fcode == BUILT_IN_POWF
7740 || fcode == BUILT_IN_POWL))
7742 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7743 tree arg0 = CALL_EXPR_ARG (arg, 0);
7744 tree arg1 = CALL_EXPR_ARG (arg, 1);
7745 tree narg1;
7746 if (!tree_expr_nonnegative_p (arg0))
7747 arg0 = build1 (ABS_EXPR, type, arg0);
7748 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7749 build_real (type, dconsthalf));
7750 return build_call_expr (powfn, 2, arg0, narg1);
7753 return NULL_TREE;
7756 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7757 Return NULL_TREE if no simplification can be made. */
7759 static tree
7760 fold_builtin_cbrt (tree arg, tree type)
7762 const enum built_in_function fcode = builtin_mathfn_code (arg);
7763 tree res;
7765 if (!validate_arg (arg, REAL_TYPE))
7766 return NULL_TREE;
7768 /* Calculate the result when the argument is a constant. */
7769 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7770 return res;
7772 if (flag_unsafe_math_optimizations)
7774 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7775 if (BUILTIN_EXPONENT_P (fcode))
7777 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7778 const REAL_VALUE_TYPE third_trunc =
7779 real_value_truncate (TYPE_MODE (type), dconst_third ());
7780 arg = fold_build2 (MULT_EXPR, type,
7781 CALL_EXPR_ARG (arg, 0),
7782 build_real (type, third_trunc));
7783 return build_call_expr (expfn, 1, arg);
7786 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7787 if (BUILTIN_SQRT_P (fcode))
7789 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7791 if (powfn)
7793 tree arg0 = CALL_EXPR_ARG (arg, 0);
7794 tree tree_root;
7795 REAL_VALUE_TYPE dconstroot = dconst_third ();
7797 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7798 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7799 tree_root = build_real (type, dconstroot);
7800 return build_call_expr (powfn, 2, arg0, tree_root);
7804 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7805 if (BUILTIN_CBRT_P (fcode))
7807 tree arg0 = CALL_EXPR_ARG (arg, 0);
7808 if (tree_expr_nonnegative_p (arg0))
7810 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7812 if (powfn)
7814 tree tree_root;
7815 REAL_VALUE_TYPE dconstroot;
7817 real_arithmetic (&dconstroot, MULT_EXPR,
7818 dconst_third_ptr (), dconst_third_ptr ());
7819 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7820 tree_root = build_real (type, dconstroot);
7821 return build_call_expr (powfn, 2, arg0, tree_root);
7826 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7827 if (fcode == BUILT_IN_POW
7828 || fcode == BUILT_IN_POWF
7829 || fcode == BUILT_IN_POWL)
7831 tree arg00 = CALL_EXPR_ARG (arg, 0);
7832 tree arg01 = CALL_EXPR_ARG (arg, 1);
7833 if (tree_expr_nonnegative_p (arg00))
7835 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7836 const REAL_VALUE_TYPE dconstroot
7837 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7838 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7839 build_real (type, dconstroot));
7840 return build_call_expr (powfn, 2, arg00, narg01);
7844 return NULL_TREE;
7847 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7848 TYPE is the type of the return value. Return NULL_TREE if no
7849 simplification can be made. */
7851 static tree
7852 fold_builtin_cos (tree arg, tree type, tree fndecl)
7854 tree res, narg;
7856 if (!validate_arg (arg, REAL_TYPE))
7857 return NULL_TREE;
7859 /* Calculate the result when the argument is a constant. */
7860 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7861 return res;
7863 /* Optimize cos(-x) into cos (x). */
7864 if ((narg = fold_strip_sign_ops (arg)))
7865 return build_call_expr (fndecl, 1, narg);
7867 return NULL_TREE;
7870 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7871 Return NULL_TREE if no simplification can be made. */
7873 static tree
7874 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7876 if (validate_arg (arg, REAL_TYPE))
7878 tree res, narg;
7880 /* Calculate the result when the argument is a constant. */
7881 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7882 return res;
7884 /* Optimize cosh(-x) into cosh (x). */
7885 if ((narg = fold_strip_sign_ops (arg)))
7886 return build_call_expr (fndecl, 1, narg);
7889 return NULL_TREE;
7892 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7893 argument ARG. TYPE is the type of the return value. Return
7894 NULL_TREE if no simplification can be made. */
7896 static tree
7897 fold_builtin_ccos (tree arg, tree type ATTRIBUTE_UNUSED, tree fndecl,
7898 bool hyper ATTRIBUTE_UNUSED)
7900 if (validate_arg (arg, COMPLEX_TYPE)
7901 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7903 tree tmp;
7905 #ifdef HAVE_mpc
7906 /* Calculate the result when the argument is a constant. */
7907 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7908 return tmp;
7909 #endif
7911 /* Optimize fn(-x) into fn(x). */
7912 if ((tmp = fold_strip_sign_ops (arg)))
7913 return build_call_expr (fndecl, 1, tmp);
7916 return NULL_TREE;
7919 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7920 Return NULL_TREE if no simplification can be made. */
7922 static tree
7923 fold_builtin_tan (tree arg, tree type)
7925 enum built_in_function fcode;
7926 tree res;
7928 if (!validate_arg (arg, REAL_TYPE))
7929 return NULL_TREE;
7931 /* Calculate the result when the argument is a constant. */
7932 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7933 return res;
7935 /* Optimize tan(atan(x)) = x. */
7936 fcode = builtin_mathfn_code (arg);
7937 if (flag_unsafe_math_optimizations
7938 && (fcode == BUILT_IN_ATAN
7939 || fcode == BUILT_IN_ATANF
7940 || fcode == BUILT_IN_ATANL))
7941 return CALL_EXPR_ARG (arg, 0);
7943 return NULL_TREE;
7946 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7947 NULL_TREE if no simplification can be made. */
7949 static tree
7950 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7952 tree type;
7953 tree res, fn, call;
7955 if (!validate_arg (arg0, REAL_TYPE)
7956 || !validate_arg (arg1, POINTER_TYPE)
7957 || !validate_arg (arg2, POINTER_TYPE))
7958 return NULL_TREE;
7960 type = TREE_TYPE (arg0);
7962 /* Calculate the result when the argument is a constant. */
7963 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7964 return res;
7966 /* Canonicalize sincos to cexpi. */
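/* Illustrative source-level view of the rewrite below, where cexpi
   stands for the internal __builtin_cexpi and a C99 runtime is assumed:

     sincos (x, &s, &c);

   becomes, with the single cexpi call saved in a temporary,

     tmp = cexpi (x);  s = cimag (tmp);  c = creal (tmp);

   since cexpi (x) == cos (x) + I*sin (x).  */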
7967 if (!TARGET_C99_FUNCTIONS)
7968 return NULL_TREE;
7969 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7970 if (!fn)
7971 return NULL_TREE;
7973 call = build_call_expr (fn, 1, arg0);
7974 call = builtin_save_expr (call);
7976 return build2 (COMPOUND_EXPR, void_type_node,
7977 build2 (MODIFY_EXPR, void_type_node,
7978 build_fold_indirect_ref (arg1),
7979 build1 (IMAGPART_EXPR, type, call)),
7980 build2 (MODIFY_EXPR, void_type_node,
7981 build_fold_indirect_ref (arg2),
7982 build1 (REALPART_EXPR, type, call)));
7985 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7986 NULL_TREE if no simplification can be made. */
7988 static tree
7989 fold_builtin_cexp (tree arg0, tree type)
7991 tree rtype;
7992 tree realp, imagp, ifn;
7993 #ifdef HAVE_mpc
7994 tree res;
7995 #endif
7997 if (!validate_arg (arg0, COMPLEX_TYPE)
7998 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
7999 return NULL_TREE;
8001 #ifdef HAVE_mpc
8002 /* Calculate the result when the argument is a constant. */
8003 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8004 return res;
8005 #endif
8007 rtype = TREE_TYPE (TREE_TYPE (arg0));
8009 /* In case we can figure out the real part of arg0 and it is constant zero
8010 fold to cexpi. */
8011 if (!TARGET_C99_FUNCTIONS)
8012 return NULL_TREE;
8013 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8014 if (!ifn)
8015 return NULL_TREE;
8017 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
8018 && real_zerop (realp))
8020 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
8021 return build_call_expr (ifn, 1, narg);
8024 /* In case we can easily decompose real and imaginary parts split cexp
8025 to exp (r) * cexpi (i). */
8026 if (flag_unsafe_math_optimizations
8027 && realp)
8029 tree rfn, rcall, icall;
8031 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8032 if (!rfn)
8033 return NULL_TREE;
8035 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
8036 if (!imagp)
8037 return NULL_TREE;
8039 icall = build_call_expr (ifn, 1, imagp);
8040 icall = builtin_save_expr (icall);
8041 rcall = build_call_expr (rfn, 1, realp);
8042 rcall = builtin_save_expr (rcall);
8043 return fold_build2 (COMPLEX_EXPR, type,
8044 fold_build2 (MULT_EXPR, rtype,
8045 rcall,
8046 fold_build1 (REALPART_EXPR, rtype, icall)),
8047 fold_build2 (MULT_EXPR, rtype,
8048 rcall,
8049 fold_build1 (IMAGPART_EXPR, rtype, icall)));
8052 return NULL_TREE;
8055 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8056 Return NULL_TREE if no simplification can be made. */
8058 static tree
8059 fold_builtin_trunc (tree fndecl, tree arg)
8061 if (!validate_arg (arg, REAL_TYPE))
8062 return NULL_TREE;
8064 /* Optimize trunc of constant value. */
8065 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8067 REAL_VALUE_TYPE r, x;
8068 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8070 x = TREE_REAL_CST (arg);
8071 real_trunc (&r, TYPE_MODE (type), &x);
8072 return build_real (type, r);
8075 return fold_trunc_transparent_mathfn (fndecl, arg);
8078 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8079 Return NULL_TREE if no simplification can be made. */
8081 static tree
8082 fold_builtin_floor (tree fndecl, tree arg)
8084 if (!validate_arg (arg, REAL_TYPE))
8085 return NULL_TREE;
8087 /* Optimize floor of constant value. */
8088 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8090 REAL_VALUE_TYPE x;
8092 x = TREE_REAL_CST (arg);
8093 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8095 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8096 REAL_VALUE_TYPE r;
8098 real_floor (&r, TYPE_MODE (type), &x);
8099 return build_real (type, r);
8103 /* Fold floor (x) where x is nonnegative to trunc (x). */
8104 if (tree_expr_nonnegative_p (arg))
8106 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8107 if (truncfn)
8108 return build_call_expr (truncfn, 1, arg);
8111 return fold_trunc_transparent_mathfn (fndecl, arg);
8114 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8115 Return NULL_TREE if no simplification can be made. */
8117 static tree
8118 fold_builtin_ceil (tree fndecl, tree arg)
8120 if (!validate_arg (arg, REAL_TYPE))
8121 return NULL_TREE;
8123 /* Optimize ceil of constant value. */
8124 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8126 REAL_VALUE_TYPE x;
8128 x = TREE_REAL_CST (arg);
8129 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8131 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8132 REAL_VALUE_TYPE r;
8134 real_ceil (&r, TYPE_MODE (type), &x);
8135 return build_real (type, r);
8139 return fold_trunc_transparent_mathfn (fndecl, arg);
8142 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8143 Return NULL_TREE if no simplification can be made. */
8145 static tree
8146 fold_builtin_round (tree fndecl, tree arg)
8148 if (!validate_arg (arg, REAL_TYPE))
8149 return NULL_TREE;
8151 /* Optimize round of constant value. */
8152 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8154 REAL_VALUE_TYPE x;
8156 x = TREE_REAL_CST (arg);
8157 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8159 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8160 REAL_VALUE_TYPE r;
8162 real_round (&r, TYPE_MODE (type), &x);
8163 return build_real (type, r);
8167 return fold_trunc_transparent_mathfn (fndecl, arg);
8170 /* Fold function call to builtin lround, lroundf or lroundl (or the
8171 corresponding long long versions) and other rounding functions. ARG
8172 is the argument to the call. Return NULL_TREE if no simplification
8173 can be made. */
8175 static tree
8176 fold_builtin_int_roundingfn (tree fndecl, tree arg)
8178 if (!validate_arg (arg, REAL_TYPE))
8179 return NULL_TREE;
8181 /* Optimize lround of constant value. */
8182 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8184 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8186 if (real_isfinite (&x))
8188 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8189 tree ftype = TREE_TYPE (arg);
8190 unsigned HOST_WIDE_INT lo2;
8191 HOST_WIDE_INT hi, lo;
8192 REAL_VALUE_TYPE r;
8194 switch (DECL_FUNCTION_CODE (fndecl))
8196 CASE_FLT_FN (BUILT_IN_LFLOOR):
8197 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8198 real_floor (&r, TYPE_MODE (ftype), &x);
8199 break;
8201 CASE_FLT_FN (BUILT_IN_LCEIL):
8202 CASE_FLT_FN (BUILT_IN_LLCEIL):
8203 real_ceil (&r, TYPE_MODE (ftype), &x);
8204 break;
8206 CASE_FLT_FN (BUILT_IN_LROUND):
8207 CASE_FLT_FN (BUILT_IN_LLROUND):
8208 real_round (&r, TYPE_MODE (ftype), &x);
8209 break;
8211 default:
8212 gcc_unreachable ();
8215 REAL_VALUE_TO_INT (&lo, &hi, r);
8216 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8217 return build_int_cst_wide (itype, lo2, hi);
8221 switch (DECL_FUNCTION_CODE (fndecl))
8223 CASE_FLT_FN (BUILT_IN_LFLOOR):
8224 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8225 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8226 if (tree_expr_nonnegative_p (arg))
8227 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8228 arg);
8229 break;
8230 default:;
8233 return fold_fixed_mathfn (fndecl, arg);
8236 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8237 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8238 the argument to the call. Return NULL_TREE if no simplification can
8239 be made. */
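/* Illustrative sketch of the constant folding done below, for a value
   that fits in one HOST_WIDE_INT (popcount_of is a hypothetical helper,
   not part of GCC):

     static int
     popcount_of (unsigned HOST_WIDE_INT lo)
     {
       int result = 0;
       while (lo)
         {
           result++;
           lo &= lo - 1;
         }
       return result;
     }

   Each `lo &= lo - 1' clears the lowest set bit, so for example
   __builtin_popcount (0xf0) folds to 4 and __builtin_parity (0xf0)
   folds to 0 (the same count reduced modulo 2).  */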
8241 static tree
8242 fold_builtin_bitop (tree fndecl, tree arg)
8244 if (!validate_arg (arg, INTEGER_TYPE))
8245 return NULL_TREE;
8247 /* Optimize for constant argument. */
8248 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8250 HOST_WIDE_INT hi, width, result;
8251 unsigned HOST_WIDE_INT lo;
8252 tree type;
8254 type = TREE_TYPE (arg);
8255 width = TYPE_PRECISION (type);
8256 lo = TREE_INT_CST_LOW (arg);
8258 /* Clear all the bits that are beyond the type's precision. */
8259 if (width > HOST_BITS_PER_WIDE_INT)
8261 hi = TREE_INT_CST_HIGH (arg);
8262 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8263 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8265 else
8267 hi = 0;
8268 if (width < HOST_BITS_PER_WIDE_INT)
8269 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8272 switch (DECL_FUNCTION_CODE (fndecl))
8274 CASE_INT_FN (BUILT_IN_FFS):
8275 if (lo != 0)
8276 result = exact_log2 (lo & -lo) + 1;
8277 else if (hi != 0)
8278 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8279 else
8280 result = 0;
8281 break;
8283 CASE_INT_FN (BUILT_IN_CLZ):
8284 if (hi != 0)
8285 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8286 else if (lo != 0)
8287 result = width - floor_log2 (lo) - 1;
8288 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8289 result = width;
8290 break;
8292 CASE_INT_FN (BUILT_IN_CTZ):
8293 if (lo != 0)
8294 result = exact_log2 (lo & -lo);
8295 else if (hi != 0)
8296 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8297 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8298 result = width;
8299 break;
8301 CASE_INT_FN (BUILT_IN_POPCOUNT):
8302 result = 0;
8303 while (lo)
8304 result++, lo &= lo - 1;
8305 while (hi)
8306 result++, hi &= hi - 1;
8307 break;
8309 CASE_INT_FN (BUILT_IN_PARITY):
8310 result = 0;
8311 while (lo)
8312 result++, lo &= lo - 1;
8313 while (hi)
8314 result++, hi &= hi - 1;
8315 result &= 1;
8316 break;
8318 default:
8319 gcc_unreachable ();
8322 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8325 return NULL_TREE;
8328 /* Fold function call to builtin_bswap and the long and long long
8329 variants. Return NULL_TREE if no simplification can be made. */
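/* For instance (illustrative), __builtin_bswap32 (0x12345678) folds to
   0x78563412: the loop below peels one byte at a time from bit position
   S of the input and deposits it at the mirrored position WIDTH - S - 8
   of the result.  */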
8330 static tree
8331 fold_builtin_bswap (tree fndecl, tree arg)
8333 if (! validate_arg (arg, INTEGER_TYPE))
8334 return NULL_TREE;
8336 /* Optimize constant value. */
8337 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8339 HOST_WIDE_INT hi, width, r_hi = 0;
8340 unsigned HOST_WIDE_INT lo, r_lo = 0;
8341 tree type;
8343 type = TREE_TYPE (arg);
8344 width = TYPE_PRECISION (type);
8345 lo = TREE_INT_CST_LOW (arg);
8346 hi = TREE_INT_CST_HIGH (arg);
8348 switch (DECL_FUNCTION_CODE (fndecl))
8350 case BUILT_IN_BSWAP32:
8351 case BUILT_IN_BSWAP64:
8353 int s;
8355 for (s = 0; s < width; s += 8)
8357 int d = width - s - 8;
8358 unsigned HOST_WIDE_INT byte;
8360 if (s < HOST_BITS_PER_WIDE_INT)
8361 byte = (lo >> s) & 0xff;
8362 else
8363 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8365 if (d < HOST_BITS_PER_WIDE_INT)
8366 r_lo |= byte << d;
8367 else
8368 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8372 break;
8374 default:
8375 gcc_unreachable ();
8378 if (width < HOST_BITS_PER_WIDE_INT)
8379 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8380 else
8381 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8384 return NULL_TREE;
8387 /* A subroutine of fold_builtin to fold the various logarithmic
8388 functions. Return NULL_TREE if no simplification can be made.
8389 FUNC is the corresponding MPFR logarithm function. */
8391 static tree
8392 fold_builtin_logarithm (tree fndecl, tree arg,
8393 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8395 if (validate_arg (arg, REAL_TYPE))
8397 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8398 tree res;
8399 const enum built_in_function fcode = builtin_mathfn_code (arg);
8401 /* Calculate the result when the argument is a constant. */
8402 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8403 return res;
8405 /* Special case, optimize logN(expN(x)) = x. */
8406 if (flag_unsafe_math_optimizations
8407 && ((func == mpfr_log
8408 && (fcode == BUILT_IN_EXP
8409 || fcode == BUILT_IN_EXPF
8410 || fcode == BUILT_IN_EXPL))
8411 || (func == mpfr_log2
8412 && (fcode == BUILT_IN_EXP2
8413 || fcode == BUILT_IN_EXP2F
8414 || fcode == BUILT_IN_EXP2L))
8415 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8416 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8418 /* Optimize logN(func()) for various exponential functions. We
8419 want to determine the value "x" and the power "exponent" in
8420 order to transform logN(x**exponent) into exponent*logN(x). */
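/* For example (illustrative):

     log (pow (x, 3.0))   ->  3.0 * log (x)
     log (sqrt (x))       ->  0.5 * log (x)
     log2 (exp2 (x))      ->  x              (the special case above)  */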
8421 if (flag_unsafe_math_optimizations)
8423 tree exponent = 0, x = 0;
8425 switch (fcode)
8427 CASE_FLT_FN (BUILT_IN_EXP):
8428 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8429 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8430 dconst_e ()));
8431 exponent = CALL_EXPR_ARG (arg, 0);
8432 break;
8433 CASE_FLT_FN (BUILT_IN_EXP2):
8434 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8435 x = build_real (type, dconst2);
8436 exponent = CALL_EXPR_ARG (arg, 0);
8437 break;
8438 CASE_FLT_FN (BUILT_IN_EXP10):
8439 CASE_FLT_FN (BUILT_IN_POW10):
8440 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8442 REAL_VALUE_TYPE dconst10;
8443 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8444 x = build_real (type, dconst10);
8446 exponent = CALL_EXPR_ARG (arg, 0);
8447 break;
8448 CASE_FLT_FN (BUILT_IN_SQRT):
8449 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8450 x = CALL_EXPR_ARG (arg, 0);
8451 exponent = build_real (type, dconsthalf);
8452 break;
8453 CASE_FLT_FN (BUILT_IN_CBRT):
8454 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8455 x = CALL_EXPR_ARG (arg, 0);
8456 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8457 dconst_third ()));
8458 break;
8459 CASE_FLT_FN (BUILT_IN_POW):
8460 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8461 x = CALL_EXPR_ARG (arg, 0);
8462 exponent = CALL_EXPR_ARG (arg, 1);
8463 break;
8464 default:
8465 break;
8468 /* Now perform the optimization. */
8469 if (x && exponent)
8471 tree logfn = build_call_expr (fndecl, 1, x);
8472 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8477 return NULL_TREE;
8480 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8481 NULL_TREE if no simplification can be made. */
8483 static tree
8484 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8486 tree res, narg0, narg1;
8488 if (!validate_arg (arg0, REAL_TYPE)
8489 || !validate_arg (arg1, REAL_TYPE))
8490 return NULL_TREE;
8492 /* Calculate the result when the argument is a constant. */
8493 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8494 return res;
8496 /* If either argument to hypot has a negate or abs, strip that off.
8497 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8498 narg0 = fold_strip_sign_ops (arg0);
8499 narg1 = fold_strip_sign_ops (arg1);
8500 if (narg0 || narg1)
8502 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8503 narg1 ? narg1 : arg1);
8506 /* If either argument is zero, hypot is fabs of the other. */
8507 if (real_zerop (arg0))
8508 return fold_build1 (ABS_EXPR, type, arg1);
8509 else if (real_zerop (arg1))
8510 return fold_build1 (ABS_EXPR, type, arg0);
8512 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8513 if (flag_unsafe_math_optimizations
8514 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8516 const REAL_VALUE_TYPE sqrt2_trunc
8517 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8518 return fold_build2 (MULT_EXPR, type,
8519 fold_build1 (ABS_EXPR, type, arg0),
8520 build_real (type, sqrt2_trunc));
8523 return NULL_TREE;
8527 /* Fold a builtin function call to pow, powf, or powl. Return
8528 NULL_TREE if no simplification can be made. */
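/* A few of the rewrites performed below, shown at the source level
   (illustrative only; the sqrt, compose and sign-stripping cases need
   -funsafe-math-optimizations):

     pow (x, 0.0)         ->  1.0
     pow (x, 1.0)         ->  x
     pow (x, -1.0)        ->  1.0 / x
     pow (x, 0.5)         ->  sqrt (x)
     pow (2.0, 10.0)      ->  1024.0          (evaluated at compile time)
     pow (pow (x, y), z)  ->  pow (x, y * z)  */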
8529 static tree
8530 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8532 tree res;
8534 if (!validate_arg (arg0, REAL_TYPE)
8535 || !validate_arg (arg1, REAL_TYPE))
8536 return NULL_TREE;
8538 /* Calculate the result when the argument is a constant. */
8539 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8540 return res;
8542 /* Optimize pow(1.0,y) = 1.0. */
8543 if (real_onep (arg0))
8544 return omit_one_operand (type, build_real (type, dconst1), arg1);
8546 if (TREE_CODE (arg1) == REAL_CST
8547 && !TREE_OVERFLOW (arg1))
8549 REAL_VALUE_TYPE cint;
8550 REAL_VALUE_TYPE c;
8551 HOST_WIDE_INT n;
8553 c = TREE_REAL_CST (arg1);
8555 /* Optimize pow(x,0.0) = 1.0. */
8556 if (REAL_VALUES_EQUAL (c, dconst0))
8557 return omit_one_operand (type, build_real (type, dconst1),
8558 arg0);
8560 /* Optimize pow(x,1.0) = x. */
8561 if (REAL_VALUES_EQUAL (c, dconst1))
8562 return arg0;
8564 /* Optimize pow(x,-1.0) = 1.0/x. */
8565 if (REAL_VALUES_EQUAL (c, dconstm1))
8566 return fold_build2 (RDIV_EXPR, type,
8567 build_real (type, dconst1), arg0);
8569 /* Optimize pow(x,0.5) = sqrt(x). */
8570 if (flag_unsafe_math_optimizations
8571 && REAL_VALUES_EQUAL (c, dconsthalf))
8573 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8575 if (sqrtfn != NULL_TREE)
8576 return build_call_expr (sqrtfn, 1, arg0);
8579 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8580 if (flag_unsafe_math_optimizations)
8582 const REAL_VALUE_TYPE dconstroot
8583 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8585 if (REAL_VALUES_EQUAL (c, dconstroot))
8587 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8588 if (cbrtfn != NULL_TREE)
8589 return build_call_expr (cbrtfn, 1, arg0);
8593 /* Check for an integer exponent. */
8594 n = real_to_integer (&c);
8595 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8596 if (real_identical (&c, &cint))
8598 /* Attempt to evaluate pow at compile-time, unless this should
8599 raise an exception. */
8600 if (TREE_CODE (arg0) == REAL_CST
8601 && !TREE_OVERFLOW (arg0)
8602 && (n > 0
8603 || (!flag_trapping_math && !flag_errno_math)
8604 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8606 REAL_VALUE_TYPE x;
8607 bool inexact;
8609 x = TREE_REAL_CST (arg0);
8610 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8611 if (flag_unsafe_math_optimizations || !inexact)
8612 return build_real (type, x);
8615 /* Strip sign ops from even integer powers. */
8616 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8618 tree narg0 = fold_strip_sign_ops (arg0);
8619 if (narg0)
8620 return build_call_expr (fndecl, 2, narg0, arg1);
8625 if (flag_unsafe_math_optimizations)
8627 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8629 /* Optimize pow(expN(x),y) = expN(x*y). */
8630 if (BUILTIN_EXPONENT_P (fcode))
8632 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8633 tree arg = CALL_EXPR_ARG (arg0, 0);
8634 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8635 return build_call_expr (expfn, 1, arg);
8638 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8639 if (BUILTIN_SQRT_P (fcode))
8641 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8642 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8643 build_real (type, dconsthalf));
8644 return build_call_expr (fndecl, 2, narg0, narg1);
8647 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8648 if (BUILTIN_CBRT_P (fcode))
8650 tree arg = CALL_EXPR_ARG (arg0, 0);
8651 if (tree_expr_nonnegative_p (arg))
8653 const REAL_VALUE_TYPE dconstroot
8654 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8655 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8656 build_real (type, dconstroot));
8657 return build_call_expr (fndecl, 2, arg, narg1);
8661 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8662 if (fcode == BUILT_IN_POW
8663 || fcode == BUILT_IN_POWF
8664 || fcode == BUILT_IN_POWL)
8666 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8667 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8668 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8669 return build_call_expr (fndecl, 2, arg00, narg1);
8673 return NULL_TREE;
8676 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8677 Return NULL_TREE if no simplification can be made. */
8678 static tree
8679 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8680 tree arg0, tree arg1, tree type)
8682 if (!validate_arg (arg0, REAL_TYPE)
8683 || !validate_arg (arg1, INTEGER_TYPE))
8684 return NULL_TREE;
8686 /* Optimize pow(1.0,y) = 1.0. */
8687 if (real_onep (arg0))
8688 return omit_one_operand (type, build_real (type, dconst1), arg1);
8690 if (host_integerp (arg1, 0))
8692 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8694 /* Evaluate powi at compile-time. */
8695 if (TREE_CODE (arg0) == REAL_CST
8696 && !TREE_OVERFLOW (arg0))
8698 REAL_VALUE_TYPE x;
8699 x = TREE_REAL_CST (arg0);
8700 real_powi (&x, TYPE_MODE (type), &x, c);
8701 return build_real (type, x);
8704 /* Optimize pow(x,0) = 1.0. */
8705 if (c == 0)
8706 return omit_one_operand (type, build_real (type, dconst1),
8707 arg0);
8709 /* Optimize pow(x,1) = x. */
8710 if (c == 1)
8711 return arg0;
8713 /* Optimize pow(x,-1) = 1.0/x. */
8714 if (c == -1)
8715 return fold_build2 (RDIV_EXPR, type,
8716 build_real (type, dconst1), arg0);
8719 return NULL_TREE;
8722 /* A subroutine of fold_builtin to fold the various exponent
8723 functions. Return NULL_TREE if no simplification can be made.
8724 FUNC is the corresponding MPFR exponent function. */
8726 static tree
8727 fold_builtin_exponent (tree fndecl, tree arg,
8728 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8730 if (validate_arg (arg, REAL_TYPE))
8732 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8733 tree res;
8735 /* Calculate the result when the argument is a constant. */
8736 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8737 return res;
8739 /* Optimize expN(logN(x)) = x. */
8740 if (flag_unsafe_math_optimizations)
8742 const enum built_in_function fcode = builtin_mathfn_code (arg);
8744 if ((func == mpfr_exp
8745 && (fcode == BUILT_IN_LOG
8746 || fcode == BUILT_IN_LOGF
8747 || fcode == BUILT_IN_LOGL))
8748 || (func == mpfr_exp2
8749 && (fcode == BUILT_IN_LOG2
8750 || fcode == BUILT_IN_LOG2F
8751 || fcode == BUILT_IN_LOG2L))
8752 || (func == mpfr_exp10
8753 && (fcode == BUILT_IN_LOG10
8754 || fcode == BUILT_IN_LOG10F
8755 || fcode == BUILT_IN_LOG10L)))
8756 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8760 return NULL_TREE;
8763 /* Return true if VAR is a VAR_DECL or a component thereof. */
8765 static bool
8766 var_decl_component_p (tree var)
8768 tree inner = var;
8769 while (handled_component_p (inner))
8770 inner = TREE_OPERAND (inner, 0);
8771 return SSA_VAR_P (inner);
8774 /* Fold function call to builtin memset. Return
8775 NULL_TREE if no simplification can be made. */
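/* Illustrative: with `int i;' and a 32-bit int, a call

     memset (&i, 0xab, sizeof i)

   can be folded into the single store `i = 0xabababab', provided the
   length matches the mode size and the destination is sufficiently
   aligned; otherwise NULL_TREE is returned and the call is left as is.  */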
8777 static tree
8778 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8780 tree var, ret, etype;
8781 unsigned HOST_WIDE_INT length, cval;
8783 if (! validate_arg (dest, POINTER_TYPE)
8784 || ! validate_arg (c, INTEGER_TYPE)
8785 || ! validate_arg (len, INTEGER_TYPE))
8786 return NULL_TREE;
8788 if (! host_integerp (len, 1))
8789 return NULL_TREE;
8791 /* If the LEN parameter is zero, return DEST. */
8792 if (integer_zerop (len))
8793 return omit_one_operand (type, dest, c);
8795 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8796 return NULL_TREE;
8798 var = dest;
8799 STRIP_NOPS (var);
8800 if (TREE_CODE (var) != ADDR_EXPR)
8801 return NULL_TREE;
8803 var = TREE_OPERAND (var, 0);
8804 if (TREE_THIS_VOLATILE (var))
8805 return NULL_TREE;
8807 etype = TREE_TYPE (var);
8808 if (TREE_CODE (etype) == ARRAY_TYPE)
8809 etype = TREE_TYPE (etype);
8811 if (!INTEGRAL_TYPE_P (etype)
8812 && !POINTER_TYPE_P (etype))
8813 return NULL_TREE;
8815 if (! var_decl_component_p (var))
8816 return NULL_TREE;
8818 length = tree_low_cst (len, 1);
8819 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8820 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8821 < (int) length)
8822 return NULL_TREE;
8824 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8825 return NULL_TREE;
8827 if (integer_zerop (c))
8828 cval = 0;
8829 else
8831 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8832 return NULL_TREE;
8834 cval = tree_low_cst (c, 1);
8835 cval &= 0xff;
8836 cval |= cval << 8;
8837 cval |= cval << 16;
8838 cval |= (cval << 31) << 1;
8841 ret = build_int_cst_type (etype, cval);
8842 var = build_fold_indirect_ref (fold_convert (build_pointer_type (etype),
8843 dest));
8844 ret = build2 (MODIFY_EXPR, etype, var, ret);
8845 if (ignore)
8846 return ret;
8848 return omit_one_operand (type, dest, ret);
8851 /* Fold function call to builtin bzero. Return
8852 NULL_TREE if no simplification can be made. */
8854 static tree
8855 fold_builtin_bzero (tree dest, tree size, bool ignore)
8857 if (! validate_arg (dest, POINTER_TYPE)
8858 || ! validate_arg (size, INTEGER_TYPE))
8859 return NULL_TREE;
8861 if (!ignore)
8862 return NULL_TREE;
8864 /* New argument list transforming bzero(ptr x, int y) to
8865 memset(ptr x, int 0, size_t y). This is done this way
8866 so that if it isn't expanded inline, we fall back to
8867 calling bzero instead of memset. */
8869 return fold_builtin_memset (dest, integer_zero_node,
8870 fold_convert (sizetype, size),
8871 void_type_node, ignore);
8874 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8875 NULL_TREE if no simplification can be made.
8876 If ENDP is 0, return DEST (like memcpy).
8877 If ENDP is 1, return DEST+LEN (like mempcpy).
8878 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8879 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8880 (memmove). */
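/* For instance (illustrative), when the copy itself can be folded away
   the value returned is still shaped according to ENDP:

     memcpy (d, s, n)    ->  d            (ENDP == 0)
     mempcpy (d, s, n)   ->  d + n        (ENDP == 1)
     stpcpy-style calls  ->  d + n - 1    (ENDP == 2)
     memmove (d, s, n)   ->  d            (ENDP == 3, overlap allowed)  */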
8882 static tree
8883 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8885 tree destvar, srcvar, expr;
8887 if (! validate_arg (dest, POINTER_TYPE)
8888 || ! validate_arg (src, POINTER_TYPE)
8889 || ! validate_arg (len, INTEGER_TYPE))
8890 return NULL_TREE;
8892 /* If the LEN parameter is zero, return DEST. */
8893 if (integer_zerop (len))
8894 return omit_one_operand (type, dest, src);
8896 /* If SRC and DEST are the same (and not volatile), return
8897 DEST{,+LEN,+LEN-1}. */
8898 if (operand_equal_p (src, dest, 0))
8899 expr = len;
8900 else
8902 tree srctype, desttype;
8903 int src_align, dest_align;
8905 if (endp == 3)
8907 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8908 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8910 /* Both DEST and SRC must be pointer types.
8911 ??? This is what old code did. Is the testing for pointer types
8912 really mandatory?
8914 If either SRC is readonly or length is 1, we can use memcpy. */
8915 if (!dest_align || !src_align)
8916 return NULL_TREE;
8917 if (readonly_data_expr (src)
8918 || (host_integerp (len, 1)
8919 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8920 >= tree_low_cst (len, 1))))
8922 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8923 if (!fn)
8924 return NULL_TREE;
8925 return build_call_expr (fn, 3, dest, src, len);
8928 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8929 srcvar = build_fold_indirect_ref (src);
8930 destvar = build_fold_indirect_ref (dest);
8931 if (srcvar
8932 && !TREE_THIS_VOLATILE (srcvar)
8933 && destvar
8934 && !TREE_THIS_VOLATILE (destvar))
8936 tree src_base, dest_base, fn;
8937 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8938 HOST_WIDE_INT size = -1;
8939 HOST_WIDE_INT maxsize = -1;
8941 src_base = srcvar;
8942 if (handled_component_p (src_base))
8943 src_base = get_ref_base_and_extent (src_base, &src_offset,
8944 &size, &maxsize);
8945 dest_base = destvar;
8946 if (handled_component_p (dest_base))
8947 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
8948 &size, &maxsize);
8949 if (host_integerp (len, 1))
8951 maxsize = tree_low_cst (len, 1);
8952 if (maxsize
8953 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
8954 maxsize = -1;
8955 else
8956 maxsize *= BITS_PER_UNIT;
8958 else
8959 maxsize = -1;
8960 if (SSA_VAR_P (src_base)
8961 && SSA_VAR_P (dest_base))
8963 if (operand_equal_p (src_base, dest_base, 0)
8964 && ranges_overlap_p (src_offset, maxsize,
8965 dest_offset, maxsize))
8966 return NULL_TREE;
8968 else if (TREE_CODE (src_base) == INDIRECT_REF
8969 && TREE_CODE (dest_base) == INDIRECT_REF)
8971 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8972 TREE_OPERAND (dest_base, 0), 0)
8973 || ranges_overlap_p (src_offset, maxsize,
8974 dest_offset, maxsize))
8975 return NULL_TREE;
8977 else
8978 return NULL_TREE;
8980 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8981 if (!fn)
8982 return NULL_TREE;
8983 return build_call_expr (fn, 3, dest, src, len);
8985 return NULL_TREE;
8988 if (!host_integerp (len, 0))
8989 return NULL_TREE;
8990 /* FIXME:
8991 This logic loses for arguments like (type *)malloc (sizeof (type)),
8992 since we strip the casts up to the VOID return value of malloc.
8993 Perhaps we ought to inherit the type from the non-VOID argument here? */
8994 STRIP_NOPS (src);
8995 STRIP_NOPS (dest);
8996 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8997 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8999 tree tem = TREE_OPERAND (src, 0);
9000 STRIP_NOPS (tem);
9001 if (tem != TREE_OPERAND (src, 0))
9002 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
9004 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
9006 tree tem = TREE_OPERAND (dest, 0);
9007 STRIP_NOPS (tem);
9008 if (tem != TREE_OPERAND (dest, 0))
9009 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
9011 srctype = TREE_TYPE (TREE_TYPE (src));
9012 if (srctype
9013 && TREE_CODE (srctype) == ARRAY_TYPE
9014 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
9016 srctype = TREE_TYPE (srctype);
9017 STRIP_NOPS (src);
9018 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
9020 desttype = TREE_TYPE (TREE_TYPE (dest));
9021 if (desttype
9022 && TREE_CODE (desttype) == ARRAY_TYPE
9023 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
9025 desttype = TREE_TYPE (desttype);
9026 STRIP_NOPS (dest);
9027 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
9029 if (!srctype || !desttype
9030 || !TYPE_SIZE_UNIT (srctype)
9031 || !TYPE_SIZE_UNIT (desttype)
9032 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
9033 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
9034 || TYPE_VOLATILE (srctype)
9035 || TYPE_VOLATILE (desttype))
9036 return NULL_TREE;
9038 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
9039 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
9040 if (dest_align < (int) TYPE_ALIGN (desttype)
9041 || src_align < (int) TYPE_ALIGN (srctype))
9042 return NULL_TREE;
9044 if (!ignore)
9045 dest = builtin_save_expr (dest);
9047 srcvar = NULL_TREE;
9048 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
9050 srcvar = build_fold_indirect_ref (src);
9051 if (TREE_THIS_VOLATILE (srcvar))
9052 return NULL_TREE;
9053 else if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
9054 srcvar = NULL_TREE;
9055 /* With memcpy, it is possible to bypass aliasing rules, so without
9056 this check, e.g., execute/20060930-2.c would be misoptimized,
9057 because it uses a conflicting alias set to hold the argument for the
9058 memcpy call. This check is probably unnecessary with
9059 -fno-strict-aliasing. Similarly for destvar. See also
9060 PR29286. */
9061 else if (!var_decl_component_p (srcvar))
9062 srcvar = NULL_TREE;
9065 destvar = NULL_TREE;
9066 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
9068 destvar = build_fold_indirect_ref (dest);
9069 if (TREE_THIS_VOLATILE (destvar))
9070 return NULL_TREE;
9071 else if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
9072 destvar = NULL_TREE;
9073 else if (!var_decl_component_p (destvar))
9074 destvar = NULL_TREE;
9077 if (srcvar == NULL_TREE && destvar == NULL_TREE)
9078 return NULL_TREE;
9080 if (srcvar == NULL_TREE)
9082 tree srcptype;
9083 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
9084 return NULL_TREE;
9086 srctype = build_qualified_type (desttype, 0);
9087 if (src_align < (int) TYPE_ALIGN (srctype))
9089 if (AGGREGATE_TYPE_P (srctype)
9090 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
9091 return NULL_TREE;
9093 srctype = build_variant_type_copy (srctype);
9094 TYPE_ALIGN (srctype) = src_align;
9095 TYPE_USER_ALIGN (srctype) = 1;
9096 TYPE_PACKED (srctype) = 1;
9098 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
9099 src = fold_convert (srcptype, src);
9100 srcvar = build_fold_indirect_ref (src);
9102 else if (destvar == NULL_TREE)
9104 tree destptype;
9105 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
9106 return NULL_TREE;
9108 desttype = build_qualified_type (srctype, 0);
9109 if (dest_align < (int) TYPE_ALIGN (desttype))
9111 if (AGGREGATE_TYPE_P (desttype)
9112 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
9113 return NULL_TREE;
9115 desttype = build_variant_type_copy (desttype);
9116 TYPE_ALIGN (desttype) = dest_align;
9117 TYPE_USER_ALIGN (desttype) = 1;
9118 TYPE_PACKED (desttype) = 1;
9120 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
9121 dest = fold_convert (destptype, dest);
9122 destvar = build_fold_indirect_ref (dest);
9125 if (srctype == desttype
9126 || (gimple_in_ssa_p (cfun)
9127 && useless_type_conversion_p (desttype, srctype)))
9128 expr = srcvar;
9129 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
9130 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
9131 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
9132 || POINTER_TYPE_P (TREE_TYPE (destvar))))
9133 expr = fold_convert (TREE_TYPE (destvar), srcvar);
9134 else
9135 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
9136 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
9139 if (ignore)
9140 return expr;
9142 if (endp == 0 || endp == 3)
9143 return omit_one_operand (type, dest, expr);
9145 if (expr == len)
9146 expr = NULL_TREE;
9148 if (endp == 2)
9149 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
9150 ssize_int (1));
9152 len = fold_convert (sizetype, len);
9153 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
9154 dest = fold_convert (type, dest);
9155 if (expr)
9156 dest = omit_one_operand (type, dest, expr);
9157 return dest;
9160 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9161 If LEN is not NULL, it represents the length of the string to be
9162 copied. Return NULL_TREE if no simplification can be made. */
9164 tree
9165 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
9167 tree fn;
9169 if (!validate_arg (dest, POINTER_TYPE)
9170 || !validate_arg (src, POINTER_TYPE))
9171 return NULL_TREE;
9173 /* If SRC and DEST are the same (and not volatile), return DEST. */
9174 if (operand_equal_p (src, dest, 0))
9175 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
9177 if (optimize_function_for_size_p (cfun))
9178 return NULL_TREE;
9180 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9181 if (!fn)
9182 return NULL_TREE;
9184 if (!len)
9186 len = c_strlen (src, 1);
9187 if (! len || TREE_SIDE_EFFECTS (len))
9188 return NULL_TREE;
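/* Include the terminating NUL, turning strcpy (d, s) into
memcpy (d, s, strlen (s) + 1). */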
9191 len = size_binop (PLUS_EXPR, len, ssize_int (1));
9192 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9193 build_call_expr (fn, 3, dest, src, len));
9196 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9197 If SLEN is not NULL, it represents the length of the source string.
9198 Return NULL_TREE if no simplification can be made. */
9200 tree
9201 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
9203 tree fn;
9205 if (!validate_arg (dest, POINTER_TYPE)
9206 || !validate_arg (src, POINTER_TYPE)
9207 || !validate_arg (len, INTEGER_TYPE))
9208 return NULL_TREE;
9210 /* If the LEN parameter is zero, return DEST. */
9211 if (integer_zerop (len))
9212 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9214 /* We can't compare slen with len as constants below if len is not a
9215 constant. */
9216 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9217 return NULL_TREE;
9219 if (!slen)
9220 slen = c_strlen (src, 1);
9222 /* Now, we must be passed a constant src ptr parameter. */
9223 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9224 return NULL_TREE;
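/* Account for the terminating NUL in the length of the source. */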
9226 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
9228 /* We do not support simplification of this case, though we do
9229 support it when expanding trees into RTL. */
9230 /* FIXME: generate a call to __builtin_memset. */
9231 if (tree_int_cst_lt (slen, len))
9232 return NULL_TREE;
9234 /* OK, transform into a call to the builtin memcpy. */
9235 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9236 if (!fn)
9237 return NULL_TREE;
9238 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9239 build_call_expr (fn, 3, dest, src, len));
9242 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9243 arguments to the call, and TYPE is its return type.
9244 Return NULL_TREE if no simplification can be made. */
9246 static tree
9247 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
9249 if (!validate_arg (arg1, POINTER_TYPE)
9250 || !validate_arg (arg2, INTEGER_TYPE)
9251 || !validate_arg (len, INTEGER_TYPE))
9252 return NULL_TREE;
9253 else
9255 const char *p1;
9257 if (TREE_CODE (arg2) != INTEGER_CST
9258 || !host_integerp (len, 1))
9259 return NULL_TREE;
9261 p1 = c_getstr (arg1);
9262 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9264 char c;
9265 const char *r;
9266 tree tem;
9268 if (target_char_cast (arg2, &c))
9269 return NULL_TREE;
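/* Both the string and the length are known, so search at compile time
using the host memchr. */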
9271 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
9273 if (r == NULL)
9274 return build_int_cst (TREE_TYPE (arg1), 0);
9276 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
9277 size_int (r - p1));
9278 return fold_convert (type, tem);
9280 return NULL_TREE;
9284 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9285 Return NULL_TREE if no simplification can be made. */
9287 static tree
9288 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
9290 const char *p1, *p2;
9292 if (!validate_arg (arg1, POINTER_TYPE)
9293 || !validate_arg (arg2, POINTER_TYPE)
9294 || !validate_arg (len, INTEGER_TYPE))
9295 return NULL_TREE;
9297 /* If the LEN parameter is zero, return zero. */
9298 if (integer_zerop (len))
9299 return omit_two_operands (integer_type_node, integer_zero_node,
9300 arg1, arg2);
9302 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9303 if (operand_equal_p (arg1, arg2, 0))
9304 return omit_one_operand (integer_type_node, integer_zero_node, len);
9306 p1 = c_getstr (arg1);
9307 p2 = c_getstr (arg2);
9309 /* If all arguments are constant, and the value of len is not greater
9310 than the lengths of arg1 and arg2, evaluate at compile-time. */
9311 if (host_integerp (len, 1) && p1 && p2
9312 && compare_tree_int (len, strlen (p1) + 1) <= 0
9313 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9315 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9317 if (r > 0)
9318 return integer_one_node;
9319 else if (r < 0)
9320 return integer_minus_one_node;
9321 else
9322 return integer_zero_node;
9325 /* If the len parameter is one, return an expression corresponding to
9326 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9327 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9329 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9330 tree cst_uchar_ptr_node
9331 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9333 tree ind1 = fold_convert (integer_type_node,
9334 build1 (INDIRECT_REF, cst_uchar_node,
9335 fold_convert (cst_uchar_ptr_node,
9336 arg1)));
9337 tree ind2 = fold_convert (integer_type_node,
9338 build1 (INDIRECT_REF, cst_uchar_node,
9339 fold_convert (cst_uchar_ptr_node,
9340 arg2)));
9341 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9344 return NULL_TREE;
9347 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9348 Return NULL_TREE if no simplification can be made. */
9350 static tree
9351 fold_builtin_strcmp (tree arg1, tree arg2)
9353 const char *p1, *p2;
9355 if (!validate_arg (arg1, POINTER_TYPE)
9356 || !validate_arg (arg2, POINTER_TYPE))
9357 return NULL_TREE;
9359 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9360 if (operand_equal_p (arg1, arg2, 0))
9361 return integer_zero_node;
9363 p1 = c_getstr (arg1);
9364 p2 = c_getstr (arg2);
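/* If both strings are known constants, compute the result at compile time. */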
9366 if (p1 && p2)
9368 const int i = strcmp (p1, p2);
9369 if (i < 0)
9370 return integer_minus_one_node;
9371 else if (i > 0)
9372 return integer_one_node;
9373 else
9374 return integer_zero_node;
9377 /* If the second arg is "", return *(const unsigned char*)arg1. */
9378 if (p2 && *p2 == '\0')
9380 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9381 tree cst_uchar_ptr_node
9382 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9384 return fold_convert (integer_type_node,
9385 build1 (INDIRECT_REF, cst_uchar_node,
9386 fold_convert (cst_uchar_ptr_node,
9387 arg1)));
9390 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9391 if (p1 && *p1 == '\0')
9393 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9394 tree cst_uchar_ptr_node
9395 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9397 tree temp = fold_convert (integer_type_node,
9398 build1 (INDIRECT_REF, cst_uchar_node,
9399 fold_convert (cst_uchar_ptr_node,
9400 arg2)));
9401 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9404 return NULL_TREE;
9407 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9408 Return NULL_TREE if no simplification can be made. */
9410 static tree
9411 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9413 const char *p1, *p2;
9415 if (!validate_arg (arg1, POINTER_TYPE)
9416 || !validate_arg (arg2, POINTER_TYPE)
9417 || !validate_arg (len, INTEGER_TYPE))
9418 return NULL_TREE;
9420 /* If the LEN parameter is zero, return zero. */
9421 if (integer_zerop (len))
9422 return omit_two_operands (integer_type_node, integer_zero_node,
9423 arg1, arg2);
9425 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9426 if (operand_equal_p (arg1, arg2, 0))
9427 return omit_one_operand (integer_type_node, integer_zero_node, len);
9429 p1 = c_getstr (arg1);
9430 p2 = c_getstr (arg2);
9432 if (host_integerp (len, 1) && p1 && p2)
9434 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9435 if (i > 0)
9436 return integer_one_node;
9437 else if (i < 0)
9438 return integer_minus_one_node;
9439 else
9440 return integer_zero_node;
9443 /* If the second arg is "", and the length is greater than zero,
9444 return *(const unsigned char*)arg1. */
9445 if (p2 && *p2 == '\0'
9446 && TREE_CODE (len) == INTEGER_CST
9447 && tree_int_cst_sgn (len) == 1)
9449 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9450 tree cst_uchar_ptr_node
9451 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9453 return fold_convert (integer_type_node,
9454 build1 (INDIRECT_REF, cst_uchar_node,
9455 fold_convert (cst_uchar_ptr_node,
9456 arg1)));
9459 /* If the first arg is "", and the length is greater than zero,
9460 return -*(const unsigned char*)arg2. */
9461 if (p1 && *p1 == '\0'
9462 && TREE_CODE (len) == INTEGER_CST
9463 && tree_int_cst_sgn (len) == 1)
9465 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9466 tree cst_uchar_ptr_node
9467 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9469 tree temp = fold_convert (integer_type_node,
9470 build1 (INDIRECT_REF, cst_uchar_node,
9471 fold_convert (cst_uchar_ptr_node,
9472 arg2)));
9473 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9476 /* If the len parameter is one, return an expression corresponding to
9477 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9478 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9480 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9481 tree cst_uchar_ptr_node
9482 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9484 tree ind1 = fold_convert (integer_type_node,
9485 build1 (INDIRECT_REF, cst_uchar_node,
9486 fold_convert (cst_uchar_ptr_node,
9487 arg1)));
9488 tree ind2 = fold_convert (integer_type_node,
9489 build1 (INDIRECT_REF, cst_uchar_node,
9490 fold_convert (cst_uchar_ptr_node,
9491 arg2)));
9492 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9495 return NULL_TREE;
9498 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9499 ARG. Return NULL_TREE if no simplification can be made. */
9501 static tree
9502 fold_builtin_signbit (tree arg, tree type)
9504 tree temp;
9506 if (!validate_arg (arg, REAL_TYPE))
9507 return NULL_TREE;
9509 /* If ARG is a compile-time constant, determine the result. */
9510 if (TREE_CODE (arg) == REAL_CST
9511 && !TREE_OVERFLOW (arg))
9513 REAL_VALUE_TYPE c;
9515 c = TREE_REAL_CST (arg);
9516 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9517 return fold_convert (type, temp);
9520 /* If ARG is non-negative, the result is always zero. */
9521 if (tree_expr_nonnegative_p (arg))
9522 return omit_one_operand (type, integer_zero_node, arg);
9524 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9525 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9526 return fold_build2 (LT_EXPR, type, arg,
9527 build_real (TREE_TYPE (arg), dconst0));
9529 return NULL_TREE;
9532 /* Fold function call to builtin copysign, copysignf or copysignl with
9533 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9534 be made. */
9536 static tree
9537 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9539 tree tem;
9541 if (!validate_arg (arg1, REAL_TYPE)
9542 || !validate_arg (arg2, REAL_TYPE))
9543 return NULL_TREE;
9545 /* copysign(X,X) is X. */
9546 if (operand_equal_p (arg1, arg2, 0))
9547 return fold_convert (type, arg1);
9549 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9550 if (TREE_CODE (arg1) == REAL_CST
9551 && TREE_CODE (arg2) == REAL_CST
9552 && !TREE_OVERFLOW (arg1)
9553 && !TREE_OVERFLOW (arg2))
9555 REAL_VALUE_TYPE c1, c2;
9557 c1 = TREE_REAL_CST (arg1);
9558 c2 = TREE_REAL_CST (arg2);
9559 /* c1.sign := c2.sign. */
9560 real_copysign (&c1, &c2);
9561 return build_real (type, c1);
9564 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9565 Remember to evaluate Y for side-effects. */
9566 if (tree_expr_nonnegative_p (arg2))
9567 return omit_one_operand (type,
9568 fold_build1 (ABS_EXPR, type, arg1),
9569 arg2);
9571 /* Strip sign changing operations for the first argument. */
9572 tem = fold_strip_sign_ops (arg1);
9573 if (tem)
9574 return build_call_expr (fndecl, 2, tem, arg2);
9576 return NULL_TREE;
9579 /* Fold a call to builtin isascii with argument ARG. */
9581 static tree
9582 fold_builtin_isascii (tree arg)
9584 if (!validate_arg (arg, INTEGER_TYPE))
9585 return NULL_TREE;
9586 else
9588 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9589 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9590 build_int_cst (NULL_TREE,
9591 ~ (unsigned HOST_WIDE_INT) 0x7f));
9592 return fold_build2 (EQ_EXPR, integer_type_node,
9593 arg, integer_zero_node);
9597 /* Fold a call to builtin toascii with argument ARG. */
9599 static tree
9600 fold_builtin_toascii (tree arg)
9602 if (!validate_arg (arg, INTEGER_TYPE))
9603 return NULL_TREE;
9605 /* Transform toascii(c) -> (c & 0x7f). */
9606 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9607 build_int_cst (NULL_TREE, 0x7f));
9610 /* Fold a call to builtin isdigit with argument ARG. */
9612 static tree
9613 fold_builtin_isdigit (tree arg)
9615 if (!validate_arg (arg, INTEGER_TYPE))
9616 return NULL_TREE;
9617 else
9619 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9620 /* According to the C standard, isdigit is unaffected by locale.
9621 However, it definitely is affected by the target character set. */
9622 unsigned HOST_WIDE_INT target_digit0
9623 = lang_hooks.to_target_charset ('0');
9625 if (target_digit0 == 0)
9626 return NULL_TREE;
9628 arg = fold_convert (unsigned_type_node, arg);
9629 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9630 build_int_cst (unsigned_type_node, target_digit0));
9631 return fold_build2 (LE_EXPR, integer_type_node, arg,
9632 build_int_cst (unsigned_type_node, 9));
9636 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9638 static tree
9639 fold_builtin_fabs (tree arg, tree type)
9641 if (!validate_arg (arg, REAL_TYPE))
9642 return NULL_TREE;
9644 arg = fold_convert (type, arg);
9645 if (TREE_CODE (arg) == REAL_CST)
9646 return fold_abs_const (arg, type);
9647 return fold_build1 (ABS_EXPR, type, arg);
9650 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9652 static tree
9653 fold_builtin_abs (tree arg, tree type)
9655 if (!validate_arg (arg, INTEGER_TYPE))
9656 return NULL_TREE;
9658 arg = fold_convert (type, arg);
9659 if (TREE_CODE (arg) == INTEGER_CST)
9660 return fold_abs_const (arg, type);
9661 return fold_build1 (ABS_EXPR, type, arg);
9664 /* Fold a call to builtin fmin or fmax. */
9666 static tree
9667 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9669 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9671 /* Calculate the result when the argument is a constant. */
9672 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9674 if (res)
9675 return res;
9677 /* If either argument is NaN, return the other one. Avoid the
9678 transformation if we get (and honor) a signalling NaN. Using
9679 omit_one_operand() ensures we create a non-lvalue. */
9680 if (TREE_CODE (arg0) == REAL_CST
9681 && real_isnan (&TREE_REAL_CST (arg0))
9682 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9683 || ! TREE_REAL_CST (arg0).signalling))
9684 return omit_one_operand (type, arg1, arg0);
9685 if (TREE_CODE (arg1) == REAL_CST
9686 && real_isnan (&TREE_REAL_CST (arg1))
9687 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9688 || ! TREE_REAL_CST (arg1).signalling))
9689 return omit_one_operand (type, arg0, arg1);
9691 /* Transform fmin/fmax(x,x) -> x. */
9692 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9693 return omit_one_operand (type, arg0, arg1);
9695 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9696 functions to return the numeric arg if the other one is NaN.
9697 These tree codes don't honor that, so only transform if
9698 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9699 handled, so we don't have to worry about it either. */
9700 if (flag_finite_math_only)
9701 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9702 fold_convert (type, arg0),
9703 fold_convert (type, arg1));
9705 return NULL_TREE;
9708 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9710 static tree
9711 fold_builtin_carg (tree arg, tree type)
9713 if (validate_arg (arg, COMPLEX_TYPE)
9714 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9716 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9718 if (atan2_fn)
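/* Save ARG so that it is evaluated only once when its real and
imaginary parts are extracted below. */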
9720 tree new_arg = builtin_save_expr (arg);
9721 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9722 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9723 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9727 return NULL_TREE;
9730 /* Fold a call to builtin logb/ilogb. */
9732 static tree
9733 fold_builtin_logb (tree arg, tree rettype)
9735 if (! validate_arg (arg, REAL_TYPE))
9736 return NULL_TREE;
9738 STRIP_NOPS (arg);
9740 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9742 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9744 switch (value->cl)
9746 case rvc_nan:
9747 case rvc_inf:
9748 /* If arg is Inf or NaN and we're logb, return it. */
9749 if (TREE_CODE (rettype) == REAL_TYPE)
9750 return fold_convert (rettype, arg);
9751 /* Fall through... */
9752 case rvc_zero:
9753 /* Zero may set errno and/or raise an exception for logb; also,
9754 for ilogb we don't know FP_ILOGB0. */
9755 return NULL_TREE;
9756 case rvc_normal:
9757 /* For normal numbers, proceed iff radix == 2. In GCC,
9758 normalized significands are in the range [0.5, 1.0). We
9759 want the exponent as if they were [1.0, 2.0) so get the
9760 exponent and subtract 1. */
9761 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9762 return fold_convert (rettype, build_int_cst (NULL_TREE,
9763 REAL_EXP (value)-1));
9764 break;
9768 return NULL_TREE;
9771 /* Fold a call to builtin significand, if radix == 2. */
9773 static tree
9774 fold_builtin_significand (tree arg, tree rettype)
9776 if (! validate_arg (arg, REAL_TYPE))
9777 return NULL_TREE;
9779 STRIP_NOPS (arg);
9781 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9783 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9785 switch (value->cl)
9787 case rvc_zero:
9788 case rvc_nan:
9789 case rvc_inf:
9790 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9791 return fold_convert (rettype, arg);
9792 case rvc_normal:
9793 /* For normal numbers, proceed iff radix == 2. */
9794 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9796 REAL_VALUE_TYPE result = *value;
9797 /* In GCC, normalized significands are in the range [0.5,
9798 1.0). We want them to be [1.0, 2.0) so set the
9799 exponent to 1. */
9800 SET_REAL_EXP (&result, 1);
9801 return build_real (rettype, result);
9803 break;
9807 return NULL_TREE;
9810 /* Fold a call to builtin frexp; we can assume the base is 2. */
9812 static tree
9813 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9815 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9816 return NULL_TREE;
9818 STRIP_NOPS (arg0);
9820 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9821 return NULL_TREE;
9823 arg1 = build_fold_indirect_ref (arg1);
9825 /* Proceed if a valid pointer type was passed in. */
9826 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9828 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9829 tree frac, exp;
9831 switch (value->cl)
9833 case rvc_zero:
9834 /* For +-0, return (*exp = 0, +-0). */
9835 exp = integer_zero_node;
9836 frac = arg0;
9837 break;
9838 case rvc_nan:
9839 case rvc_inf:
9840 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9841 return omit_one_operand (rettype, arg0, arg1);
9842 case rvc_normal:
9844 /* Since the frexp function always expects base 2, and in
9845 GCC normalized significands are already in the range
9846 [0.5, 1.0), we have exactly what frexp wants. */
9847 REAL_VALUE_TYPE frac_rvt = *value;
9848 SET_REAL_EXP (&frac_rvt, 0);
9849 frac = build_real (rettype, frac_rvt);
9850 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9852 break;
9853 default:
9854 gcc_unreachable ();
9857 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9858 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9859 TREE_SIDE_EFFECTS (arg1) = 1;
9860 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9863 return NULL_TREE;
9866 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9867 then we can assume the base is two. If it's false, then we have to
9868 check the mode of the TYPE parameter in certain cases. */
9870 static tree
9871 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9873 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9875 STRIP_NOPS (arg0);
9876 STRIP_NOPS (arg1);
9878 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9879 if (real_zerop (arg0) || integer_zerop (arg1)
9880 || (TREE_CODE (arg0) == REAL_CST
9881 && !real_isfinite (&TREE_REAL_CST (arg0))))
9882 return omit_one_operand (type, arg0, arg1);
9884 /* If both arguments are constant, then try to evaluate it. */
9885 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9886 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9887 && host_integerp (arg1, 0))
9889 /* Bound the maximum adjustment to twice the range of the
9890 mode's valid exponents. Use abs to ensure the range is
9891 positive as a sanity check. */
9892 const long max_exp_adj = 2 *
9893 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9894 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9896 /* Get the user-requested adjustment. */
9897 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9899 /* The requested adjustment must be inside this range. This
9900 is a preliminary cap to avoid things like overflow; we
9901 may still fail to compute the result for other reasons. */
9902 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9904 REAL_VALUE_TYPE initial_result;
9906 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9908 /* Ensure we didn't overflow. */
9909 if (! real_isinf (&initial_result))
9911 const REAL_VALUE_TYPE trunc_result
9912 = real_value_truncate (TYPE_MODE (type), initial_result);
9914 /* Only proceed if the target mode can hold the
9915 resulting value. */
9916 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9917 return build_real (type, trunc_result);
9923 return NULL_TREE;
9926 /* Fold a call to builtin modf. */
9928 static tree
9929 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9931 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9932 return NULL_TREE;
9934 STRIP_NOPS (arg0);
9936 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9937 return NULL_TREE;
9939 arg1 = build_fold_indirect_ref (arg1);
9941 /* Proceed if a valid pointer type was passed in. */
9942 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9944 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9945 REAL_VALUE_TYPE trunc, frac;
9947 switch (value->cl)
9949 case rvc_nan:
9950 case rvc_zero:
9951 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9952 trunc = frac = *value;
9953 break;
9954 case rvc_inf:
9955 /* For +-Inf, return (*arg1 = arg0, +-0). */
9956 frac = dconst0;
9957 frac.sign = value->sign;
9958 trunc = *value;
9959 break;
9960 case rvc_normal:
9961 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9962 real_trunc (&trunc, VOIDmode, value);
9963 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9964 /* If the original number was negative and already
9965 integral, then the fractional part is -0.0. */
9966 if (value->sign && frac.cl == rvc_zero)
9967 frac.sign = value->sign;
9968 break;
9971 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9972 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9973 build_real (rettype, trunc));
9974 TREE_SIDE_EFFECTS (arg1) = 1;
9975 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9976 build_real (rettype, frac));
9979 return NULL_TREE;
9982 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9983 ARG is the argument for the call. */
9985 static tree
9986 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9988 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9989 REAL_VALUE_TYPE r;
9991 if (!validate_arg (arg, REAL_TYPE))
9992 return NULL_TREE;
9994 switch (builtin_index)
9996 case BUILT_IN_ISINF:
9997 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9998 return omit_one_operand (type, integer_zero_node, arg);
10000 if (TREE_CODE (arg) == REAL_CST)
10002 r = TREE_REAL_CST (arg);
10003 if (real_isinf (&r))
10004 return real_compare (GT_EXPR, &r, &dconst0)
10005 ? integer_one_node : integer_minus_one_node;
10006 else
10007 return integer_zero_node;
10010 return NULL_TREE;
10012 case BUILT_IN_ISINF_SIGN:
10014 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10015 /* In a boolean context, GCC will fold the inner COND_EXPR to
10016 1. So e.g. "if (isinf_sign(x))" would be folded to just
10017 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10018 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10019 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
10020 tree tmp = NULL_TREE;
10022 arg = builtin_save_expr (arg);
10024 if (signbit_fn && isinf_fn)
10026 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
10027 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
10029 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
10030 signbit_call, integer_zero_node);
10031 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
10032 isinf_call, integer_zero_node);
10034 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
10035 integer_minus_one_node, integer_one_node);
10036 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
10037 integer_zero_node);
10040 return tmp;
10043 case BUILT_IN_ISFINITE:
10044 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10045 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10046 return omit_one_operand (type, integer_one_node, arg);
10048 if (TREE_CODE (arg) == REAL_CST)
10050 r = TREE_REAL_CST (arg);
10051 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10054 return NULL_TREE;
10056 case BUILT_IN_ISNAN:
10057 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10058 return omit_one_operand (type, integer_zero_node, arg);
10060 if (TREE_CODE (arg) == REAL_CST)
10062 r = TREE_REAL_CST (arg);
10063 return real_isnan (&r) ? integer_one_node : integer_zero_node;
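/* Otherwise use an unordered self-comparison: the UNORDERED_EXPR below
is true iff ARG is a NaN. */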
10066 arg = builtin_save_expr (arg);
10067 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
10069 default:
10070 gcc_unreachable ();
10074 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10075 This builtin will generate code to return the appropriate floating
10076 point classification depending on the value of the floating point
10077 number passed in. The possible return values must be supplied as
10078 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10079 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
10080 one floating point argument which is "type generic". */
10082 static tree
10083 fold_builtin_fpclassify (tree exp)
10085 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10086 arg, type, res, tmp;
10087 enum machine_mode mode;
10088 REAL_VALUE_TYPE r;
10089 char buf[128];
10091 /* Verify the required arguments in the original call. */
10092 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10093 INTEGER_TYPE, INTEGER_TYPE,
10094 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10095 return NULL_TREE;
10097 fp_nan = CALL_EXPR_ARG (exp, 0);
10098 fp_infinite = CALL_EXPR_ARG (exp, 1);
10099 fp_normal = CALL_EXPR_ARG (exp, 2);
10100 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10101 fp_zero = CALL_EXPR_ARG (exp, 4);
10102 arg = CALL_EXPR_ARG (exp, 5);
10103 type = TREE_TYPE (arg);
10104 mode = TYPE_MODE (type);
10105 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
10107 /* fpclassify(x) ->
10108 isnan(x) ? FP_NAN :
10109 (fabs(x) == Inf ? FP_INFINITE :
10110 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10111 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
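/* Build the nested COND_EXPRs from the innermost test outward,
starting with the zero/subnormal distinction. */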
10113 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
10114 build_real (type, dconst0));
10115 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
10117 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10118 real_from_string (&r, buf);
10119 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
10120 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
10122 if (HONOR_INFINITIES (mode))
10124 real_inf (&r);
10125 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
10126 build_real (type, r));
10127 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
10130 if (HONOR_NANS (mode))
10132 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
10133 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
10136 return res;
10139 /* Fold a call to an unordered comparison function such as
10140 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10141 being called and ARG0 and ARG1 are the arguments for the call.
10142 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10143 the opposite of the desired result. UNORDERED_CODE is used
10144 for modes that can hold NaNs and ORDERED_CODE is used for
10145 the rest. */
10147 static tree
10148 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
10149 enum tree_code unordered_code,
10150 enum tree_code ordered_code)
10152 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10153 enum tree_code code;
10154 tree type0, type1;
10155 enum tree_code code0, code1;
10156 tree cmp_type = NULL_TREE;
10158 type0 = TREE_TYPE (arg0);
10159 type1 = TREE_TYPE (arg1);
10161 code0 = TREE_CODE (type0);
10162 code1 = TREE_CODE (type1);
10164 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10165 /* Choose the wider of two real types. */
10166 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10167 ? type0 : type1;
10168 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10169 cmp_type = type0;
10170 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10171 cmp_type = type1;
10173 arg0 = fold_convert (cmp_type, arg0);
10174 arg1 = fold_convert (cmp_type, arg1);
10176 if (unordered_code == UNORDERED_EXPR)
10178 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10179 return omit_two_operands (type, integer_zero_node, arg0, arg1);
10180 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
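/* The comparison codes give the opposite of the desired result, so
build that comparison and negate it, using the unordered code only
when the mode honors NaNs. */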
10183 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10184 : ordered_code;
10185 return fold_build1 (TRUTH_NOT_EXPR, type,
10186 fold_build2 (code, type, arg0, arg1));
10189 /* Fold a call to built-in function FNDECL with 0 arguments.
10190 IGNORE is true if the result of the function call is ignored. This
10191 function returns NULL_TREE if no simplification was possible. */
10193 static tree
10194 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10196 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10197 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10198 switch (fcode)
10200 CASE_FLT_FN (BUILT_IN_INF):
10201 case BUILT_IN_INFD32:
10202 case BUILT_IN_INFD64:
10203 case BUILT_IN_INFD128:
10204 return fold_builtin_inf (type, true);
10206 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10207 return fold_builtin_inf (type, false);
10209 case BUILT_IN_CLASSIFY_TYPE:
10210 return fold_builtin_classify_type (NULL_TREE);
10212 default:
10213 break;
10215 return NULL_TREE;
10218 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10219 IGNORE is true if the result of the function call is ignored. This
10220 function returns NULL_TREE if no simplification was possible. */
10222 static tree
10223 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
10225 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10226 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10227 switch (fcode)
10230 case BUILT_IN_CONSTANT_P:
10232 tree val = fold_builtin_constant_p (arg0);
10234 /* Gimplification will pull the CALL_EXPR for the builtin out of
10235 an if condition. When not optimizing, we'll not CSE it back.
10236 To avoid regressions such as link errors, return false now. */
10237 if (!val && !optimize)
10238 val = integer_zero_node;
10240 return val;
10243 case BUILT_IN_CLASSIFY_TYPE:
10244 return fold_builtin_classify_type (arg0);
10246 case BUILT_IN_STRLEN:
10247 return fold_builtin_strlen (arg0);
10249 CASE_FLT_FN (BUILT_IN_FABS):
10250 return fold_builtin_fabs (arg0, type);
10252 case BUILT_IN_ABS:
10253 case BUILT_IN_LABS:
10254 case BUILT_IN_LLABS:
10255 case BUILT_IN_IMAXABS:
10256 return fold_builtin_abs (arg0, type);
10258 CASE_FLT_FN (BUILT_IN_CONJ):
10259 if (validate_arg (arg0, COMPLEX_TYPE)
10260 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10261 return fold_build1 (CONJ_EXPR, type, arg0);
10262 break;
10264 CASE_FLT_FN (BUILT_IN_CREAL):
10265 if (validate_arg (arg0, COMPLEX_TYPE)
10266 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10267 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));
10268 break;
10270 CASE_FLT_FN (BUILT_IN_CIMAG):
10271 if (validate_arg (arg0, COMPLEX_TYPE))
10272 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
10273 break;
10275 CASE_FLT_FN (BUILT_IN_CCOS):
10276 return fold_builtin_ccos (arg0, type, fndecl, /*hyper=*/ false);
10278 CASE_FLT_FN (BUILT_IN_CCOSH):
10279 return fold_builtin_ccos (arg0, type, fndecl, /*hyper=*/ true);
10281 #ifdef HAVE_mpc
10282 CASE_FLT_FN (BUILT_IN_CSIN):
10283 if (validate_arg (arg0, COMPLEX_TYPE)
10284 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10285 return do_mpc_arg1 (arg0, type, mpc_sin);
10286 break;
10288 CASE_FLT_FN (BUILT_IN_CSINH):
10289 if (validate_arg (arg0, COMPLEX_TYPE)
10290 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10291 return do_mpc_arg1 (arg0, type, mpc_sinh);
10292 break;
10294 CASE_FLT_FN (BUILT_IN_CTAN):
10295 if (validate_arg (arg0, COMPLEX_TYPE)
10296 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10297 return do_mpc_arg1 (arg0, type, mpc_tan);
10298 break;
10300 CASE_FLT_FN (BUILT_IN_CTANH):
10301 if (validate_arg (arg0, COMPLEX_TYPE)
10302 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10303 return do_mpc_arg1 (arg0, type, mpc_tanh);
10304 break;
10306 CASE_FLT_FN (BUILT_IN_CLOG):
10307 if (validate_arg (arg0, COMPLEX_TYPE)
10308 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10309 return do_mpc_arg1 (arg0, type, mpc_log);
10310 break;
10312 CASE_FLT_FN (BUILT_IN_CSQRT):
10313 if (validate_arg (arg0, COMPLEX_TYPE)
10314 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10315 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10316 break;
10317 #endif
10319 CASE_FLT_FN (BUILT_IN_CABS):
10320 return fold_builtin_cabs (arg0, type, fndecl);
10322 CASE_FLT_FN (BUILT_IN_CARG):
10323 return fold_builtin_carg (arg0, type);
10325 CASE_FLT_FN (BUILT_IN_SQRT):
10326 return fold_builtin_sqrt (arg0, type);
10328 CASE_FLT_FN (BUILT_IN_CBRT):
10329 return fold_builtin_cbrt (arg0, type);
10331 CASE_FLT_FN (BUILT_IN_ASIN):
10332 if (validate_arg (arg0, REAL_TYPE))
10333 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10334 &dconstm1, &dconst1, true);
10335 break;
10337 CASE_FLT_FN (BUILT_IN_ACOS):
10338 if (validate_arg (arg0, REAL_TYPE))
10339 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10340 &dconstm1, &dconst1, true);
10341 break;
10343 CASE_FLT_FN (BUILT_IN_ATAN):
10344 if (validate_arg (arg0, REAL_TYPE))
10345 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10346 break;
10348 CASE_FLT_FN (BUILT_IN_ASINH):
10349 if (validate_arg (arg0, REAL_TYPE))
10350 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10351 break;
10353 CASE_FLT_FN (BUILT_IN_ACOSH):
10354 if (validate_arg (arg0, REAL_TYPE))
10355 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10356 &dconst1, NULL, true);
10357 break;
10359 CASE_FLT_FN (BUILT_IN_ATANH):
10360 if (validate_arg (arg0, REAL_TYPE))
10361 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10362 &dconstm1, &dconst1, false);
10363 break;
10365 CASE_FLT_FN (BUILT_IN_SIN):
10366 if (validate_arg (arg0, REAL_TYPE))
10367 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10368 break;
10370 CASE_FLT_FN (BUILT_IN_COS):
10371 return fold_builtin_cos (arg0, type, fndecl);
10373 CASE_FLT_FN (BUILT_IN_TAN):
10374 return fold_builtin_tan (arg0, type);
10376 CASE_FLT_FN (BUILT_IN_CEXP):
10377 return fold_builtin_cexp (arg0, type);
10379 CASE_FLT_FN (BUILT_IN_CEXPI):
10380 if (validate_arg (arg0, REAL_TYPE))
10381 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10382 break;
10384 CASE_FLT_FN (BUILT_IN_SINH):
10385 if (validate_arg (arg0, REAL_TYPE))
10386 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10387 break;
10389 CASE_FLT_FN (BUILT_IN_COSH):
10390 return fold_builtin_cosh (arg0, type, fndecl);
10392 CASE_FLT_FN (BUILT_IN_TANH):
10393 if (validate_arg (arg0, REAL_TYPE))
10394 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10395 break;
10397 CASE_FLT_FN (BUILT_IN_ERF):
10398 if (validate_arg (arg0, REAL_TYPE))
10399 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10400 break;
10402 CASE_FLT_FN (BUILT_IN_ERFC):
10403 if (validate_arg (arg0, REAL_TYPE))
10404 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10405 break;
10407 CASE_FLT_FN (BUILT_IN_TGAMMA):
10408 if (validate_arg (arg0, REAL_TYPE))
10409 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10410 break;
10412 CASE_FLT_FN (BUILT_IN_EXP):
10413 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10415 CASE_FLT_FN (BUILT_IN_EXP2):
10416 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10418 CASE_FLT_FN (BUILT_IN_EXP10):
10419 CASE_FLT_FN (BUILT_IN_POW10):
10420 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10422 CASE_FLT_FN (BUILT_IN_EXPM1):
10423 if (validate_arg (arg0, REAL_TYPE))
10424 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10425 break;
10427 CASE_FLT_FN (BUILT_IN_LOG):
10428 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10430 CASE_FLT_FN (BUILT_IN_LOG2):
10431 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10433 CASE_FLT_FN (BUILT_IN_LOG10):
10434 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10436 CASE_FLT_FN (BUILT_IN_LOG1P):
10437 if (validate_arg (arg0, REAL_TYPE))
10438 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10439 &dconstm1, NULL, false);
10440 break;
10442 CASE_FLT_FN (BUILT_IN_J0):
10443 if (validate_arg (arg0, REAL_TYPE))
10444 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10445 NULL, NULL, 0);
10446 break;
10448 CASE_FLT_FN (BUILT_IN_J1):
10449 if (validate_arg (arg0, REAL_TYPE))
10450 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10451 NULL, NULL, 0);
10452 break;
10454 CASE_FLT_FN (BUILT_IN_Y0):
10455 if (validate_arg (arg0, REAL_TYPE))
10456 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10457 &dconst0, NULL, false);
10458 break;
10460 CASE_FLT_FN (BUILT_IN_Y1):
10461 if (validate_arg (arg0, REAL_TYPE))
10462 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10463 &dconst0, NULL, false);
10464 break;
10466 CASE_FLT_FN (BUILT_IN_NAN):
10467 case BUILT_IN_NAND32:
10468 case BUILT_IN_NAND64:
10469 case BUILT_IN_NAND128:
10470 return fold_builtin_nan (arg0, type, true);
10472 CASE_FLT_FN (BUILT_IN_NANS):
10473 return fold_builtin_nan (arg0, type, false);
10475 CASE_FLT_FN (BUILT_IN_FLOOR):
10476 return fold_builtin_floor (fndecl, arg0);
10478 CASE_FLT_FN (BUILT_IN_CEIL):
10479 return fold_builtin_ceil (fndecl, arg0);
10481 CASE_FLT_FN (BUILT_IN_TRUNC):
10482 return fold_builtin_trunc (fndecl, arg0);
10484 CASE_FLT_FN (BUILT_IN_ROUND):
10485 return fold_builtin_round (fndecl, arg0);
10487 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10488 CASE_FLT_FN (BUILT_IN_RINT):
10489 return fold_trunc_transparent_mathfn (fndecl, arg0);
10491 CASE_FLT_FN (BUILT_IN_LCEIL):
10492 CASE_FLT_FN (BUILT_IN_LLCEIL):
10493 CASE_FLT_FN (BUILT_IN_LFLOOR):
10494 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10495 CASE_FLT_FN (BUILT_IN_LROUND):
10496 CASE_FLT_FN (BUILT_IN_LLROUND):
10497 return fold_builtin_int_roundingfn (fndecl, arg0);
10499 CASE_FLT_FN (BUILT_IN_LRINT):
10500 CASE_FLT_FN (BUILT_IN_LLRINT):
10501 return fold_fixed_mathfn (fndecl, arg0);
10503 case BUILT_IN_BSWAP32:
10504 case BUILT_IN_BSWAP64:
10505 return fold_builtin_bswap (fndecl, arg0);
10507 CASE_INT_FN (BUILT_IN_FFS):
10508 CASE_INT_FN (BUILT_IN_CLZ):
10509 CASE_INT_FN (BUILT_IN_CTZ):
10510 CASE_INT_FN (BUILT_IN_POPCOUNT):
10511 CASE_INT_FN (BUILT_IN_PARITY):
10512 return fold_builtin_bitop (fndecl, arg0);
10514 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10515 return fold_builtin_signbit (arg0, type);
10517 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10518 return fold_builtin_significand (arg0, type);
10520 CASE_FLT_FN (BUILT_IN_ILOGB):
10521 CASE_FLT_FN (BUILT_IN_LOGB):
10522 return fold_builtin_logb (arg0, type);
10524 case BUILT_IN_ISASCII:
10525 return fold_builtin_isascii (arg0);
10527 case BUILT_IN_TOASCII:
10528 return fold_builtin_toascii (arg0);
10530 case BUILT_IN_ISDIGIT:
10531 return fold_builtin_isdigit (arg0);
10533 CASE_FLT_FN (BUILT_IN_FINITE):
10534 case BUILT_IN_FINITED32:
10535 case BUILT_IN_FINITED64:
10536 case BUILT_IN_FINITED128:
10537 case BUILT_IN_ISFINITE:
10538 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10540 CASE_FLT_FN (BUILT_IN_ISINF):
10541 case BUILT_IN_ISINFD32:
10542 case BUILT_IN_ISINFD64:
10543 case BUILT_IN_ISINFD128:
10544 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10546 case BUILT_IN_ISINF_SIGN:
10547 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10549 CASE_FLT_FN (BUILT_IN_ISNAN):
10550 case BUILT_IN_ISNAND32:
10551 case BUILT_IN_ISNAND64:
10552 case BUILT_IN_ISNAND128:
10553 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10555 case BUILT_IN_PRINTF:
10556 case BUILT_IN_PRINTF_UNLOCKED:
10557 case BUILT_IN_VPRINTF:
10558 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10560 default:
10561 break;
10564 return NULL_TREE;
10568 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10569 IGNORE is true if the result of the function call is ignored. This
10570 function returns NULL_TREE if no simplification was possible. */
10572 static tree
10573 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10575 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10576 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10578 switch (fcode)
10580 CASE_FLT_FN (BUILT_IN_JN):
10581 if (validate_arg (arg0, INTEGER_TYPE)
10582 && validate_arg (arg1, REAL_TYPE))
10583 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10584 break;
10586 CASE_FLT_FN (BUILT_IN_YN):
10587 if (validate_arg (arg0, INTEGER_TYPE)
10588 && validate_arg (arg1, REAL_TYPE))
10589 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10590 &dconst0, false);
10591 break;
10593 CASE_FLT_FN (BUILT_IN_DREM):
10594 CASE_FLT_FN (BUILT_IN_REMAINDER):
10595 if (validate_arg (arg0, REAL_TYPE)
10596 && validate_arg (arg1, REAL_TYPE))
10597 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10598 break;
10600 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10601 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10602 if (validate_arg (arg0, REAL_TYPE)
10603 && validate_arg (arg1, POINTER_TYPE))
10604 return do_mpfr_lgamma_r (arg0, arg1, type);
10605 break;
10607 CASE_FLT_FN (BUILT_IN_ATAN2):
10608 if (validate_arg (arg0, REAL_TYPE)
10609 && validate_arg (arg1, REAL_TYPE))
10610 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10611 break;
10613 CASE_FLT_FN (BUILT_IN_FDIM):
10614 if (validate_arg (arg0, REAL_TYPE)
10615 && validate_arg (arg1, REAL_TYPE))
10616 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10617 break;
10619 CASE_FLT_FN (BUILT_IN_HYPOT):
10620 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10622 CASE_FLT_FN (BUILT_IN_LDEXP):
10623 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10624 CASE_FLT_FN (BUILT_IN_SCALBN):
10625 CASE_FLT_FN (BUILT_IN_SCALBLN):
10626 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10628 CASE_FLT_FN (BUILT_IN_FREXP):
10629 return fold_builtin_frexp (arg0, arg1, type);
10631 CASE_FLT_FN (BUILT_IN_MODF):
10632 return fold_builtin_modf (arg0, arg1, type);
10634 case BUILT_IN_BZERO:
10635 return fold_builtin_bzero (arg0, arg1, ignore);
10637 case BUILT_IN_FPUTS:
10638 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10640 case BUILT_IN_FPUTS_UNLOCKED:
10641 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10643 case BUILT_IN_STRSTR:
10644 return fold_builtin_strstr (arg0, arg1, type);
10646 case BUILT_IN_STRCAT:
10647 return fold_builtin_strcat (arg0, arg1);
10649 case BUILT_IN_STRSPN:
10650 return fold_builtin_strspn (arg0, arg1);
10652 case BUILT_IN_STRCSPN:
10653 return fold_builtin_strcspn (arg0, arg1);
10655 case BUILT_IN_STRCHR:
10656 case BUILT_IN_INDEX:
10657 return fold_builtin_strchr (arg0, arg1, type);
10659 case BUILT_IN_STRRCHR:
10660 case BUILT_IN_RINDEX:
10661 return fold_builtin_strrchr (arg0, arg1, type);
10663 case BUILT_IN_STRCPY:
10664 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10666 case BUILT_IN_STPCPY:
10667 if (ignore)
10669 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10670 if (!fn)
10671 break;
10673 return build_call_expr (fn, 2, arg0, arg1);
10675 break;
10677 case BUILT_IN_STRCMP:
10678 return fold_builtin_strcmp (arg0, arg1);
10680 case BUILT_IN_STRPBRK:
10681 return fold_builtin_strpbrk (arg0, arg1, type);
10683 case BUILT_IN_EXPECT:
10684 return fold_builtin_expect (arg0, arg1);
10686 CASE_FLT_FN (BUILT_IN_POW):
10687 return fold_builtin_pow (fndecl, arg0, arg1, type);
10689 CASE_FLT_FN (BUILT_IN_POWI):
10690 return fold_builtin_powi (fndecl, arg0, arg1, type);
10692 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10693 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10695 CASE_FLT_FN (BUILT_IN_FMIN):
10696 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10698 CASE_FLT_FN (BUILT_IN_FMAX):
10699 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10701 case BUILT_IN_ISGREATER:
10702 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10703 case BUILT_IN_ISGREATEREQUAL:
10704 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10705 case BUILT_IN_ISLESS:
10706 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10707 case BUILT_IN_ISLESSEQUAL:
10708 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10709 case BUILT_IN_ISLESSGREATER:
10710 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10711 case BUILT_IN_ISUNORDERED:
10712 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10713 NOP_EXPR);
10715 /* We do the folding for va_start in the expander. */
10716 case BUILT_IN_VA_START:
10717 break;
10719 case BUILT_IN_SPRINTF:
10720 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10722 case BUILT_IN_OBJECT_SIZE:
10723 return fold_builtin_object_size (arg0, arg1);
10725 case BUILT_IN_PRINTF:
10726 case BUILT_IN_PRINTF_UNLOCKED:
10727 case BUILT_IN_VPRINTF:
10728 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10730 case BUILT_IN_PRINTF_CHK:
10731 case BUILT_IN_VPRINTF_CHK:
10732 if (!validate_arg (arg0, INTEGER_TYPE)
10733 || TREE_SIDE_EFFECTS (arg0))
10734 return NULL_TREE;
10735 else
10736 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10737 break;
10739 case BUILT_IN_FPRINTF:
10740 case BUILT_IN_FPRINTF_UNLOCKED:
10741 case BUILT_IN_VFPRINTF:
10742 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10743 ignore, fcode);
10745 default:
10746 break;
10748 return NULL_TREE;
10751 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10752 and ARG2. IGNORE is true if the result of the function call is ignored.
10753 This function returns NULL_TREE if no simplification was possible. */
10755 static tree
10756 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10758 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10759 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10760 switch (fcode)
10763 CASE_FLT_FN (BUILT_IN_SINCOS):
10764 return fold_builtin_sincos (arg0, arg1, arg2);
10766 CASE_FLT_FN (BUILT_IN_FMA):
10767 if (validate_arg (arg0, REAL_TYPE)
10768 && validate_arg (arg1, REAL_TYPE)
10769 && validate_arg (arg2, REAL_TYPE))
10770 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10771 break;
10773 CASE_FLT_FN (BUILT_IN_REMQUO):
10774 if (validate_arg (arg0, REAL_TYPE)
10775 && validate_arg (arg1, REAL_TYPE)
10776 && validate_arg (arg2, POINTER_TYPE))
10777 return do_mpfr_remquo (arg0, arg1, arg2);
10778 break;
10780 case BUILT_IN_MEMSET:
10781 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10783 case BUILT_IN_BCOPY:
10784 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10786 case BUILT_IN_MEMCPY:
10787 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10789 case BUILT_IN_MEMPCPY:
10790 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10792 case BUILT_IN_MEMMOVE:
10793 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10795 case BUILT_IN_STRNCAT:
10796 return fold_builtin_strncat (arg0, arg1, arg2);
10798 case BUILT_IN_STRNCPY:
10799 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10801 case BUILT_IN_STRNCMP:
10802 return fold_builtin_strncmp (arg0, arg1, arg2);
10804 case BUILT_IN_MEMCHR:
10805 return fold_builtin_memchr (arg0, arg1, arg2, type);
10807 case BUILT_IN_BCMP:
10808 case BUILT_IN_MEMCMP:
10809 return fold_builtin_memcmp (arg0, arg1, arg2);
10811 case BUILT_IN_SPRINTF:
10812 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10814 case BUILT_IN_STRCPY_CHK:
10815 case BUILT_IN_STPCPY_CHK:
10816 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10817 ignore, fcode);
10819 case BUILT_IN_STRCAT_CHK:
10820 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10822 case BUILT_IN_PRINTF_CHK:
10823 case BUILT_IN_VPRINTF_CHK:
10824 if (!validate_arg (arg0, INTEGER_TYPE)
10825 || TREE_SIDE_EFFECTS (arg0))
10826 return NULL_TREE;
10827 else
10828 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10829 break;
10831 case BUILT_IN_FPRINTF:
10832 case BUILT_IN_FPRINTF_UNLOCKED:
10833 case BUILT_IN_VFPRINTF:
10834 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10836 case BUILT_IN_FPRINTF_CHK:
10837 case BUILT_IN_VFPRINTF_CHK:
10838 if (!validate_arg (arg1, INTEGER_TYPE)
10839 || TREE_SIDE_EFFECTS (arg1))
10840 return NULL_TREE;
10841 else
10842 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10843 ignore, fcode);
10845 default:
10846 break;
10848 return NULL_TREE;
10851 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10852 ARG2, and ARG3. IGNORE is true if the result of the function call is
10853 ignored. This function returns NULL_TREE if no simplification was
10854 possible. */
10856 static tree
10857 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10858 bool ignore)
10860 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10862 switch (fcode)
10864 case BUILT_IN_MEMCPY_CHK:
10865 case BUILT_IN_MEMPCPY_CHK:
10866 case BUILT_IN_MEMMOVE_CHK:
10867 case BUILT_IN_MEMSET_CHK:
10868 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10869 NULL_TREE, ignore,
10870 DECL_FUNCTION_CODE (fndecl));
10872 case BUILT_IN_STRNCPY_CHK:
10873 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10875 case BUILT_IN_STRNCAT_CHK:
10876 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10878 case BUILT_IN_FPRINTF_CHK:
10879 case BUILT_IN_VFPRINTF_CHK:
10880 if (!validate_arg (arg1, INTEGER_TYPE)
10881 || TREE_SIDE_EFFECTS (arg1))
10882 return NULL_TREE;
10883 else
10884 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10885 ignore, fcode);
10886 break;
10888 default:
10889 break;
10891 return NULL_TREE;
10894 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10895 arguments, where NARGS <= 4. IGNORE is true if the result of the
10896 function call is ignored. This function returns NULL_TREE if no
10897 simplification was possible. Note that this only folds builtins with
10898 fixed argument patterns. Foldings that do varargs-to-varargs
10899 transformations, or that match calls with more than 4 arguments,
10900 need to be handled with fold_builtin_varargs instead. */
10902 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10904 static tree
10905 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10907 tree ret = NULL_TREE;
10909 switch (nargs)
10911 case 0:
10912 ret = fold_builtin_0 (fndecl, ignore);
10913 break;
10914 case 1:
10915 ret = fold_builtin_1 (fndecl, args[0], ignore);
10916 break;
10917 case 2:
10918 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10919 break;
10920 case 3:
10921 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10922 break;
10923 case 4:
10924 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10925 ignore);
10926 break;
10927 default:
10928 break;
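/* Wrap the folded result in a NOP_EXPR and set TREE_NO_WARNING so that
replacing the original call does not cause spurious "statement without
effect" warnings. */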
10930 if (ret)
10932 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10933 TREE_NO_WARNING (ret) = 1;
10934 return ret;
10936 return NULL_TREE;
10939 /* Builtins with folding operations that operate on "..." arguments
10940 need special handling; we need to store the arguments in a convenient
10941 data structure before attempting any folding. Fortunately there are
10942 only a few builtins that fall into this category. FNDECL is the
10943 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10944 result of the function call is ignored. */
10946 static tree
10947 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10949 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10950 tree ret = NULL_TREE;
10952 switch (fcode)
10954 case BUILT_IN_SPRINTF_CHK:
10955 case BUILT_IN_VSPRINTF_CHK:
10956 ret = fold_builtin_sprintf_chk (exp, fcode);
10957 break;
10959 case BUILT_IN_SNPRINTF_CHK:
10960 case BUILT_IN_VSNPRINTF_CHK:
10961 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10962 break;
10964 case BUILT_IN_FPCLASSIFY:
10965 ret = fold_builtin_fpclassify (exp);
10966 break;
10968 default:
10969 break;
10971 if (ret)
10973 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10974 TREE_NO_WARNING (ret) = 1;
10975 return ret;
10977 return NULL_TREE;
10980 /* Return true if FNDECL shouldn't be folded right now.
10981 If a built-in function has an inline always_inline wrapper,
10982 defer folding it until after always_inline functions have
10983 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
10984 might not be performed. */
10986 static bool
10987 avoid_folding_inline_builtin (tree fndecl)
10989 return (DECL_DECLARED_INLINE_P (fndecl)
10990 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10991 && cfun
10992 && !cfun->always_inline_functions_inlined
10993 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10996 /* A wrapper function for builtin folding that prevents warnings for
10997 "statement without effect" and the like, caused by removing the
10998 call node earlier than the warning is generated. */
11000 tree
11001 fold_call_expr (tree exp, bool ignore)
11003 tree ret = NULL_TREE;
11004 tree fndecl = get_callee_fndecl (exp);
11005 if (fndecl
11006 && TREE_CODE (fndecl) == FUNCTION_DECL
11007 && DECL_BUILT_IN (fndecl)
11008 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11009 yet. Defer folding until we see all the arguments
11010 (after inlining). */
11011 && !CALL_EXPR_VA_ARG_PACK (exp))
11013 int nargs = call_expr_nargs (exp);
11015 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11016 instead last argument is __builtin_va_arg_pack (). Defer folding
11017 even in that case, until arguments are finalized. */
11018 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11020 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11021 if (fndecl2
11022 && TREE_CODE (fndecl2) == FUNCTION_DECL
11023 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11024 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11025 return NULL_TREE;
11028 if (avoid_folding_inline_builtin (fndecl))
11029 return NULL_TREE;
11031 /* FIXME: Don't use a list in this interface. */
11032 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11033 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
11034 else
11036 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11038 tree *args = CALL_EXPR_ARGP (exp);
11039 ret = fold_builtin_n (fndecl, args, nargs, ignore);
11041 if (!ret)
11042 ret = fold_builtin_varargs (fndecl, exp, ignore);
11043 if (ret)
11045 /* Propagate location information from original call to
11046 expansion of builtin. Otherwise things like
11047 maybe_emit_chk_warning, that operate on the expansion
11048 of a builtin, will use the wrong location information. */
11049 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
11051 tree realret = ret;
11052 if (TREE_CODE (ret) == NOP_EXPR)
11053 realret = TREE_OPERAND (ret, 0);
11054 if (CAN_HAVE_LOCATION_P (realret)
11055 && !EXPR_HAS_LOCATION (realret))
11056 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
11058 return ret;
11062 return NULL_TREE;
11065 /* Conveniently construct a function call expression. FNDECL names the
11066 function to be called and ARGLIST is a TREE_LIST of arguments. */
11068 tree
11069 build_function_call_expr (tree fndecl, tree arglist)
11071 tree fntype = TREE_TYPE (fndecl);
11072 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11073 int n = list_length (arglist);
11074 tree *argarray = (tree *) alloca (n * sizeof (tree));
11075 int i;
11077 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
11078 argarray[i] = TREE_VALUE (arglist);
11079 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
11082 /* Conveniently construct a function call expression. FNDECL names the
11083 function to be called, N is the number of arguments, and the "..."
11084 parameters are the argument expressions. */
11086 tree
11087 build_call_expr (tree fndecl, int n, ...)
11089 va_list ap;
11090 tree fntype = TREE_TYPE (fndecl);
11091 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11092 tree *argarray = (tree *) alloca (n * sizeof (tree));
11093 int i;
11095 va_start (ap, n);
11096 for (i = 0; i < n; i++)
11097 argarray[i] = va_arg (ap, tree);
11098 va_end (ap);
11099 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
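/* A minimal usage sketch of the helper above, mirroring how the string
   folders later in this file build replacement calls (assuming the
   implicit strcat decl has been initialized):

     tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
     if (fn)
       return build_call_expr (fn, 2, dst, src);  */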
11102 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11103 N arguments are passed in the array ARGARRAY. */
11105 tree
11106 fold_builtin_call_array (tree type,
11107 tree fn,
11108 int n,
11109 tree *argarray)
11111 tree ret = NULL_TREE;
11112 int i;
11113 tree exp;
11115 if (TREE_CODE (fn) == ADDR_EXPR)
11117 tree fndecl = TREE_OPERAND (fn, 0);
11118 if (TREE_CODE (fndecl) == FUNCTION_DECL
11119 && DECL_BUILT_IN (fndecl))
11121 /* If last argument is __builtin_va_arg_pack (), arguments to this
11122 function are not finalized yet. Defer folding until they are. */
11123 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11125 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11126 if (fndecl2
11127 && TREE_CODE (fndecl2) == FUNCTION_DECL
11128 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11129 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11130 return build_call_array (type, fn, n, argarray);
11132 if (avoid_folding_inline_builtin (fndecl))
11133 return build_call_array (type, fn, n, argarray);
11134 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11136 tree arglist = NULL_TREE;
11137 for (i = n - 1; i >= 0; i--)
11138 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
11139 ret = targetm.fold_builtin (fndecl, arglist, false);
11140 if (ret)
11141 return ret;
11142 return build_call_array (type, fn, n, argarray);
11144 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11146 /* First try the transformations that don't require consing up
11147 an exp. */
11148 ret = fold_builtin_n (fndecl, argarray, n, false);
11149 if (ret)
11150 return ret;
11153 /* If we got this far, we need to build an exp. */
11154 exp = build_call_array (type, fn, n, argarray);
11155 ret = fold_builtin_varargs (fndecl, exp, false);
11156 return ret ? ret : exp;
11160 return build_call_array (type, fn, n, argarray);
11163 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11164 along with N new arguments specified as the "..." parameters. SKIP
11165 is the number of arguments in EXP to be omitted. This function is used
11166 to do varargs-to-varargs transformations. */
11168 static tree
11169 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
11171 int oldnargs = call_expr_nargs (exp);
11172 int nargs = oldnargs - skip + n;
11173 tree fntype = TREE_TYPE (fndecl);
11174 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11175 tree *buffer;
11177 if (n > 0)
11179 int i, j;
11180 va_list ap;
11182 buffer = XALLOCAVEC (tree, nargs);
11183 va_start (ap, n);
11184 for (i = 0; i < n; i++)
11185 buffer[i] = va_arg (ap, tree);
11186 va_end (ap);
11187 for (j = skip; j < oldnargs; j++, i++)
11188 buffer[i] = CALL_EXPR_ARG (exp, j);
11190 else
11191 buffer = CALL_EXPR_ARGP (exp) + skip;
11193 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
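/* Usage sketch: the __sprintf_chk folder later in this file calls this
   helper to drop the flag and size arguments while keeping the trailing
   "..." arguments:

     return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);

   i.e. skip the first four arguments of EXP and prepend DEST and FMT.  */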
11196 /* Validate a single argument ARG against a tree code CODE representing
11197 a type. */
11199 static bool
11200 validate_arg (const_tree arg, enum tree_code code)
11202 if (!arg)
11203 return false;
11204 else if (code == POINTER_TYPE)
11205 return POINTER_TYPE_P (TREE_TYPE (arg));
11206 else if (code == INTEGER_TYPE)
11207 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11208 return code == TREE_CODE (TREE_TYPE (arg));
11211 /* This function validates the types of a function call argument list
11212 against a specified list of tree_codes. If the last specifier is a 0,
11213 that represents an ellipsis; otherwise the last specifier must be a
11214 VOID_TYPE.
11216 This is the GIMPLE version of validate_arglist. Eventually we want to
11217 completely convert builtins.c to work from GIMPLEs and the tree based
11218 validate_arglist will then be removed. */
11220 bool
11221 validate_gimple_arglist (const_gimple call, ...)
11223 enum tree_code code;
11224 bool res = 0;
11225 va_list ap;
11226 const_tree arg;
11227 size_t i;
11229 va_start (ap, call);
11230 i = 0;
11234 code = (enum tree_code) va_arg (ap, int);
11235 switch (code)
11237 case 0:
11238 /* This signifies an ellipsis; any further arguments are all ok. */
11239 res = true;
11240 goto end;
11241 case VOID_TYPE:
11242 /* This signifies an endlink, if no arguments remain, return
11243 true, otherwise return false. */
11244 res = (i == gimple_call_num_args (call));
11245 goto end;
11246 default:
11247 /* If no parameters remain or the parameter's code does not
11248 match the specified code, return false. Otherwise continue
11249 checking any remaining arguments. */
11250 arg = gimple_call_arg (call, i++);
11251 if (!validate_arg (arg, code))
11252 goto end;
11253 break;
11256 while (1);
11258 /* We need gotos here since we can only have one VA_CLOSE in a
11259 function. */
11260 end: ;
11261 va_end (ap);
11263 return res;
11266 /* This function validates the types of a function call argument list
11267 against a specified list of tree_codes. If the last specifier is a 0,
11268 that represents an ellipsis; otherwise the last specifier must be a
11269 VOID_TYPE. */
11271 bool
11272 validate_arglist (const_tree callexpr, ...)
11274 enum tree_code code;
11275 bool res = 0;
11276 va_list ap;
11277 const_call_expr_arg_iterator iter;
11278 const_tree arg;
11280 va_start (ap, callexpr);
11281 init_const_call_expr_arg_iterator (callexpr, &iter);
11285 code = (enum tree_code) va_arg (ap, int);
11286 switch (code)
11288 case 0:
11289 /* This signifies an ellipsis; any further arguments are all ok. */
11290 res = true;
11291 goto end;
11292 case VOID_TYPE:
11293 /* This signifies an endlink, if no arguments remain, return
11294 true, otherwise return false. */
11295 res = !more_const_call_expr_args_p (&iter);
11296 goto end;
11297 default:
11298 /* If no parameters remain or the parameter's code does not
11299 match the specified code, return false. Otherwise continue
11300 checking any remaining arguments. */
11301 arg = next_const_call_expr_arg (&iter);
11302 if (!validate_arg (arg, code))
11303 goto end;
11304 break;
11307 while (1);
11309 /* We need gotos here since we can only have one VA_CLOSE in a
11310 function. */
11311 end: ;
11312 va_end (ap);
11314 return res;
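/* Usage sketch, as seen elsewhere in this file: a trailing VOID_TYPE ends a
   fixed argument list, while a trailing 0 would accept any further arguments:

     if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
       return NULL_RTX;  */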
11317 /* Default target-specific builtin expander that does nothing. */
11319 rtx
11320 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11321 rtx target ATTRIBUTE_UNUSED,
11322 rtx subtarget ATTRIBUTE_UNUSED,
11323 enum machine_mode mode ATTRIBUTE_UNUSED,
11324 int ignore ATTRIBUTE_UNUSED)
11326 return NULL_RTX;
11329 /* Returns true if EXP represents data that would potentially reside
11330 in a readonly section. */
11332 static bool
11333 readonly_data_expr (tree exp)
11335 STRIP_NOPS (exp);
11337 if (TREE_CODE (exp) != ADDR_EXPR)
11338 return false;
11340 exp = get_base_address (TREE_OPERAND (exp, 0));
11341 if (!exp)
11342 return false;
11344 /* Make sure we call decl_readonly_section only for trees it
11345 can handle (since it returns true for everything it doesn't
11346 understand). */
11347 if (TREE_CODE (exp) == STRING_CST
11348 || TREE_CODE (exp) == CONSTRUCTOR
11349 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11350 return decl_readonly_section (exp, 0);
11351 else
11352 return false;
11355 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11356 to the call, and TYPE is its return type.
11358 Return NULL_TREE if no simplification was possible, otherwise return the
11359 simplified form of the call as a tree.
11361 The simplified form may be a constant or other expression which
11362 computes the same value, but in a more efficient manner (including
11363 calls to other builtin functions).
11365 The call may contain arguments which need to be evaluated, but
11366 which are not useful to determine the result of the call. In
11367 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11368 COMPOUND_EXPR will be an argument which must be evaluated.
11369 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11370 COMPOUND_EXPR in the chain will contain the tree for the simplified
11371 form of the builtin function call. */
11373 static tree
11374 fold_builtin_strstr (tree s1, tree s2, tree type)
11376 if (!validate_arg (s1, POINTER_TYPE)
11377 || !validate_arg (s2, POINTER_TYPE))
11378 return NULL_TREE;
11379 else
11381 tree fn;
11382 const char *p1, *p2;
11384 p2 = c_getstr (s2);
11385 if (p2 == NULL)
11386 return NULL_TREE;
11388 p1 = c_getstr (s1);
11389 if (p1 != NULL)
11391 const char *r = strstr (p1, p2);
11392 tree tem;
11394 if (r == NULL)
11395 return build_int_cst (TREE_TYPE (s1), 0);
11397 /* Return an offset into the constant string argument. */
11398 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11399 s1, size_int (r - p1));
11400 return fold_convert (type, tem);
11403 /* The argument is const char *, and the result is char *, so we need
11404 a type conversion here to avoid a warning. */
11405 if (p2[0] == '\0')
11406 return fold_convert (type, s1);
11408 if (p2[1] != '\0')
11409 return NULL_TREE;
11411 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11412 if (!fn)
11413 return NULL_TREE;
11415 /* New argument list transforming strstr(s1, s2) to
11416 strchr(s1, s2[0]). */
11417 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
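/* For illustration, the transformations above written as hypothetical
   user-level code:

     strstr (s, "")          =>  (char *) s
     strstr (s, "x")         =>  strchr (s, 'x')
     strstr ("abcde", "cd")  =>  a pointer two characters past the start of
                                 the constant string  */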
11421 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11422 the call, and TYPE is its return type.
11424 Return NULL_TREE if no simplification was possible, otherwise return the
11425 simplified form of the call as a tree.
11427 The simplified form may be a constant or other expression which
11428 computes the same value, but in a more efficient manner (including
11429 calls to other builtin functions).
11431 The call may contain arguments which need to be evaluated, but
11432 which are not useful to determine the result of the call. In
11433 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11434 COMPOUND_EXPR will be an argument which must be evaluated.
11435 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11436 COMPOUND_EXPR in the chain will contain the tree for the simplified
11437 form of the builtin function call. */
11439 static tree
11440 fold_builtin_strchr (tree s1, tree s2, tree type)
11442 if (!validate_arg (s1, POINTER_TYPE)
11443 || !validate_arg (s2, INTEGER_TYPE))
11444 return NULL_TREE;
11445 else
11447 const char *p1;
11449 if (TREE_CODE (s2) != INTEGER_CST)
11450 return NULL_TREE;
11452 p1 = c_getstr (s1);
11453 if (p1 != NULL)
11455 char c;
11456 const char *r;
11457 tree tem;
11459 if (target_char_cast (s2, &c))
11460 return NULL_TREE;
11462 r = strchr (p1, c);
11464 if (r == NULL)
11465 return build_int_cst (TREE_TYPE (s1), 0);
11467 /* Return an offset into the constant string argument. */
11468 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11469 s1, size_int (r - p1));
11470 return fold_convert (type, tem);
11472 return NULL_TREE;
11476 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11477 the call, and TYPE is its return type.
11479 Return NULL_TREE if no simplification was possible, otherwise return the
11480 simplified form of the call as a tree.
11482 The simplified form may be a constant or other expression which
11483 computes the same value, but in a more efficient manner (including
11484 calls to other builtin functions).
11486 The call may contain arguments which need to be evaluated, but
11487 which are not useful to determine the result of the call. In
11488 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11489 COMPOUND_EXPR will be an argument which must be evaluated.
11490 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11491 COMPOUND_EXPR in the chain will contain the tree for the simplified
11492 form of the builtin function call. */
11494 static tree
11495 fold_builtin_strrchr (tree s1, tree s2, tree type)
11497 if (!validate_arg (s1, POINTER_TYPE)
11498 || !validate_arg (s2, INTEGER_TYPE))
11499 return NULL_TREE;
11500 else
11502 tree fn;
11503 const char *p1;
11505 if (TREE_CODE (s2) != INTEGER_CST)
11506 return NULL_TREE;
11508 p1 = c_getstr (s1);
11509 if (p1 != NULL)
11511 char c;
11512 const char *r;
11513 tree tem;
11515 if (target_char_cast (s2, &c))
11516 return NULL_TREE;
11518 r = strrchr (p1, c);
11520 if (r == NULL)
11521 return build_int_cst (TREE_TYPE (s1), 0);
11523 /* Return an offset into the constant string argument. */
11524 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11525 s1, size_int (r - p1));
11526 return fold_convert (type, tem);
11529 if (! integer_zerop (s2))
11530 return NULL_TREE;
11532 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11533 if (!fn)
11534 return NULL_TREE;
11536 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11537 return build_call_expr (fn, 2, s1, s2);
11541 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11542 to the call, and TYPE is its return type.
11544 Return NULL_TREE if no simplification was possible, otherwise return the
11545 simplified form of the call as a tree.
11547 The simplified form may be a constant or other expression which
11548 computes the same value, but in a more efficient manner (including
11549 calls to other builtin functions).
11551 The call may contain arguments which need to be evaluated, but
11552 which are not useful to determine the result of the call. In
11553 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11554 COMPOUND_EXPR will be an argument which must be evaluated.
11555 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11556 COMPOUND_EXPR in the chain will contain the tree for the simplified
11557 form of the builtin function call. */
11559 static tree
11560 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11562 if (!validate_arg (s1, POINTER_TYPE)
11563 || !validate_arg (s2, POINTER_TYPE))
11564 return NULL_TREE;
11565 else
11567 tree fn;
11568 const char *p1, *p2;
11570 p2 = c_getstr (s2);
11571 if (p2 == NULL)
11572 return NULL_TREE;
11574 p1 = c_getstr (s1);
11575 if (p1 != NULL)
11577 const char *r = strpbrk (p1, p2);
11578 tree tem;
11580 if (r == NULL)
11581 return build_int_cst (TREE_TYPE (s1), 0);
11583 /* Return an offset into the constant string argument. */
11584 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11585 s1, size_int (r - p1));
11586 return fold_convert (type, tem);
11589 if (p2[0] == '\0')
11590 /* strpbrk(x, "") == NULL.
11591 Evaluate and ignore s1 in case it had side-effects. */
11592 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11594 if (p2[1] != '\0')
11595 return NULL_TREE; /* Really call strpbrk. */
11597 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11598 if (!fn)
11599 return NULL_TREE;
11601 /* New argument list transforming strpbrk(s1, s2) to
11602 strchr(s1, s2[0]). */
11603 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11607 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11608 to the call.
11610 Return NULL_TREE if no simplification was possible, otherwise return the
11611 simplified form of the call as a tree.
11613 The simplified form may be a constant or other expression which
11614 computes the same value, but in a more efficient manner (including
11615 calls to other builtin functions).
11617 The call may contain arguments which need to be evaluated, but
11618 which are not useful to determine the result of the call. In
11619 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11620 COMPOUND_EXPR will be an argument which must be evaluated.
11621 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11622 COMPOUND_EXPR in the chain will contain the tree for the simplified
11623 form of the builtin function call. */
11625 static tree
11626 fold_builtin_strcat (tree dst, tree src)
11628 if (!validate_arg (dst, POINTER_TYPE)
11629 || !validate_arg (src, POINTER_TYPE))
11630 return NULL_TREE;
11631 else
11633 const char *p = c_getstr (src);
11635 /* If the string length is zero, return the dst parameter. */
11636 if (p && *p == '\0')
11637 return dst;
11639 return NULL_TREE;
11643 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11644 arguments to the call.
11646 Return NULL_TREE if no simplification was possible, otherwise return the
11647 simplified form of the call as a tree.
11649 The simplified form may be a constant or other expression which
11650 computes the same value, but in a more efficient manner (including
11651 calls to other builtin functions).
11653 The call may contain arguments which need to be evaluated, but
11654 which are not useful to determine the result of the call. In
11655 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11656 COMPOUND_EXPR will be an argument which must be evaluated.
11657 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11658 COMPOUND_EXPR in the chain will contain the tree for the simplified
11659 form of the builtin function call. */
11661 static tree
11662 fold_builtin_strncat (tree dst, tree src, tree len)
11664 if (!validate_arg (dst, POINTER_TYPE)
11665 || !validate_arg (src, POINTER_TYPE)
11666 || !validate_arg (len, INTEGER_TYPE))
11667 return NULL_TREE;
11668 else
11670 const char *p = c_getstr (src);
11672 /* If the requested length is zero, or the src parameter string
11673 length is zero, return the dst parameter. */
11674 if (integer_zerop (len) || (p && *p == '\0'))
11675 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11677 /* If the requested len is greater than or equal to the string
11678 length, call strcat. */
11679 if (TREE_CODE (len) == INTEGER_CST && p
11680 && compare_tree_int (len, strlen (p)) >= 0)
11682 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11684 /* If the replacement _DECL isn't initialized, don't do the
11685 transformation. */
11686 if (!fn)
11687 return NULL_TREE;
11689 return build_call_expr (fn, 2, dst, src);
11691 return NULL_TREE;
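/* For illustration (hypothetical user-level code):

     strncat (dst, src, 0)   =>  dst   (src and the length still evaluated)
     strncat (dst, "ab", 5)  =>  strcat (dst, "ab")   since 5 >= strlen ("ab")  */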
11695 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11696 to the call.
11698 Return NULL_TREE if no simplification was possible, otherwise return the
11699 simplified form of the call as a tree.
11701 The simplified form may be a constant or other expression which
11702 computes the same value, but in a more efficient manner (including
11703 calls to other builtin functions).
11705 The call may contain arguments which need to be evaluated, but
11706 which are not useful to determine the result of the call. In
11707 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11708 COMPOUND_EXPR will be an argument which must be evaluated.
11709 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11710 COMPOUND_EXPR in the chain will contain the tree for the simplified
11711 form of the builtin function call. */
11713 static tree
11714 fold_builtin_strspn (tree s1, tree s2)
11716 if (!validate_arg (s1, POINTER_TYPE)
11717 || !validate_arg (s2, POINTER_TYPE))
11718 return NULL_TREE;
11719 else
11721 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11723 /* If both arguments are constants, evaluate at compile-time. */
11724 if (p1 && p2)
11726 const size_t r = strspn (p1, p2);
11727 return size_int (r);
11730 /* If either argument is "", the result is zero. */
11731 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11732 /* Evaluate and ignore both arguments in case either one has
11733 side-effects. */
11734 return omit_two_operands (size_type_node, size_zero_node,
11735 s1, s2);
11736 return NULL_TREE;
11740 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11741 to the call.
11743 Return NULL_TREE if no simplification was possible, otherwise return the
11744 simplified form of the call as a tree.
11746 The simplified form may be a constant or other expression which
11747 computes the same value, but in a more efficient manner (including
11748 calls to other builtin functions).
11750 The call may contain arguments which need to be evaluated, but
11751 which are not useful to determine the result of the call. In
11752 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11753 COMPOUND_EXPR will be an argument which must be evaluated.
11754 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11755 COMPOUND_EXPR in the chain will contain the tree for the simplified
11756 form of the builtin function call. */
11758 static tree
11759 fold_builtin_strcspn (tree s1, tree s2)
11761 if (!validate_arg (s1, POINTER_TYPE)
11762 || !validate_arg (s2, POINTER_TYPE))
11763 return NULL_TREE;
11764 else
11766 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11768 /* If both arguments are constants, evaluate at compile-time. */
11769 if (p1 && p2)
11771 const size_t r = strcspn (p1, p2);
11772 return size_int (r);
11775 /* If the first argument is "", the result is zero. */
11776 if (p1 && *p1 == '\0')
11778 /* Evaluate and ignore argument s2 in case it has
11779 side-effects. */
11780 return omit_one_operand (size_type_node,
11781 size_zero_node, s2);
11784 /* If the second argument is "", return __builtin_strlen(s1). */
11785 if (p2 && *p2 == '\0')
11787 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11789 /* If the replacement _DECL isn't initialized, don't do the
11790 transformation. */
11791 if (!fn)
11792 return NULL_TREE;
11794 return build_call_expr (fn, 1, s1);
11796 return NULL_TREE;
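/* For illustration (hypothetical user-level code):

     strspn ("abc", "ab")  =>  2                  (both arguments constant)
     strspn (s, "")        =>  0                  (arguments still evaluated)
     strcspn ("", s)       =>  0
     strcspn (s, "")       =>  __builtin_strlen (s)  */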
11800 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11801 to the call. IGNORE is true if the value returned
11802 by the builtin will be ignored. UNLOCKED is true if this is
11803 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11804 the known length of the string. Return NULL_TREE if no simplification
11805 was possible. */
11807 tree
11808 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11810 /* If we're using an unlocked function, assume the other unlocked
11811 functions exist explicitly. */
11812 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11813 : implicit_built_in_decls[BUILT_IN_FPUTC];
11814 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11815 : implicit_built_in_decls[BUILT_IN_FWRITE];
11817 /* If the return value is used, don't do the transformation. */
11818 if (!ignore)
11819 return NULL_TREE;
11821 /* Verify the arguments in the original call. */
11822 if (!validate_arg (arg0, POINTER_TYPE)
11823 || !validate_arg (arg1, POINTER_TYPE))
11824 return NULL_TREE;
11826 if (! len)
11827 len = c_strlen (arg0, 0);
11829 /* Get the length of the string passed to fputs. If the length
11830 can't be determined, punt. */
11831 if (!len
11832 || TREE_CODE (len) != INTEGER_CST)
11833 return NULL_TREE;
11835 switch (compare_tree_int (len, 1))
11837 case -1: /* length is 0, delete the call entirely. */
11838 return omit_one_operand (integer_type_node, integer_zero_node, arg1);
11840 case 0: /* length is 1, call fputc. */
11842 const char *p = c_getstr (arg0);
11844 if (p != NULL)
11846 if (fn_fputc)
11847 return build_call_expr (fn_fputc, 2,
11848 build_int_cst (NULL_TREE, p[0]), arg1);
11849 else
11850 return NULL_TREE;
11853 /* FALLTHROUGH */
11854 case 1: /* length is greater than 1, call fwrite. */
11856 /* If optimizing for size, keep fputs. */
11857 if (optimize_function_for_size_p (cfun))
11858 return NULL_TREE;
11859 /* New argument list transforming fputs(string, stream) to
11860 fwrite(string, 1, len, stream). */
11861 if (fn_fwrite)
11862 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11863 else
11864 return NULL_TREE;
11866 default:
11867 gcc_unreachable ();
11869 return NULL_TREE;
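/* For illustration, with the return value ignored (hypothetical user code):

     fputs ("", f)       =>  call removed, F still evaluated
     fputs ("x", f)      =>  fputc ('x', f)
     fputs ("hello", f)  =>  fwrite ("hello", 1, 5, f)   unless optimizing
                                                         for size  */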
11872 /* Fold the next_arg or va_start call EXP. Returns true if an error was
11873 produced, false otherwise. This is done so that we don't output the error
11874 or warning twice or three times. */
11876 bool
11877 fold_builtin_next_arg (tree exp, bool va_start_p)
11879 tree fntype = TREE_TYPE (current_function_decl);
11880 int nargs = call_expr_nargs (exp);
11881 tree arg;
11883 if (TYPE_ARG_TYPES (fntype) == 0
11884 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11885 == void_type_node))
11887 error ("%<va_start%> used in function with fixed args");
11888 return true;
11891 if (va_start_p)
11893 if (va_start_p && (nargs != 2))
11895 error ("wrong number of arguments to function %<va_start%>");
11896 return true;
11898 arg = CALL_EXPR_ARG (exp, 1);
11900 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11901 when we checked the arguments and if needed issued a warning. */
11902 else
11904 if (nargs == 0)
11906 /* Evidently an out of date version of <stdarg.h>; can't validate
11907 va_start's second argument, but can still work as intended. */
11908 warning (0, "%<__builtin_next_arg%> called without an argument");
11909 return true;
11911 else if (nargs > 1)
11913 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11914 return true;
11916 arg = CALL_EXPR_ARG (exp, 0);
11919 if (TREE_CODE (arg) == SSA_NAME)
11920 arg = SSA_NAME_VAR (arg);
11922 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11923 or __builtin_next_arg (0) the first time we see it, after checking
11924 the arguments and if needed issuing a warning. */
11925 if (!integer_zerop (arg))
11927 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11929 /* Strip off all nops for the sake of the comparison. This
11930 is not quite the same as STRIP_NOPS. It does more.
11931 We must also strip off INDIRECT_REF for C++ reference
11932 parameters. */
11933 while (CONVERT_EXPR_P (arg)
11934 || TREE_CODE (arg) == INDIRECT_REF)
11935 arg = TREE_OPERAND (arg, 0);
11936 if (arg != last_parm)
11938 /* FIXME: Sometimes with the tree optimizers we can end up with
11939 something other than the last argument even though the user
11940 used the last argument. We just warn and treat the argument
11941 as if it were the last one, so wrong code may be generated
11942 because of it. */
11943 warning (0, "second parameter of %<va_start%> not last named argument");
11946 /* Undefined by C99 7.15.1.4p4 (va_start):
11947 "If the parameter parmN is declared with the register storage
11948 class, with a function or array type, or with a type that is
11949 not compatible with the type that results after application of
11950 the default argument promotions, the behavior is undefined."
11952 else if (DECL_REGISTER (arg))
11953 warning (0, "undefined behaviour when second parameter of "
11954 "%<va_start%> is declared with %<register%> storage");
11956 /* We want to verify the second parameter just once before the tree
11957 optimizers are run and then avoid keeping it in the tree,
11958 as otherwise we could warn even for correct code like:
11959 void foo (int i, ...)
11960 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11961 if (va_start_p)
11962 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11963 else
11964 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11966 return false;
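/* For illustration, hypothetical user code triggering the diagnostics above:

     void f (int a, int b, ...)
     { va_list ap; va_start (ap, a); ... }  // warning: second parameter of
                                            // va_start not last named argument
     void g (int a)
     { va_list ap; va_start (ap, a); }      // error: va_start used in
                                            // function with fixed args  */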
11970 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11971 ORIG may be null if this is a 2-argument call. We don't attempt to
11972 simplify calls with more than 3 arguments.
11974 Return NULL_TREE if no simplification was possible, otherwise return the
11975 simplified form of the call as a tree. If IGNORED is true, it means that
11976 the caller does not use the returned value of the function. */
11978 static tree
11979 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11981 tree call, retval;
11982 const char *fmt_str = NULL;
11984 /* Verify the required arguments in the original call. We deal with two
11985 types of sprintf() calls: 'sprintf (str, fmt)' and
11986 'sprintf (dest, "%s", orig)'. */
11987 if (!validate_arg (dest, POINTER_TYPE)
11988 || !validate_arg (fmt, POINTER_TYPE))
11989 return NULL_TREE;
11990 if (orig && !validate_arg (orig, POINTER_TYPE))
11991 return NULL_TREE;
11993 /* Check whether the format is a literal string constant. */
11994 fmt_str = c_getstr (fmt);
11995 if (fmt_str == NULL)
11996 return NULL_TREE;
11998 call = NULL_TREE;
11999 retval = NULL_TREE;
12001 if (!init_target_chars ())
12002 return NULL_TREE;
12004 /* If the format doesn't contain % args or %%, use strcpy. */
12005 if (strchr (fmt_str, target_percent) == NULL)
12007 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
12009 if (!fn)
12010 return NULL_TREE;
12012 /* Don't optimize sprintf (buf, "abc", ptr++). */
12013 if (orig)
12014 return NULL_TREE;
12016 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12017 'format' is known to contain no % formats. */
12018 call = build_call_expr (fn, 2, dest, fmt);
12019 if (!ignored)
12020 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
12023 /* If the format is "%s", use strcpy if the result isn't used. */
12024 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12026 tree fn;
12027 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
12029 if (!fn)
12030 return NULL_TREE;
12032 /* Don't crash on sprintf (str1, "%s"). */
12033 if (!orig)
12034 return NULL_TREE;
12036 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12037 if (!ignored)
12039 retval = c_strlen (orig, 1);
12040 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12041 return NULL_TREE;
12043 call = build_call_expr (fn, 2, dest, orig);
12046 if (call && retval)
12048 retval = fold_convert
12049 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
12050 retval);
12051 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12053 else
12054 return call;
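/* For illustration (hypothetical user-level code):

     sprintf (buf, "abc")      =>  strcpy (buf, "abc"), value 3 if used
     sprintf (buf, "%s", str)  =>  strcpy (buf, str) when the result is
                                   ignored (or STR's length is constant)  */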
12057 /* Expand a call EXP to __builtin_object_size. */
12059 static rtx
12060 expand_builtin_object_size (tree exp)
12062 tree ost;
12063 int object_size_type;
12064 tree fndecl = get_callee_fndecl (exp);
12066 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12068 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12069 exp, fndecl);
12070 expand_builtin_trap ();
12071 return const0_rtx;
12074 ost = CALL_EXPR_ARG (exp, 1);
12075 STRIP_NOPS (ost);
12077 if (TREE_CODE (ost) != INTEGER_CST
12078 || tree_int_cst_sgn (ost) < 0
12079 || compare_tree_int (ost, 3) > 0)
12081 error ("%Klast argument of %D is not integer constant between 0 and 3",
12082 exp, fndecl);
12083 expand_builtin_trap ();
12084 return const0_rtx;
12087 object_size_type = tree_low_cst (ost, 0);
12089 return object_size_type < 2 ? constm1_rtx : const0_rtx;
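/* For illustration: when the size cannot be determined at this point, the
   builtin expands to the fallback values returned above, e.g.

     __builtin_object_size (p, 0)  =>  (size_t) -1   (types 0 and 1)
     __builtin_object_size (p, 2)  =>  (size_t) 0    (types 2 and 3)  */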
12092 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12093 FCODE is the BUILT_IN_* to use.
12094 Return NULL_RTX if we failed; the caller should emit a normal call,
12095 otherwise try to get the result in TARGET, if convenient (and in
12096 mode MODE if that's convenient). */
12098 static rtx
12099 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12100 enum built_in_function fcode)
12102 tree dest, src, len, size;
12104 if (!validate_arglist (exp,
12105 POINTER_TYPE,
12106 fcode == BUILT_IN_MEMSET_CHK
12107 ? INTEGER_TYPE : POINTER_TYPE,
12108 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12109 return NULL_RTX;
12111 dest = CALL_EXPR_ARG (exp, 0);
12112 src = CALL_EXPR_ARG (exp, 1);
12113 len = CALL_EXPR_ARG (exp, 2);
12114 size = CALL_EXPR_ARG (exp, 3);
12116 if (! host_integerp (size, 1))
12117 return NULL_RTX;
12119 if (host_integerp (len, 1) || integer_all_onesp (size))
12121 tree fn;
12123 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12125 warning_at (tree_nonartificial_location (exp),
12126 0, "%Kcall to %D will always overflow destination buffer",
12127 exp, get_callee_fndecl (exp));
12128 return NULL_RTX;
12131 fn = NULL_TREE;
12132 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12133 mem{cpy,pcpy,move,set} is available. */
12134 switch (fcode)
12136 case BUILT_IN_MEMCPY_CHK:
12137 fn = built_in_decls[BUILT_IN_MEMCPY];
12138 break;
12139 case BUILT_IN_MEMPCPY_CHK:
12140 fn = built_in_decls[BUILT_IN_MEMPCPY];
12141 break;
12142 case BUILT_IN_MEMMOVE_CHK:
12143 fn = built_in_decls[BUILT_IN_MEMMOVE];
12144 break;
12145 case BUILT_IN_MEMSET_CHK:
12146 fn = built_in_decls[BUILT_IN_MEMSET];
12147 break;
12148 default:
12149 break;
12152 if (! fn)
12153 return NULL_RTX;
12155 fn = build_call_expr (fn, 3, dest, src, len);
12156 STRIP_TYPE_NOPS (fn);
12157 while (TREE_CODE (fn) == COMPOUND_EXPR)
12159 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12160 EXPAND_NORMAL);
12161 fn = TREE_OPERAND (fn, 1);
12163 if (TREE_CODE (fn) == CALL_EXPR)
12164 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12165 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12167 else if (fcode == BUILT_IN_MEMSET_CHK)
12168 return NULL_RTX;
12169 else
12171 unsigned int dest_align
12172 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
12174 /* If DEST is not a pointer type, call the normal function. */
12175 if (dest_align == 0)
12176 return NULL_RTX;
12178 /* If SRC and DEST are the same (and not volatile), do nothing. */
12179 if (operand_equal_p (src, dest, 0))
12181 tree expr;
12183 if (fcode != BUILT_IN_MEMPCPY_CHK)
12185 /* Evaluate and ignore LEN in case it has side-effects. */
12186 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12187 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12190 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12191 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12194 /* __memmove_chk special case. */
12195 if (fcode == BUILT_IN_MEMMOVE_CHK)
12197 unsigned int src_align
12198 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
12200 if (src_align == 0)
12201 return NULL_RTX;
12203 /* If src is categorized for a readonly section we can use
12204 normal __memcpy_chk. */
12205 if (readonly_data_expr (src))
12207 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12208 if (!fn)
12209 return NULL_RTX;
12210 fn = build_call_expr (fn, 4, dest, src, len, size);
12211 STRIP_TYPE_NOPS (fn);
12212 while (TREE_CODE (fn) == COMPOUND_EXPR)
12214 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12215 EXPAND_NORMAL);
12216 fn = TREE_OPERAND (fn, 1);
12218 if (TREE_CODE (fn) == CALL_EXPR)
12219 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12220 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12223 return NULL_RTX;
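/* For illustration (hypothetical user-level code):

     __builtin___memcpy_chk (d, s, 8, 16)   =>  memcpy (d, s, 8)   (8 <= 16)
     __builtin___memcpy_chk (d, s, 32, 16)  =>  warning, call emitted as-is
     __builtin___memcpy_chk (d, s, n, -1)   =>  memcpy (d, s, n)   (size unknown)  */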
12227 /* Emit warning if a buffer overflow is detected at compile time. */
12229 static void
12230 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12232 int is_strlen = 0;
12233 tree len, size;
12234 location_t loc = tree_nonartificial_location (exp);
12236 switch (fcode)
12238 case BUILT_IN_STRCPY_CHK:
12239 case BUILT_IN_STPCPY_CHK:
12240 /* For __strcat_chk the warning will be emitted only if overflowing
12241 by at least strlen (dest) + 1 bytes. */
12242 case BUILT_IN_STRCAT_CHK:
12243 len = CALL_EXPR_ARG (exp, 1);
12244 size = CALL_EXPR_ARG (exp, 2);
12245 is_strlen = 1;
12246 break;
12247 case BUILT_IN_STRNCAT_CHK:
12248 case BUILT_IN_STRNCPY_CHK:
12249 len = CALL_EXPR_ARG (exp, 2);
12250 size = CALL_EXPR_ARG (exp, 3);
12251 break;
12252 case BUILT_IN_SNPRINTF_CHK:
12253 case BUILT_IN_VSNPRINTF_CHK:
12254 len = CALL_EXPR_ARG (exp, 1);
12255 size = CALL_EXPR_ARG (exp, 3);
12256 break;
12257 default:
12258 gcc_unreachable ();
12261 if (!len || !size)
12262 return;
12264 if (! host_integerp (size, 1) || integer_all_onesp (size))
12265 return;
12267 if (is_strlen)
12269 len = c_strlen (len, 1);
12270 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12271 return;
12273 else if (fcode == BUILT_IN_STRNCAT_CHK)
12275 tree src = CALL_EXPR_ARG (exp, 1);
12276 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12277 return;
12278 src = c_strlen (src, 1);
12279 if (! src || ! host_integerp (src, 1))
12281 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12282 exp, get_callee_fndecl (exp));
12283 return;
12285 else if (tree_int_cst_lt (src, size))
12286 return;
12288 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12289 return;
12291 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12292 exp, get_callee_fndecl (exp));
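/* For illustration, hypothetical user code that triggers the warning above
   once fortification has rewritten the call to __strcpy_chk:

     char buf[4];
     strcpy (buf, "too long");  // => __builtin___strcpy_chk (buf, "too long", 4)
                                //    warning: call will always overflow
                                //    destination buffer  */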
12295 /* Emit warning if a buffer overflow is detected at compile time
12296 in __sprintf_chk/__vsprintf_chk calls. */
12298 static void
12299 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12301 tree dest, size, len, fmt, flag;
12302 const char *fmt_str;
12303 int nargs = call_expr_nargs (exp);
12305 /* Verify the required arguments in the original call. */
12307 if (nargs < 4)
12308 return;
12309 dest = CALL_EXPR_ARG (exp, 0);
12310 flag = CALL_EXPR_ARG (exp, 1);
12311 size = CALL_EXPR_ARG (exp, 2);
12312 fmt = CALL_EXPR_ARG (exp, 3);
12314 if (! host_integerp (size, 1) || integer_all_onesp (size))
12315 return;
12317 /* Check whether the format is a literal string constant. */
12318 fmt_str = c_getstr (fmt);
12319 if (fmt_str == NULL)
12320 return;
12322 if (!init_target_chars ())
12323 return;
12325 /* If the format doesn't contain % args or %%, we know its size. */
12326 if (strchr (fmt_str, target_percent) == 0)
12327 len = build_int_cstu (size_type_node, strlen (fmt_str));
12328 /* If the format is "%s" and first ... argument is a string literal,
12329 we know it too. */
12330 else if (fcode == BUILT_IN_SPRINTF_CHK
12331 && strcmp (fmt_str, target_percent_s) == 0)
12333 tree arg;
12335 if (nargs < 5)
12336 return;
12337 arg = CALL_EXPR_ARG (exp, 4);
12338 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12339 return;
12341 len = c_strlen (arg, 1);
12342 if (!len || ! host_integerp (len, 1))
12343 return;
12345 else
12346 return;
12348 if (! tree_int_cst_lt (len, size))
12349 warning_at (tree_nonartificial_location (exp),
12350 0, "%Kcall to %D will always overflow destination buffer",
12351 exp, get_callee_fndecl (exp));
12354 /* Emit warning if a free is called with address of a variable. */
12356 static void
12357 maybe_emit_free_warning (tree exp)
12359 tree arg = CALL_EXPR_ARG (exp, 0);
12361 STRIP_NOPS (arg);
12362 if (TREE_CODE (arg) != ADDR_EXPR)
12363 return;
12365 arg = get_base_address (TREE_OPERAND (arg, 0));
12366 if (arg == NULL || INDIRECT_REF_P (arg))
12367 return;
12369 if (SSA_VAR_P (arg))
12370 warning_at (tree_nonartificial_location (exp),
12371 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12372 else
12373 warning_at (tree_nonartificial_location (exp),
12374 0, "%Kattempt to free a non-heap object", exp);
12377 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12378 if possible. */
12380 tree
12381 fold_builtin_object_size (tree ptr, tree ost)
12383 tree ret = NULL_TREE;
12384 int object_size_type;
12386 if (!validate_arg (ptr, POINTER_TYPE)
12387 || !validate_arg (ost, INTEGER_TYPE))
12388 return NULL_TREE;
12390 STRIP_NOPS (ost);
12392 if (TREE_CODE (ost) != INTEGER_CST
12393 || tree_int_cst_sgn (ost) < 0
12394 || compare_tree_int (ost, 3) > 0)
12395 return NULL_TREE;
12397 object_size_type = tree_low_cst (ost, 0);
12399 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12400 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12401 and (size_t) 0 for types 2 and 3. */
12402 if (TREE_SIDE_EFFECTS (ptr))
12403 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12405 if (TREE_CODE (ptr) == ADDR_EXPR)
12406 ret = build_int_cstu (size_type_node,
12407 compute_builtin_object_size (ptr, object_size_type));
12409 else if (TREE_CODE (ptr) == SSA_NAME)
12411 unsigned HOST_WIDE_INT bytes;
12413 /* If object size is not known yet, delay folding until
12414 later. Maybe subsequent passes will help determining
12415 it. */
12416 bytes = compute_builtin_object_size (ptr, object_size_type);
12417 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12418 ? -1 : 0))
12419 ret = build_int_cstu (size_type_node, bytes);
12422 if (ret)
12424 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12425 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12426 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12427 ret = NULL_TREE;
12430 return ret;
12433 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12434 DEST, SRC, LEN, and SIZE are the arguments to the call.
12435 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12436 code of the builtin. If MAXLEN is not NULL, it is maximum length
12437 passed as third argument. */
12439 tree
12440 fold_builtin_memory_chk (tree fndecl,
12441 tree dest, tree src, tree len, tree size,
12442 tree maxlen, bool ignore,
12443 enum built_in_function fcode)
12445 tree fn;
12447 if (!validate_arg (dest, POINTER_TYPE)
12448 || !validate_arg (src,
12449 (fcode == BUILT_IN_MEMSET_CHK
12450 ? INTEGER_TYPE : POINTER_TYPE))
12451 || !validate_arg (len, INTEGER_TYPE)
12452 || !validate_arg (size, INTEGER_TYPE))
12453 return NULL_TREE;
12455 /* If SRC and DEST are the same (and not volatile), return DEST
12456 (resp. DEST+LEN for __mempcpy_chk). */
12457 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12459 if (fcode != BUILT_IN_MEMPCPY_CHK)
12460 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12461 else
12463 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12464 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
12468 if (! host_integerp (size, 1))
12469 return NULL_TREE;
12471 if (! integer_all_onesp (size))
12473 if (! host_integerp (len, 1))
12475 /* If LEN is not constant, try MAXLEN too.
12476 For MAXLEN only allow optimizing into non-_ocs function
12477 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12478 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12480 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12482 /* (void) __mempcpy_chk () can be optimized into
12483 (void) __memcpy_chk (). */
12484 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12485 if (!fn)
12486 return NULL_TREE;
12488 return build_call_expr (fn, 4, dest, src, len, size);
12490 return NULL_TREE;
12493 else
12494 maxlen = len;
12496 if (tree_int_cst_lt (size, maxlen))
12497 return NULL_TREE;
12500 fn = NULL_TREE;
12501 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12502 mem{cpy,pcpy,move,set} is available. */
12503 switch (fcode)
12505 case BUILT_IN_MEMCPY_CHK:
12506 fn = built_in_decls[BUILT_IN_MEMCPY];
12507 break;
12508 case BUILT_IN_MEMPCPY_CHK:
12509 fn = built_in_decls[BUILT_IN_MEMPCPY];
12510 break;
12511 case BUILT_IN_MEMMOVE_CHK:
12512 fn = built_in_decls[BUILT_IN_MEMMOVE];
12513 break;
12514 case BUILT_IN_MEMSET_CHK:
12515 fn = built_in_decls[BUILT_IN_MEMSET];
12516 break;
12517 default:
12518 break;
12521 if (!fn)
12522 return NULL_TREE;
12524 return build_call_expr (fn, 3, dest, src, len);
12527 /* Fold a call to the __st[rp]cpy_chk builtin.
12528 DEST, SRC, and SIZE are the arguments to the call.
12529 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12530 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12531 strings passed as second argument. */
12533 tree
12534 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12535 tree maxlen, bool ignore,
12536 enum built_in_function fcode)
12538 tree len, fn;
12540 if (!validate_arg (dest, POINTER_TYPE)
12541 || !validate_arg (src, POINTER_TYPE)
12542 || !validate_arg (size, INTEGER_TYPE))
12543 return NULL_TREE;
12545 /* If SRC and DEST are the same (and not volatile), return DEST. */
12546 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12547 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12549 if (! host_integerp (size, 1))
12550 return NULL_TREE;
12552 if (! integer_all_onesp (size))
12554 len = c_strlen (src, 1);
12555 if (! len || ! host_integerp (len, 1))
12557 /* If LEN is not constant, try MAXLEN too.
12558 For MAXLEN only allow optimizing into non-_ocs function
12559 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12560 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12562 if (fcode == BUILT_IN_STPCPY_CHK)
12564 if (! ignore)
12565 return NULL_TREE;
12567 /* If return value of __stpcpy_chk is ignored,
12568 optimize into __strcpy_chk. */
12569 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12570 if (!fn)
12571 return NULL_TREE;
12573 return build_call_expr (fn, 3, dest, src, size);
12576 if (! len || TREE_SIDE_EFFECTS (len))
12577 return NULL_TREE;
12579 /* If c_strlen returned something, but not a constant,
12580 transform __strcpy_chk into __memcpy_chk. */
12581 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12582 if (!fn)
12583 return NULL_TREE;
12585 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12586 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12587 build_call_expr (fn, 4,
12588 dest, src, len, size));
12591 else
12592 maxlen = len;
12594 if (! tree_int_cst_lt (maxlen, size))
12595 return NULL_TREE;
12598 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12599 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12600 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12601 if (!fn)
12602 return NULL_TREE;
12604 return build_call_expr (fn, 2, dest, src);
12607 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12608 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12609 length passed as third argument. */
12611 tree
12612 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12613 tree maxlen)
12615 tree fn;
12617 if (!validate_arg (dest, POINTER_TYPE)
12618 || !validate_arg (src, POINTER_TYPE)
12619 || !validate_arg (len, INTEGER_TYPE)
12620 || !validate_arg (size, INTEGER_TYPE))
12621 return NULL_TREE;
12623 if (! host_integerp (size, 1))
12624 return NULL_TREE;
12626 if (! integer_all_onesp (size))
12628 if (! host_integerp (len, 1))
12630 /* If LEN is not constant, try MAXLEN too.
12631 For MAXLEN only allow optimizing into non-_ocs function
12632 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12633 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12634 return NULL_TREE;
12636 else
12637 maxlen = len;
12639 if (tree_int_cst_lt (size, maxlen))
12640 return NULL_TREE;
12643 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12644 fn = built_in_decls[BUILT_IN_STRNCPY];
12645 if (!fn)
12646 return NULL_TREE;
12648 return build_call_expr (fn, 3, dest, src, len);
12651 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12652 are the arguments to the call. */
12654 static tree
12655 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12657 tree fn;
12658 const char *p;
12660 if (!validate_arg (dest, POINTER_TYPE)
12661 || !validate_arg (src, POINTER_TYPE)
12662 || !validate_arg (size, INTEGER_TYPE))
12663 return NULL_TREE;
12665 p = c_getstr (src);
12666 /* If the SRC parameter is "", return DEST. */
12667 if (p && *p == '\0')
12668 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12670 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12671 return NULL_TREE;
12673 /* If __builtin_strcat_chk is used, assume strcat is available. */
12674 fn = built_in_decls[BUILT_IN_STRCAT];
12675 if (!fn)
12676 return NULL_TREE;
12678 return build_call_expr (fn, 2, dest, src);
12681 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12682 LEN, and SIZE. */
12684 static tree
12685 fold_builtin_strncat_chk (tree fndecl,
12686 tree dest, tree src, tree len, tree size)
12688 tree fn;
12689 const char *p;
12691 if (!validate_arg (dest, POINTER_TYPE)
12692 || !validate_arg (src, POINTER_TYPE)
12693 || !validate_arg (len, INTEGER_TYPE)
12694 || !validate_arg (size, INTEGER_TYPE))
12695 return NULL_TREE;
12697 p = c_getstr (src);
12698 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12699 if (p && *p == '\0')
12700 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12701 else if (integer_zerop (len))
12702 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12704 if (! host_integerp (size, 1))
12705 return NULL_TREE;
12707 if (! integer_all_onesp (size))
12709 tree src_len = c_strlen (src, 1);
12710 if (src_len
12711 && host_integerp (src_len, 1)
12712 && host_integerp (len, 1)
12713 && ! tree_int_cst_lt (len, src_len))
12715 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12716 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12717 if (!fn)
12718 return NULL_TREE;
12720 return build_call_expr (fn, 3, dest, src, size);
12722 return NULL_TREE;
12725 /* If __builtin_strncat_chk is used, assume strncat is available. */
12726 fn = built_in_decls[BUILT_IN_STRNCAT];
12727 if (!fn)
12728 return NULL_TREE;
12730 return build_call_expr (fn, 3, dest, src, len);
12733 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12734 a normal call should be emitted rather than expanding the function
12735 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12737 static tree
12738 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12740 tree dest, size, len, fn, fmt, flag;
12741 const char *fmt_str;
12742 int nargs = call_expr_nargs (exp);
12744 /* Verify the required arguments in the original call. */
12745 if (nargs < 4)
12746 return NULL_TREE;
12747 dest = CALL_EXPR_ARG (exp, 0);
12748 if (!validate_arg (dest, POINTER_TYPE))
12749 return NULL_TREE;
12750 flag = CALL_EXPR_ARG (exp, 1);
12751 if (!validate_arg (flag, INTEGER_TYPE))
12752 return NULL_TREE;
12753 size = CALL_EXPR_ARG (exp, 2);
12754 if (!validate_arg (size, INTEGER_TYPE))
12755 return NULL_TREE;
12756 fmt = CALL_EXPR_ARG (exp, 3);
12757 if (!validate_arg (fmt, POINTER_TYPE))
12758 return NULL_TREE;
12760 if (! host_integerp (size, 1))
12761 return NULL_TREE;
12763 len = NULL_TREE;
12765 if (!init_target_chars ())
12766 return NULL_TREE;
12768 /* Check whether the format is a literal string constant. */
12769 fmt_str = c_getstr (fmt);
12770 if (fmt_str != NULL)
12772 /* If the format doesn't contain % args or %%, we know the size. */
12773 if (strchr (fmt_str, target_percent) == 0)
12775 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12776 len = build_int_cstu (size_type_node, strlen (fmt_str));
12778 /* If the format is "%s" and first ... argument is a string literal,
12779 we know the size too. */
12780 else if (fcode == BUILT_IN_SPRINTF_CHK
12781 && strcmp (fmt_str, target_percent_s) == 0)
12783 tree arg;
12785 if (nargs == 5)
12787 arg = CALL_EXPR_ARG (exp, 4);
12788 if (validate_arg (arg, POINTER_TYPE))
12790 len = c_strlen (arg, 1);
12791 if (! len || ! host_integerp (len, 1))
12792 len = NULL_TREE;
12798 if (! integer_all_onesp (size))
12800 if (! len || ! tree_int_cst_lt (len, size))
12801 return NULL_TREE;
12804 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12805 or if format doesn't contain % chars or is "%s". */
12806 if (! integer_zerop (flag))
12808 if (fmt_str == NULL)
12809 return NULL_TREE;
12810 if (strchr (fmt_str, target_percent) != NULL
12811 && strcmp (fmt_str, target_percent_s))
12812 return NULL_TREE;
12815 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12816 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12817 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12818 if (!fn)
12819 return NULL_TREE;
12821 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
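/* For illustration (hypothetical user-level code):

     __builtin___sprintf_chk (buf, 1, 32, "hello")
       =>  sprintf (buf, "hello")             (5 < 32, no % in the format)
     __builtin___sprintf_chk (buf, 0, (size_t) -1, fmt, args)
       =>  sprintf (buf, fmt, args)           (object size unknown, flag 0)  */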
12824 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12825 a normal call should be emitted rather than expanding the function
12826 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12827 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12828 passed as second argument. */
12830 tree
12831 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12832 enum built_in_function fcode)
12834 tree dest, size, len, fn, fmt, flag;
12835 const char *fmt_str;
12837 /* Verify the required arguments in the original call. */
12838 if (call_expr_nargs (exp) < 5)
12839 return NULL_TREE;
12840 dest = CALL_EXPR_ARG (exp, 0);
12841 if (!validate_arg (dest, POINTER_TYPE))
12842 return NULL_TREE;
12843 len = CALL_EXPR_ARG (exp, 1);
12844 if (!validate_arg (len, INTEGER_TYPE))
12845 return NULL_TREE;
12846 flag = CALL_EXPR_ARG (exp, 2);
12847 if (!validate_arg (flag, INTEGER_TYPE))
12848 return NULL_TREE;
12849 size = CALL_EXPR_ARG (exp, 3);
12850 if (!validate_arg (size, INTEGER_TYPE))
12851 return NULL_TREE;
12852 fmt = CALL_EXPR_ARG (exp, 4);
12853 if (!validate_arg (fmt, POINTER_TYPE))
12854 return NULL_TREE;
12856 if (! host_integerp (size, 1))
12857 return NULL_TREE;
12859 if (! integer_all_onesp (size))
12861 if (! host_integerp (len, 1))
12863 /* If LEN is not constant, try MAXLEN too.
12864 For MAXLEN only allow optimizing into non-_ocs function
12865 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12866 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12867 return NULL_TREE;
12869 else
12870 maxlen = len;
12872 if (tree_int_cst_lt (size, maxlen))
12873 return NULL_TREE;
12876 if (!init_target_chars ())
12877 return NULL_TREE;
12879 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12880 or if format doesn't contain % chars or is "%s". */
12881 if (! integer_zerop (flag))
12883 fmt_str = c_getstr (fmt);
12884 if (fmt_str == NULL)
12885 return NULL_TREE;
12886 if (strchr (fmt_str, target_percent) != NULL
12887 && strcmp (fmt_str, target_percent_s))
12888 return NULL_TREE;
12891 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12892 available. */
12893 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12894 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12895 if (!fn)
12896 return NULL_TREE;
12898 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12901 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12902 FMT and ARG are the arguments to the call; we don't fold cases with
12903 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12905 Return NULL_TREE if no simplification was possible, otherwise return the
12906 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12907 code of the function to be simplified. */
12909 static tree
12910 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12911 enum built_in_function fcode)
12913 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12914 const char *fmt_str = NULL;
12916 /* If the return value is used, don't do the transformation. */
12917 if (! ignore)
12918 return NULL_TREE;
12920 /* Verify the required arguments in the original call. */
12921 if (!validate_arg (fmt, POINTER_TYPE))
12922 return NULL_TREE;
12924 /* Check whether the format is a literal string constant. */
12925 fmt_str = c_getstr (fmt);
12926 if (fmt_str == NULL)
12927 return NULL_TREE;
12929 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12931 /* If we're using an unlocked function, assume the other
12932 unlocked functions exist explicitly. */
12933 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12934 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12936 else
12938 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12939 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12942 if (!init_target_chars ())
12943 return NULL_TREE;
12945 if (strcmp (fmt_str, target_percent_s) == 0
12946 || strchr (fmt_str, target_percent) == NULL)
12948 const char *str;
12950 if (strcmp (fmt_str, target_percent_s) == 0)
12952 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12953 return NULL_TREE;
12955 if (!arg || !validate_arg (arg, POINTER_TYPE))
12956 return NULL_TREE;
12958 str = c_getstr (arg);
12959 if (str == NULL)
12960 return NULL_TREE;
12962 else
12964 /* The format specifier doesn't contain any '%' characters. */
12965 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12966 && arg)
12967 return NULL_TREE;
12968 str = fmt_str;
12971 /* If the string was "", printf does nothing. */
12972 if (str[0] == '\0')
12973 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12975 /* If the string has length 1, call putchar. */
12976 if (str[1] == '\0')
12978 /* Given printf("c") (where c is any single character),
12979 convert "c"[0] to an int and pass that to the replacement
12980 function. */
12981 newarg = build_int_cst (NULL_TREE, str[0]);
12982 if (fn_putchar)
12983 call = build_call_expr (fn_putchar, 1, newarg);
12985 else
12987 /* If the string was "string\n", call puts("string"). */
12988 size_t len = strlen (str);
12989 if ((unsigned char)str[len - 1] == target_newline)
12991 /* Create a NUL-terminated string that's one char shorter
12992 than the original, stripping off the trailing '\n'. */
12993 char *newstr = XALLOCAVEC (char, len);
12994 memcpy (newstr, str, len - 1);
12995 newstr[len - 1] = 0;
12997 newarg = build_string_literal (len, newstr);
12998 if (fn_puts)
12999 call = build_call_expr (fn_puts, 1, newarg);
13001 else
13002 /* We'd like to arrange to call fputs(string,stdout) here,
13003 but we need stdout and don't have a way to get it yet. */
13004 return NULL_TREE;
13008 /* The other optimizations can be done only on the non-va_list variants. */
13009 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13010 return NULL_TREE;
13012 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13013 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13015 if (!arg || !validate_arg (arg, POINTER_TYPE))
13016 return NULL_TREE;
13017 if (fn_puts)
13018 call = build_call_expr (fn_puts, 1, arg);
13021 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13022 else if (strcmp (fmt_str, target_percent_c) == 0)
13024 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13025 return NULL_TREE;
13026 if (fn_putchar)
13027 call = build_call_expr (fn_putchar, 1, arg);
13030 if (!call)
13031 return NULL_TREE;
13033 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
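/* Stand-alone illustration (not compiled as part of this file) of the
   rewrites fold_builtin_printf performs; each commented pair shows a call
   and the simpler form it may be folded into.  Only standard C calls are
   used.  */
#if 0  /* illustrative only */
#include <stdio.h>

int
main (void)
{
  const char *s = "world";

  printf ("x");        /* may become: putchar ('x');           */
  printf ("hello\n");  /* may become: puts ("hello");          */
  printf ("%s\n", s);  /* may become: puts (s);                */
  printf ("%c", '!');  /* may become: putchar ('!');           */
  printf ("");         /* folds away; the call evaluates to 0. */
  return 0;
}
#endif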
13036 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
13037 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13038 more than 3 arguments, and ARG may be null in the 2-argument case.
13040 Return NULL_TREE if no simplification was possible, otherwise return the
13041 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13042 code of the function to be simplified. */
13044 static tree
13045 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
13046 enum built_in_function fcode)
13048 tree fn_fputc, fn_fputs, call = NULL_TREE;
13049 const char *fmt_str = NULL;
13051 /* If the return value is used, don't do the transformation. */
13052 if (! ignore)
13053 return NULL_TREE;
13055 /* Verify the required arguments in the original call. */
13056 if (!validate_arg (fp, POINTER_TYPE))
13057 return NULL_TREE;
13058 if (!validate_arg (fmt, POINTER_TYPE))
13059 return NULL_TREE;
13061 /* Check whether the format is a literal string constant. */
13062 fmt_str = c_getstr (fmt);
13063 if (fmt_str == NULL)
13064 return NULL_TREE;
13066 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13068 /* If we're using an unlocked function, assume the other
13069 unlocked functions exist explicitly. */
13070 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
13071 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
13073 else
13075 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
13076 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
13079 if (!init_target_chars ())
13080 return NULL_TREE;
13082 /* If the format doesn't contain % args or %%, use fputs. */
13083 if (strchr (fmt_str, target_percent) == NULL)
13085 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13086 && arg)
13087 return NULL_TREE;
13089 /* If the format specifier was "", fprintf does nothing. */
13090 if (fmt_str[0] == '\0')
13092 /* If FP has side-effects, just wait until gimplification is
13093 done. */
13094 if (TREE_SIDE_EFFECTS (fp))
13095 return NULL_TREE;
13097 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13100 /* When "string" doesn't contain %, replace all cases of
13101 fprintf (fp, string) with fputs (string, fp). The fputs
13102 builtin will take care of special cases like length == 1. */
13103 if (fn_fputs)
13104 call = build_call_expr (fn_fputs, 2, fmt, fp);
13107 /* The other optimizations can be done only on the non-va_list variants. */
13108 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13109 return NULL_TREE;
13111 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13112 else if (strcmp (fmt_str, target_percent_s) == 0)
13114 if (!arg || !validate_arg (arg, POINTER_TYPE))
13115 return NULL_TREE;
13116 if (fn_fputs)
13117 call = build_call_expr (fn_fputs, 2, arg, fp);
13120 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13121 else if (strcmp (fmt_str, target_percent_c) == 0)
13123 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13124 return NULL_TREE;
13125 if (fn_fputc)
13126 call = build_call_expr (fn_fputc, 2, arg, fp);
13129 if (!call)
13130 return NULL_TREE;
13131 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
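/* Companion illustration (not compiled as part of this file) for
   fold_builtin_fprintf: the same idea as the printf folds above, but the
   stream argument is carried over to fputs/fputc.  */
#if 0  /* illustrative only */
#include <stdio.h>

int
main (void)
{
  const char *s = "world";

  fprintf (stderr, "hello ");    /* may become: fputs ("hello ", stderr); */
  fprintf (stderr, "%s", s);     /* may become: fputs (s, stderr);        */
  fprintf (stderr, "%c", '\n');  /* may become: fputc ('\n', stderr);     */
  fprintf (stderr, "");          /* folds away once FP has no side effects. */
  return 0;
}
#endif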
13134 /* Initialize format string characters in the target charset. */
13136 static bool
13137 init_target_chars (void)
13139 static bool init;
13140 if (!init)
13142 target_newline = lang_hooks.to_target_charset ('\n');
13143 target_percent = lang_hooks.to_target_charset ('%');
13144 target_c = lang_hooks.to_target_charset ('c');
13145 target_s = lang_hooks.to_target_charset ('s');
13146 if (target_newline == 0 || target_percent == 0 || target_c == 0
13147 || target_s == 0)
13148 return false;
13150 target_percent_c[0] = target_percent;
13151 target_percent_c[1] = target_c;
13152 target_percent_c[2] = '\0';
13154 target_percent_s[0] = target_percent;
13155 target_percent_s[1] = target_s;
13156 target_percent_s[2] = '\0';
13158 target_percent_s_newline[0] = target_percent;
13159 target_percent_s_newline[1] = target_s;
13160 target_percent_s_newline[2] = target_newline;
13161 target_percent_s_newline[3] = '\0';
13163 init = true;
13165 return true;
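/* Minimal host-side sketch (not compiled as part of this file) of the
   templates init_target_chars builds: "%c", "%s" and "%s\n" expressed in
   the target character set.  The identity function below merely stands in
   for lang_hooks.to_target_charset and is an assumption for illustration;
   on a real cross compiler the mapping can differ (e.g. EBCDIC).  */
#if 0  /* illustrative only */
#include <stdio.h>

static char
to_target (char c)  /* stand-in for lang_hooks.to_target_charset */
{
  return c;
}

int
main (void)
{
  char percent_s_newline[4];

  percent_s_newline[0] = to_target ('%');
  percent_s_newline[1] = to_target ('s');
  percent_s_newline[2] = to_target ('\n');
  percent_s_newline[3] = '\0';
  fputs (percent_s_newline, stdout);  /* prints "%s" followed by a newline */
  return 0;
}
#endif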
13168 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13169 and no overflow/underflow occurred. INEXACT is true if M was not
13170 exactly calculated. TYPE is the tree type for the result. This
13171 function assumes that you cleared the MPFR flags and then
13172 calculated M to see if anything subsequently set a flag prior to
13173 entering this function. Return NULL_TREE if any checks fail. */
13175 static tree
13176 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13178 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13179 overflow/underflow occurred. If -frounding-math, proceed iff the
13180 result of calling FUNC was exact. */
13181 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13182 && (!flag_rounding_math || !inexact))
13184 REAL_VALUE_TYPE rr;
13186 real_from_mpfr (&rr, m, type, GMP_RNDN);
13187 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13188 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13189 but the mpfr_t is not, then we underflowed in the
13190 conversion. */
13191 if (real_isfinite (&rr)
13192 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13194 REAL_VALUE_TYPE rmode;
13196 real_convert (&rmode, TYPE_MODE (type), &rr);
13197 /* Proceed iff the specified mode can hold the value. */
13198 if (real_identical (&rmode, &rr))
13199 return build_real (type, rmode);
13202 return NULL_TREE;
13205 #ifdef HAVE_mpc
13206 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13207 number and no overflow/underflow occurred. INEXACT is true if M
13208 was not exactly calculated. TYPE is the tree type for the result.
13209 This function assumes that you cleared the MPFR flags and then
13210 calculated M to see if anything subsequently set a flag prior to
13211 entering this function. Return NULL_TREE if any checks fail. */
13213 static tree
13214 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact)
13216 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13217 overflow/underflow occurred. If -frounding-math, proceed iff the
13218 result of calling FUNC was exact. */
13219 if (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13220 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13221 && (!flag_rounding_math || !inexact))
13223 REAL_VALUE_TYPE re, im;
13225 real_from_mpfr (&re, mpc_realref (m), type, GMP_RNDN);
13226 real_from_mpfr (&im, mpc_imagref (m), type, GMP_RNDN);
13227 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13228 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13229 but the mpfr_t is not, then we underflowed in the
13230 conversion. */
13231 if (real_isfinite (&re) && real_isfinite (&im)
13232 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13233 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0))
13235 REAL_VALUE_TYPE re_mode, im_mode;
13237 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13238 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13239 /* Proceed iff the specified mode can hold the value. */
13240 if (real_identical (&re_mode, &re) && real_identical (&im_mode, &im))
13241 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13242 build_real (TREE_TYPE (type), im_mode));
13245 return NULL_TREE;
13247 #endif /* HAVE_mpc */
13249 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13250 FUNC on it and return the resulting value as a tree with type TYPE.
13251 If MIN and/or MAX are not NULL, then the supplied ARG must be
13252 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13253 acceptable values, otherwise they are not. The mpfr precision is
13254 set to the precision of TYPE. We assume that function FUNC returns
13255 zero if the result could be calculated exactly within the requested
13256 precision. */
13258 static tree
13259 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13260 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13261 bool inclusive)
13263 tree result = NULL_TREE;
13265 STRIP_NOPS (arg);
13267 /* To proceed, MPFR must exactly represent the target floating point
13268 format, which only happens when the target base equals two. */
13269 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13270 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13272 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13274 if (real_isfinite (ra)
13275 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13276 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13278 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13279 const int prec = fmt->p;
13280 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13281 int inexact;
13282 mpfr_t m;
13284 mpfr_init2 (m, prec);
13285 mpfr_from_real (m, ra, GMP_RNDN);
13286 mpfr_clear_flags ();
13287 inexact = func (m, m, rnd);
13288 result = do_mpfr_ckconv (m, type, inexact);
13289 mpfr_clear (m);
13293 return result;
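/* A stand-alone MPFR sketch (not compiled as part of this file) of the
   pattern used by do_mpfr_arg1 together with do_mpfr_ckconv: evaluate the
   function at the precision of the target type, with the flags cleared
   beforehand so overflow/underflow can be detected afterwards.  The
   precision 53 is only a stand-in for fmt->p; link with -lmpfr -lgmp.  */
#if 0  /* illustrative only */
#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t m;
  int inexact;

  mpfr_init2 (m, 53);              /* precision of the target type */
  mpfr_set_d (m, 0.5, GMP_RNDN);   /* the REAL_CST argument */
  mpfr_clear_flags ();
  inexact = mpfr_sin (m, m, GMP_RNDN);

  /* do_mpfr_ckconv additionally converts back to REAL_VALUE_TYPE and
     checks that the value round-trips through the target mode.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ())
    printf ("sin(0.5) ~= %.17g, inexact=%d\n",
            mpfr_get_d (m, GMP_RNDN), inexact);
  mpfr_clear (m);
  return 0;
}
#endif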
13296 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13297 FUNC on it and return the resulting value as a tree with type TYPE.
13298 The mpfr precision is set to the precision of TYPE. We assume that
13299 function FUNC returns zero if the result could be calculated
13300 exactly within the requested precision. */
13302 static tree
13303 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13304 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13306 tree result = NULL_TREE;
13308 STRIP_NOPS (arg1);
13309 STRIP_NOPS (arg2);
13311 /* To proceed, MPFR must exactly represent the target floating point
13312 format, which only happens when the target base equals two. */
13313 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13314 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13315 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13317 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13318 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13320 if (real_isfinite (ra1) && real_isfinite (ra2))
13322 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13323 const int prec = fmt->p;
13324 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13325 int inexact;
13326 mpfr_t m1, m2;
13328 mpfr_inits2 (prec, m1, m2, NULL);
13329 mpfr_from_real (m1, ra1, GMP_RNDN);
13330 mpfr_from_real (m2, ra2, GMP_RNDN);
13331 mpfr_clear_flags ();
13332 inexact = func (m1, m1, m2, rnd);
13333 result = do_mpfr_ckconv (m1, type, inexact);
13334 mpfr_clears (m1, m2, NULL);
13338 return result;
13341 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13342 FUNC on it and return the resulting value as a tree with type TYPE.
13343 The mpfr precision is set to the precision of TYPE. We assume that
13344 function FUNC returns zero if the result could be calculated
13345 exactly within the requested precision. */
13347 static tree
13348 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13349 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13351 tree result = NULL_TREE;
13353 STRIP_NOPS (arg1);
13354 STRIP_NOPS (arg2);
13355 STRIP_NOPS (arg3);
13357 /* To proceed, MPFR must exactly represent the target floating point
13358 format, which only happens when the target base equals two. */
13359 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13360 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13361 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13362 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13364 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13365 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13366 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13368 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13370 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13371 const int prec = fmt->p;
13372 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13373 int inexact;
13374 mpfr_t m1, m2, m3;
13376 mpfr_inits2 (prec, m1, m2, m3, NULL);
13377 mpfr_from_real (m1, ra1, GMP_RNDN);
13378 mpfr_from_real (m2, ra2, GMP_RNDN);
13379 mpfr_from_real (m3, ra3, GMP_RNDN);
13380 mpfr_clear_flags ();
13381 inexact = func (m1, m1, m2, m3, rnd);
13382 result = do_mpfr_ckconv (m1, type, inexact);
13383 mpfr_clears (m1, m2, m3, NULL);
13387 return result;
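/* Stand-alone sketch (not compiled as part of this file) of the
   three-operand variant, using mpfr_fma as an example of the kind of
   function passed in as FUNC.  mpfr_inits2/mpfr_clears take a
   NULL-terminated list, mirroring the calls above; precision 53 is again
   just a stand-in for fmt->p.  Link with -lmpfr -lgmp.  */
#if 0  /* illustrative only */
#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t m1, m2, m3;
  int inexact;

  mpfr_inits2 (53, m1, m2, m3, (mpfr_ptr) 0);
  mpfr_set_d (m1, 1.25, GMP_RNDN);
  mpfr_set_d (m2, 2.0, GMP_RNDN);
  mpfr_set_d (m3, 0.5, GMP_RNDN);
  mpfr_clear_flags ();
  inexact = mpfr_fma (m1, m1, m2, m3, GMP_RNDN);  /* m1 = m1*m2 + m3 */
  printf ("fma = %.17g, exact = %s\n",
          mpfr_get_d (m1, GMP_RNDN), inexact == 0 ? "yes" : "no");
  mpfr_clears (m1, m2, m3, (mpfr_ptr) 0);
  return 0;
}
#endif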
13390 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13391 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13392 If ARG_SINP and ARG_COSP are NULL then the result is returned
13393 as a complex value.
13394 The type is taken from the type of ARG and is used for setting the
13395 precision of the calculation and results. */
13397 static tree
13398 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13400 tree const type = TREE_TYPE (arg);
13401 tree result = NULL_TREE;
13403 STRIP_NOPS (arg);
13405 /* To proceed, MPFR must exactly represent the target floating point
13406 format, which only happens when the target base equals two. */
13407 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13408 && TREE_CODE (arg) == REAL_CST
13409 && !TREE_OVERFLOW (arg))
13411 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13413 if (real_isfinite (ra))
13415 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13416 const int prec = fmt->p;
13417 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13418 tree result_s, result_c;
13419 int inexact;
13420 mpfr_t m, ms, mc;
13422 mpfr_inits2 (prec, m, ms, mc, NULL);
13423 mpfr_from_real (m, ra, GMP_RNDN);
13424 mpfr_clear_flags ();
13425 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13426 result_s = do_mpfr_ckconv (ms, type, inexact);
13427 result_c = do_mpfr_ckconv (mc, type, inexact);
13428 mpfr_clears (m, ms, mc, NULL);
13429 if (result_s && result_c)
13431 /* If we are to return a complex value, do so. */
13432 if (!arg_sinp && !arg_cosp)
13433 return build_complex (build_complex_type (type),
13434 result_c, result_s);
13436 /* Dereference the sin/cos pointer arguments. */
13437 arg_sinp = build_fold_indirect_ref (arg_sinp);
13438 arg_cosp = build_fold_indirect_ref (arg_cosp);
13439 /* Proceed if valid pointer types were passed in. */
13440 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13441 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13443 /* Set the values. */
13444 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13445 result_s);
13446 TREE_SIDE_EFFECTS (result_s) = 1;
13447 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13448 result_c);
13449 TREE_SIDE_EFFECTS (result_c) = 1;
13450 /* Combine the assignments into a compound expr. */
13451 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13452 result_s, result_c));
13457 return result;
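/* Stand-alone sketch (not compiled as part of this file) of the
   mpfr_sin_cos call used above: one input, two rounded outputs, and a
   single ternary value that is nonzero if either result was inexact.
   Precision 53 is a stand-in for fmt->p; link with -lmpfr -lgmp.  */
#if 0  /* illustrative only */
#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t m, ms, mc;
  int inexact;

  mpfr_inits2 (53, m, ms, mc, (mpfr_ptr) 0);
  mpfr_set_d (m, 1.0, GMP_RNDN);
  mpfr_clear_flags ();
  inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
  printf ("sin=%.17g cos=%.17g inexact=%d\n",
          mpfr_get_d (ms, GMP_RNDN), mpfr_get_d (mc, GMP_RNDN), inexact);
  mpfr_clears (m, ms, mc, (mpfr_ptr) 0);
  return 0;
}
#endif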
13460 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13461 two-argument mpfr order N Bessel function FUNC on them and return
13462 the resulting value as a tree with type TYPE. The mpfr precision
13463 is set to the precision of TYPE. We assume that function FUNC
13464 returns zero if the result could be calculated exactly within the
13465 requested precision. */
13466 static tree
13467 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13468 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13469 const REAL_VALUE_TYPE *min, bool inclusive)
13471 tree result = NULL_TREE;
13473 STRIP_NOPS (arg1);
13474 STRIP_NOPS (arg2);
13476 /* To proceed, MPFR must exactly represent the target floating point
13477 format, which only happens when the target base equals two. */
13478 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13479 && host_integerp (arg1, 0)
13480 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13482 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13483 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13485 if (n == (long)n
13486 && real_isfinite (ra)
13487 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13489 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13490 const int prec = fmt->p;
13491 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13492 int inexact;
13493 mpfr_t m;
13495 mpfr_init2 (m, prec);
13496 mpfr_from_real (m, ra, GMP_RNDN);
13497 mpfr_clear_flags ();
13498 inexact = func (m, n, m, rnd);
13499 result = do_mpfr_ckconv (m, type, inexact);
13500 mpfr_clear (m);
13504 return result;
13507 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13508 the pointer *(ARG_QUO) and return the result. The type is taken
13509 from the type of ARG0 and is used for setting the precision of the
13510 calculation and results. */
13512 static tree
13513 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13515 tree const type = TREE_TYPE (arg0);
13516 tree result = NULL_TREE;
13518 STRIP_NOPS (arg0);
13519 STRIP_NOPS (arg1);
13521 /* To proceed, MPFR must exactly represent the target floating point
13522 format, which only happens when the target base equals two. */
13523 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13524 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13525 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13527 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13528 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13530 if (real_isfinite (ra0) && real_isfinite (ra1))
13532 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13533 const int prec = fmt->p;
13534 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13535 tree result_rem;
13536 long integer_quo;
13537 mpfr_t m0, m1;
13539 mpfr_inits2 (prec, m0, m1, NULL);
13540 mpfr_from_real (m0, ra0, GMP_RNDN);
13541 mpfr_from_real (m1, ra1, GMP_RNDN);
13542 mpfr_clear_flags ();
13543 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13544 /* Remquo is independent of the rounding mode, so pass
13545 inexact=0 to do_mpfr_ckconv(). */
13546 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13547 mpfr_clears (m0, m1, NULL);
13548 if (result_rem)
13550 /* MPFR calculates quo in the host's long so it may
13551 return more bits in quo than the target int can hold
13552 if sizeof(host long) > sizeof(target int). This can
13553 happen even for native compilers in LP64 mode. In
13554 these cases, reduce the quo value modulo the largest
13555 number that the target int can hold while leaving one
13556 bit for the sign. */
13557 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13558 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13560 /* Dereference the quo pointer argument. */
13561 arg_quo = build_fold_indirect_ref (arg_quo);
13562 /* Proceed iff a valid pointer type was passed in. */
13563 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13565 /* Set the value. */
13566 tree result_quo = fold_build2 (MODIFY_EXPR,
13567 TREE_TYPE (arg_quo), arg_quo,
13568 build_int_cst (NULL, integer_quo));
13569 TREE_SIDE_EFFECTS (result_quo) = 1;
13570 /* Combine the quo assignment with the rem. */
13571 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13572 result_quo, result_rem));
13577 return result;
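/* Stand-alone sketch (not compiled as part of this file) of the
   mpfr_remquo call used above: the remainder goes into an mpfr_t and the
   low bits of the quotient into a host `long', which is why the code above
   has to truncate the value to the width of the target `int'.  Precision
   53 is a stand-in for fmt->p; link with -lmpfr -lgmp.  */
#if 0  /* illustrative only */
#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t r, x, y;
  long quo;

  mpfr_inits2 (53, r, x, y, (mpfr_ptr) 0);
  mpfr_set_d (x, 10.0, GMP_RNDN);
  mpfr_set_d (y, 3.0, GMP_RNDN);
  mpfr_clear_flags ();
  mpfr_remquo (r, &quo, x, y, GMP_RNDN);  /* like C99 remquo (10.0, 3.0, &q) */
  printf ("rem=%.17g quo=%ld\n", mpfr_get_d (r, GMP_RNDN), quo);
  mpfr_clears (r, x, y, (mpfr_ptr) 0);
  return 0;
}
#endif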
13580 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13581 resulting value as a tree with type TYPE. The mpfr precision is
13582 set to the precision of TYPE. We assume that this mpfr function
13583 returns zero if the result could be calculated exactly within the
13584 requested precision. In addition, the integer pointer represented
13585 by ARG_SG will be dereferenced and set to the appropriate signgam
13586 (-1,1) value. */
13588 static tree
13589 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13591 tree result = NULL_TREE;
13593 STRIP_NOPS (arg);
13595 /* To proceed, MPFR must exactly represent the target floating point
13596 format, which only happens when the target base equals two. Also
13597 verify ARG is a constant and that ARG_SG is an int pointer. */
13598 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13599 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13600 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13601 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13603 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13605 /* In addition to NaN and Inf, the argument cannot be zero or a
13606 negative integer. */
13607 if (real_isfinite (ra)
13608 && ra->cl != rvc_zero
13609 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13611 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13612 const int prec = fmt->p;
13613 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13614 int inexact, sg;
13615 mpfr_t m;
13616 tree result_lg;
13618 mpfr_init2 (m, prec);
13619 mpfr_from_real (m, ra, GMP_RNDN);
13620 mpfr_clear_flags ();
13621 inexact = mpfr_lgamma (m, &sg, m, rnd);
13622 result_lg = do_mpfr_ckconv (m, type, inexact);
13623 mpfr_clear (m);
13624 if (result_lg)
13626 tree result_sg;
13628 /* Dereference the arg_sg pointer argument. */
13629 arg_sg = build_fold_indirect_ref (arg_sg);
13630 /* Assign the signgam value into *arg_sg. */
13631 result_sg = fold_build2 (MODIFY_EXPR,
13632 TREE_TYPE (arg_sg), arg_sg,
13633 build_int_cst (NULL, sg));
13634 TREE_SIDE_EFFECTS (result_sg) = 1;
13635 /* Combine the signgam assignment with the lgamma result. */
13636 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13637 result_sg, result_lg));
13642 return result;
13645 #ifdef HAVE_mpc
13646 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13647 function FUNC on it and return the resulting value as a tree with
13648 type TYPE. The mpfr precision is set to the precision of TYPE. We
13649 assume that function FUNC returns zero if the result could be
13650 calculated exactly within the requested precision. */
13652 static tree
13653 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13655 tree result = NULL_TREE;
13657 STRIP_NOPS (arg);
13659 /* To proceed, MPFR must exactly represent the target floating point
13660 format, which only happens when the target base equals two. */
13661 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13662 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13663 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13665 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13666 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13668 if (real_isfinite (re) && real_isfinite (im))
13670 const struct real_format *const fmt =
13671 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13672 const int prec = fmt->p;
13673 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13674 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13675 int inexact;
13676 mpc_t m;
13678 mpc_init2 (m, prec);
13679 mpfr_from_real (mpc_realref(m), re, rnd);
13680 mpfr_from_real (mpc_imagref(m), im, rnd);
13681 mpfr_clear_flags ();
13682 inexact = func (m, m, crnd);
13683 result = do_mpc_ckconv (m, type, inexact);
13684 mpc_clear (m);
13688 return result;
13690 #endif /* HAVE_mpc */
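/* Stand-alone MPC sketch (not compiled as part of this file) of the
   pattern in do_mpc_arg1, using mpc_cos as an example of a one-argument
   mpc function.  mpc_realref/mpc_imagref are the same accessors used
   above; MPC_RNDNN rounds both parts to nearest.  Precision 53 stands in
   for fmt->p; link with -lmpc -lmpfr -lgmp.  */
#if 0  /* illustrative only */
#include <stdio.h>
#include <mpfr.h>
#include <mpc.h>

int
main (void)
{
  mpc_t m;
  int inexact;

  mpc_init2 (m, 53);
  mpc_set_d_d (m, 0.0, 1.0, MPC_RNDNN);  /* the complex constant 0 + 1i */
  mpfr_clear_flags ();
  inexact = mpc_cos (m, m, MPC_RNDNN);   /* cos(i) == cosh(1) ~= 1.5431 */
  printf ("re=%.17g im=%.17g inexact=%d\n",
          mpfr_get_d (mpc_realref (m), GMP_RNDN),
          mpfr_get_d (mpc_imagref (m), GMP_RNDN), inexact);
  mpc_clear (m);
  return 0;
}
#endif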
13692 /* FIXME tuples.
13693 The functions below provide an alternate interface for folding
13694 builtin function calls presented as GIMPLE_CALL statements rather
13695 than as CALL_EXPRs. The folded result is still expressed as a
13696 tree. There is too much code duplication in the handling of
13697 varargs functions, and a more intrusive re-factoring would permit
13698 better sharing of code between the tree and statement-based
13699 versions of these functions. */
13701 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13702 along with N new arguments specified as the "..." parameters. SKIP
13703 is the number of arguments in STMT to be omitted. This function is used
13704 to do varargs-to-varargs transformations. */
13706 static tree
13707 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13709 int oldnargs = gimple_call_num_args (stmt);
13710 int nargs = oldnargs - skip + n;
13711 tree fntype = TREE_TYPE (fndecl);
13712 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13713 tree *buffer;
13714 int i, j;
13715 va_list ap;
13717 buffer = XALLOCAVEC (tree, nargs);
13718 va_start (ap, n);
13719 for (i = 0; i < n; i++)
13720 buffer[i] = va_arg (ap, tree);
13721 va_end (ap);
13722 for (j = skip; j < oldnargs; j++, i++)
13723 buffer[i] = gimple_call_arg (stmt, j);
13725 return fold (build_call_array (TREE_TYPE (fntype), fn, nargs, buffer));
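/* Host-side sketch (not compiled as part of this file) of the argument
   splicing done by gimple_rewrite_call_expr and rewrite_call_expr: the
   first N slots of the new argument vector come from the "..."
   parameters, and the remaining slots are copied from the old call
   starting at index SKIP.  Strings stand in for trees here purely for
   illustration.  */
#if 0  /* illustrative only */
#include <stdarg.h>
#include <stdio.h>

static void
splice (const char **buf, const char **oldargs, int oldnargs,
        int skip, int n, ...)
{
  va_list ap;
  int i, j;

  va_start (ap, n);
  for (i = 0; i < n; i++)
    buf[i] = va_arg (ap, const char *);
  va_end (ap);
  for (j = skip; j < oldnargs; j++, i++)
    buf[i] = oldargs[j];
}

int
main (void)
{
  /* __sprintf_chk (dest, flag, size, fmt, arg): skip the four checked-call
     arguments, prepend dest and fmt, and keep the trailing arg.  */
  const char *oldargs[] = { "dest", "flag", "size", "fmt", "arg" };
  const char *buf[3];

  splice (buf, oldargs, 5, 4, 2, "dest", "fmt");
  printf ("%s %s %s\n", buf[0], buf[1], buf[2]);  /* prints: dest fmt arg */
  return 0;
}
#endif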
13728 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13729 a normal call should be emitted rather than expanding the function
13730 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13732 static tree
13733 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13735 tree dest, size, len, fn, fmt, flag;
13736 const char *fmt_str;
13737 int nargs = gimple_call_num_args (stmt);
13739 /* Verify the required arguments in the original call. */
13740 if (nargs < 4)
13741 return NULL_TREE;
13742 dest = gimple_call_arg (stmt, 0);
13743 if (!validate_arg (dest, POINTER_TYPE))
13744 return NULL_TREE;
13745 flag = gimple_call_arg (stmt, 1);
13746 if (!validate_arg (flag, INTEGER_TYPE))
13747 return NULL_TREE;
13748 size = gimple_call_arg (stmt, 2);
13749 if (!validate_arg (size, INTEGER_TYPE))
13750 return NULL_TREE;
13751 fmt = gimple_call_arg (stmt, 3);
13752 if (!validate_arg (fmt, POINTER_TYPE))
13753 return NULL_TREE;
13755 if (! host_integerp (size, 1))
13756 return NULL_TREE;
13758 len = NULL_TREE;
13760 if (!init_target_chars ())
13761 return NULL_TREE;
13763 /* Check whether the format is a literal string constant. */
13764 fmt_str = c_getstr (fmt);
13765 if (fmt_str != NULL)
13767 /* If the format doesn't contain % args or %%, we know the size. */
13768 if (strchr (fmt_str, target_percent) == 0)
13770 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13771 len = build_int_cstu (size_type_node, strlen (fmt_str));
13773 /* If the format is "%s" and the first ... argument is a string literal,
13774 we know the size too. */
13775 else if (fcode == BUILT_IN_SPRINTF_CHK
13776 && strcmp (fmt_str, target_percent_s) == 0)
13778 tree arg;
13780 if (nargs == 5)
13782 arg = gimple_call_arg (stmt, 4);
13783 if (validate_arg (arg, POINTER_TYPE))
13785 len = c_strlen (arg, 1);
13786 if (! len || ! host_integerp (len, 1))
13787 len = NULL_TREE;
13793 if (! integer_all_onesp (size))
13795 if (! len || ! tree_int_cst_lt (len, size))
13796 return NULL_TREE;
13799 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13800 or if format doesn't contain % chars or is "%s". */
13801 if (! integer_zerop (flag))
13803 if (fmt_str == NULL)
13804 return NULL_TREE;
13805 if (strchr (fmt_str, target_percent) != NULL
13806 && strcmp (fmt_str, target_percent_s))
13807 return NULL_TREE;
13810 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13811 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13812 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13813 if (!fn)
13814 return NULL_TREE;
13816 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13819 /* Fold a call STMT to __{,v}snprintf_chk. Return NULL_TREE if
13820 a normal call should be emitted rather than expanding the function
13821 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13822 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13823 passed as second argument. */
13825 tree
13826 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13827 enum built_in_function fcode)
13829 tree dest, size, len, fn, fmt, flag;
13830 const char *fmt_str;
13832 /* Verify the required arguments in the original call. */
13833 if (gimple_call_num_args (stmt) < 5)
13834 return NULL_TREE;
13835 dest = gimple_call_arg (stmt, 0);
13836 if (!validate_arg (dest, POINTER_TYPE))
13837 return NULL_TREE;
13838 len = gimple_call_arg (stmt, 1);
13839 if (!validate_arg (len, INTEGER_TYPE))
13840 return NULL_TREE;
13841 flag = gimple_call_arg (stmt, 2);
13842 if (!validate_arg (flag, INTEGER_TYPE))
13843 return NULL_TREE;
13844 size = gimple_call_arg (stmt, 3);
13845 if (!validate_arg (size, INTEGER_TYPE))
13846 return NULL_TREE;
13847 fmt = gimple_call_arg (stmt, 4);
13848 if (!validate_arg (fmt, POINTER_TYPE))
13849 return NULL_TREE;
13851 if (! host_integerp (size, 1))
13852 return NULL_TREE;
13854 if (! integer_all_onesp (size))
13856 if (! host_integerp (len, 1))
13858 /* If LEN is not constant, try MAXLEN too.
13859 For MAXLEN only allow optimizing into non-_ocs function
13860 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13861 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13862 return NULL_TREE;
13864 else
13865 maxlen = len;
13867 if (tree_int_cst_lt (size, maxlen))
13868 return NULL_TREE;
13871 if (!init_target_chars ())
13872 return NULL_TREE;
13874 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13875 or if format doesn't contain % chars or is "%s". */
13876 if (! integer_zerop (flag))
13878 fmt_str = c_getstr (fmt);
13879 if (fmt_str == NULL)
13880 return NULL_TREE;
13881 if (strchr (fmt_str, target_percent) != NULL
13882 && strcmp (fmt_str, target_percent_s))
13883 return NULL_TREE;
13886 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13887 available. */
13888 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13889 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13890 if (!fn)
13891 return NULL_TREE;
13893 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13896 /* Builtins with folding operations that operate on "..." arguments
13897 need special handling; we need to store the arguments in a convenient
13898 data structure before attempting any folding. Fortunately there are
13899 only a few builtins that fall into this category. FNDECL is the
13900 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13901 result of the function call is ignored. */
13903 static tree
13904 gimple_fold_builtin_varargs (tree fndecl, gimple stmt, bool ignore ATTRIBUTE_UNUSED)
13906 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13907 tree ret = NULL_TREE;
13909 switch (fcode)
13911 case BUILT_IN_SPRINTF_CHK:
13912 case BUILT_IN_VSPRINTF_CHK:
13913 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13914 break;
13916 case BUILT_IN_SNPRINTF_CHK:
13917 case BUILT_IN_VSNPRINTF_CHK:
13918 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13920 default:
13921 break;
13923 if (ret)
13925 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13926 TREE_NO_WARNING (ret) = 1;
13927 return ret;
13929 return NULL_TREE;
13932 /* A wrapper function for builtin folding that prevents warnings for
13933 "statement without effect" and the like, caused by removing the
13934 call node earlier than the warning is generated. */
13936 tree
13937 fold_call_stmt (gimple stmt, bool ignore)
13939 tree ret = NULL_TREE;
13940 tree fndecl = gimple_call_fndecl (stmt);
13941 if (fndecl
13942 && TREE_CODE (fndecl) == FUNCTION_DECL
13943 && DECL_BUILT_IN (fndecl)
13944 && !gimple_call_va_arg_pack_p (stmt))
13946 int nargs = gimple_call_num_args (stmt);
13948 if (avoid_folding_inline_builtin (fndecl))
13949 return NULL_TREE;
13950 /* FIXME: Don't use a list in this interface. */
13951 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13953 tree arglist = NULL_TREE;
13954 int i;
13955 for (i = nargs - 1; i >= 0; i--)
13956 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13957 return targetm.fold_builtin (fndecl, arglist, ignore);
13959 else
13961 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13963 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13964 int i;
13965 for (i = 0; i < nargs; i++)
13966 args[i] = gimple_call_arg (stmt, i);
13967 ret = fold_builtin_n (fndecl, args, nargs, ignore);
13969 if (!ret)
13970 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13971 if (ret)
13973 /* Propagate location information from original call to
13974 expansion of builtin. Otherwise things like
13975 maybe_emit_chk_warning, that operate on the expansion
13976 of a builtin, will use the wrong location information. */
13977 if (gimple_has_location (stmt))
13979 tree realret = ret;
13980 if (TREE_CODE (ret) == NOP_EXPR)
13981 realret = TREE_OPERAND (ret, 0);
13982 if (CAN_HAVE_LOCATION_P (realret)
13983 && !EXPR_HAS_LOCATION (realret))
13984 SET_EXPR_LOCATION (realret, gimple_location (stmt));
13985 return realret;
13987 return ret;
13991 return NULL_TREE;