1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
56 #endif
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 #endif
61 #ifdef HAVE_mpc
62 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
63 #endif
65 /* Define the names of the builtin function types and codes. */
66 const char *const built_in_class_names[4]
67 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
76 /* Setup an array of _DECL trees, make sure each element is
77 initialized to NULL_TREE. */
78 tree built_in_decls[(int) END_BUILTINS];
79 /* Declarations used when constructing the builtin implicitly in the compiler.
80 It may be NULL_TREE when this is invalid (for instance runtime is not
81 required to implement the function call in all cases). */
82 tree implicit_built_in_decls[(int) END_BUILTINS];
84 static const char *c_getstr (tree);
85 static rtx c_readstr (const char *, enum machine_mode);
86 static int target_char_cast (tree, char *);
87 static rtx get_memory_rtx (tree, tree);
88 static int apply_args_size (void);
89 static int apply_result_size (void);
90 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
91 static rtx result_vector (int, rtx);
92 #endif
93 static void expand_builtin_update_setjmp_buf (rtx);
94 static void expand_builtin_prefetch (tree);
95 static rtx expand_builtin_apply_args (void);
96 static rtx expand_builtin_apply_args_1 (void);
97 static rtx expand_builtin_apply (rtx, rtx, rtx);
98 static void expand_builtin_return (rtx);
99 static enum type_class type_to_class (tree);
100 static rtx expand_builtin_classify_type (tree);
101 static void expand_errno_check (tree, rtx);
102 static rtx expand_builtin_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
105 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
106 static rtx expand_builtin_sincos (tree);
107 static rtx expand_builtin_cexpi (tree, rtx, rtx);
108 static rtx expand_builtin_int_roundingfn (tree, rtx);
109 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
110 static rtx expand_builtin_args_info (tree);
111 static rtx expand_builtin_next_arg (void);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
119 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
120 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
121 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
127 enum machine_mode, int);
128 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
129 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
130 enum machine_mode, int);
131 static rtx expand_builtin_bcopy (tree, int);
132 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
133 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
134 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
136 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
137 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
139 static rtx expand_builtin_bzero (tree);
140 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
141 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
142 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
143 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
144 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
145 static rtx expand_builtin_alloca (tree, rtx);
146 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
147 static rtx expand_builtin_frame_address (tree, tree);
148 static rtx expand_builtin_fputs (tree, rtx, bool);
149 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
150 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
151 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
152 static tree stabilize_va_list (tree, int);
153 static rtx expand_builtin_expect (tree, rtx);
154 static tree fold_builtin_constant_p (tree);
155 static tree fold_builtin_expect (tree, tree);
156 static tree fold_builtin_classify_type (tree);
157 static tree fold_builtin_strlen (tree);
158 static tree fold_builtin_inf (tree, int);
159 static tree fold_builtin_nan (tree, tree, int);
160 static tree rewrite_call_expr (tree, int, tree, int, ...);
161 static bool validate_arg (const_tree, enum tree_code code);
162 static bool integer_valued_real_p (tree);
163 static tree fold_trunc_transparent_mathfn (tree, tree);
164 static bool readonly_data_expr (tree);
165 static rtx expand_builtin_fabs (tree, rtx, rtx);
166 static rtx expand_builtin_signbit (tree, rtx);
167 static tree fold_builtin_sqrt (tree, tree);
168 static tree fold_builtin_cbrt (tree, tree);
169 static tree fold_builtin_pow (tree, tree, tree, tree);
170 static tree fold_builtin_powi (tree, tree, tree, tree);
171 static tree fold_builtin_cos (tree, tree, tree);
172 static tree fold_builtin_cosh (tree, tree, tree);
173 static tree fold_builtin_tan (tree, tree);
174 static tree fold_builtin_trunc (tree, tree);
175 static tree fold_builtin_floor (tree, tree);
176 static tree fold_builtin_ceil (tree, tree);
177 static tree fold_builtin_round (tree, tree);
178 static tree fold_builtin_int_roundingfn (tree, tree);
179 static tree fold_builtin_bitop (tree, tree);
180 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
181 static tree fold_builtin_strchr (tree, tree, tree);
182 static tree fold_builtin_memchr (tree, tree, tree, tree);
183 static tree fold_builtin_memcmp (tree, tree, tree);
184 static tree fold_builtin_strcmp (tree, tree);
185 static tree fold_builtin_strncmp (tree, tree, tree);
186 static tree fold_builtin_signbit (tree, tree);
187 static tree fold_builtin_copysign (tree, tree, tree, tree);
188 static tree fold_builtin_isascii (tree);
189 static tree fold_builtin_toascii (tree);
190 static tree fold_builtin_isdigit (tree);
191 static tree fold_builtin_fabs (tree, tree);
192 static tree fold_builtin_abs (tree, tree);
193 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
194 enum tree_code);
195 static tree fold_builtin_n (tree, tree *, int, bool);
196 static tree fold_builtin_0 (tree, bool);
197 static tree fold_builtin_1 (tree, tree, bool);
198 static tree fold_builtin_2 (tree, tree, tree, bool);
199 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
200 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
201 static tree fold_builtin_varargs (tree, tree, bool);
203 static tree fold_builtin_strpbrk (tree, tree, tree);
204 static tree fold_builtin_strstr (tree, tree, tree);
205 static tree fold_builtin_strrchr (tree, tree, tree);
206 static tree fold_builtin_strcat (tree, tree);
207 static tree fold_builtin_strncat (tree, tree, tree);
208 static tree fold_builtin_strspn (tree, tree);
209 static tree fold_builtin_strcspn (tree, tree);
210 static tree fold_builtin_sprintf (tree, tree, tree, int);
212 static rtx expand_builtin_object_size (tree);
213 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
214 enum built_in_function);
215 static void maybe_emit_chk_warning (tree, enum built_in_function);
216 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
217 static void maybe_emit_free_warning (tree);
218 static tree fold_builtin_object_size (tree, tree);
219 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
220 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
221 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
222 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
223 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
224 enum built_in_function);
225 static bool init_target_chars (void);
227 static unsigned HOST_WIDE_INT target_newline;
228 static unsigned HOST_WIDE_INT target_percent;
229 static unsigned HOST_WIDE_INT target_c;
230 static unsigned HOST_WIDE_INT target_s;
231 static char target_percent_c[3];
232 static char target_percent_s[3];
233 static char target_percent_s_newline[4];
234 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
235 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
236 static tree do_mpfr_arg2 (tree, tree, tree,
237 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
238 static tree do_mpfr_arg3 (tree, tree, tree, tree,
239 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
240 static tree do_mpfr_sincos (tree, tree, tree);
241 static tree do_mpfr_bessel_n (tree, tree, tree,
242 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
243 const REAL_VALUE_TYPE *, bool);
244 static tree do_mpfr_remquo (tree, tree, tree);
245 static tree do_mpfr_lgamma_r (tree, tree, tree);
bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  return false;
}
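/* Illustrative examples, not part of the original source: with the two
   prefix checks above,

     is_builtin_name ("__builtin_memcpy")        -> true
     is_builtin_name ("__sync_fetch_and_add_4")  -> true
     is_builtin_name ("memcpy")                  -> false

   Only the textual prefix is inspected here; whether a matching builtin
   actually exists is decided elsewhere.  */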
257 /* Return true if NODE should be considered for inline expansion regardless
258 of the optimization level. This means whenever a function is invoked with
259 its "internal" name, which normally contains the prefix "__builtin". */
static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Return the alignment in bits of EXP, an object.
   Don't return more than MAX_ALIGN no matter what.  ALIGN is the initial
   guessed alignment, e.g. from type alignment.  */

unsigned int
get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
278 unsigned int inner;
280 inner = max_align;
281 if (handled_component_p (exp))
283 HOST_WIDE_INT bitsize, bitpos;
284 tree offset;
285 enum machine_mode mode;
286 int unsignedp, volatilep;
288 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
289 &mode, &unsignedp, &volatilep, true);
290 if (bitpos)
291 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
292 while (offset)
294 tree next_offset;
296 if (TREE_CODE (offset) == PLUS_EXPR)
298 next_offset = TREE_OPERAND (offset, 0);
299 offset = TREE_OPERAND (offset, 1);
301 else
302 next_offset = NULL;
303 if (host_integerp (offset, 1))
305 /* Any overflow in calculating offset_bits won't change
306 the alignment. */
307 unsigned offset_bits
308 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
310 if (offset_bits)
311 inner = MIN (inner, (offset_bits & -offset_bits));
313 else if (TREE_CODE (offset) == MULT_EXPR
314 && host_integerp (TREE_OPERAND (offset, 1), 1))
316 /* Any overflow in calculating offset_factor won't change
317 the alignment. */
318 unsigned offset_factor
319 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
320 * BITS_PER_UNIT);
322 if (offset_factor)
323 inner = MIN (inner, (offset_factor & -offset_factor));
325 else
327 inner = MIN (inner, BITS_PER_UNIT);
328 break;
330 offset = next_offset;
333 if (DECL_P (exp))
334 align = MIN (inner, DECL_ALIGN (exp));
335 #ifdef CONSTANT_ALIGNMENT
336 else if (CONSTANT_CLASS_P (exp))
337 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
338 #endif
339 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
340 || TREE_CODE (exp) == INDIRECT_REF)
341 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
342 else
343 align = MIN (align, inner);
344 return MIN (align, max_align);
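/* The MIN (inner, x & -x) steps above rely on the two's-complement identity
   that x & -x isolates the lowest set bit of x: an offset of x bytes can
   only guarantee alignment to that power of two.  A minimal standalone
   sketch of the trick (illustrative only, not part of builtins.c):  */
#if 0
#include <stdio.h>

static unsigned int
lowest_set_bit (unsigned int x)
{
  /* 24 (binary 11000) -> 8, 40 (101000) -> 8, 64 (1000000) -> 64.  */
  return x & -x;
}

int
main (void)
{
  printf ("%u %u %u\n",
          lowest_set_bit (24), lowest_set_bit (40), lowest_set_bit (64));
  return 0;
}
#endif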
347 /* Return the alignment in bits of EXP, a pointer valued expression.
348 But don't return more than MAX_ALIGN no matter what.
349 The alignment returned is, by default, the alignment of the thing that
350 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
352 Otherwise, look at the expression to see if we can do better, i.e., if the
353 expression is actually pointing at an object whose alignment is tighter. */
unsigned int
get_pointer_alignment (tree exp, unsigned int max_align)
358 unsigned int align, inner;
360 /* We rely on TER to compute accurate alignment information. */
361 if (!(optimize && flag_tree_ter))
362 return 0;
364 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
365 return 0;
367 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
368 align = MIN (align, max_align);
370 while (1)
372 switch (TREE_CODE (exp))
374 CASE_CONVERT:
375 exp = TREE_OPERAND (exp, 0);
376 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
377 return align;
379 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
380 align = MIN (inner, max_align);
381 break;
383 case POINTER_PLUS_EXPR:
384 /* If sum of pointer + int, restrict our maximum alignment to that
385 imposed by the integer. If not, we can't do any better than
386 ALIGN. */
387 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
388 return align;
390 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
391 & (max_align / BITS_PER_UNIT - 1))
392 != 0)
393 max_align >>= 1;
395 exp = TREE_OPERAND (exp, 0);
396 break;
398 case ADDR_EXPR:
399 /* See what we are pointing at and look at its alignment. */
400 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
402 default:
403 return align;
408 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
409 way, because it could contain a zero byte in the middle.
410 TREE_STRING_LENGTH is the size of the character array, not the string.
412 ONLY_VALUE should be nonzero if the result is not going to be emitted
413 into the instruction stream and zero if it is going to be expanded.
414 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
415 is returned, otherwise NULL, since
416 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
417 evaluate the side-effects.
419 The value returned is of type `ssizetype'.
421 Unfortunately, string_constant can't access the values of const char
422 arrays with initializers, so neither can we do so here. */
424 tree
425 c_strlen (tree src, int only_value)
427 tree offset_node;
428 HOST_WIDE_INT offset;
429 int max;
430 const char *ptr;
432 STRIP_NOPS (src);
433 if (TREE_CODE (src) == COND_EXPR
434 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
436 tree len1, len2;
438 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
439 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
440 if (tree_int_cst_equal (len1, len2))
441 return len1;
444 if (TREE_CODE (src) == COMPOUND_EXPR
445 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
446 return c_strlen (TREE_OPERAND (src, 1), only_value);
448 src = string_constant (src, &offset_node);
449 if (src == 0)
450 return NULL_TREE;
452 max = TREE_STRING_LENGTH (src) - 1;
453 ptr = TREE_STRING_POINTER (src);
455 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
457 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
458 compute the offset to the following null if we don't know where to
459 start searching for it. */
460 int i;
462 for (i = 0; i < max; i++)
463 if (ptr[i] == 0)
464 return NULL_TREE;
466 /* We don't know the starting offset, but we do know that the string
467 has no internal zero bytes. We can assume that the offset falls
468 within the bounds of the string; otherwise, the programmer deserves
469 what he gets. Subtract the offset from the length of the string,
470 and return that. This would perhaps not be valid if we were dealing
471 with named arrays in addition to literal string constants. */
473 return size_diffop (size_int (max), offset_node);
476 /* We have a known offset into the string. Start searching there for
477 a null character if we can represent it as a single HOST_WIDE_INT. */
478 if (offset_node == 0)
479 offset = 0;
480 else if (! host_integerp (offset_node, 0))
481 offset = -1;
482 else
483 offset = tree_low_cst (offset_node, 0);
485 /* If the offset is known to be out of bounds, warn, and call strlen at
486 runtime. */
487 if (offset < 0 || offset > max)
489 /* Suppress multiple warnings for propagated constant strings. */
490 if (! TREE_NO_WARNING (src))
492 warning (0, "offset outside bounds of constant string");
493 TREE_NO_WARNING (src) = 1;
495 return NULL_TREE;
498 /* Use strlen to search for the first zero byte. Since any strings
499 constructed with build_string will have nulls appended, we win even
500 if we get handed something like (char[4])"abcd".
502 Since OFFSET is our starting index into the string, no further
503 calculation is needed. */
504 return ssize_int (strlen (ptr + offset));
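/* Worked examples, not part of the original source: if SRC folds to the
   STRING_CST "hello" with constant offset 2, the function returns
   ssize_int (strlen ("llo")) == 3.  If SRC is "foo\0bar" with a
   non-constant offset, the scan above finds the embedded NUL and
   NULL_TREE is returned, so callers typically fall back to a runtime
   strlen call.  */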
507 /* Return a char pointer for a C string if it is a string constant
508 or sum of string constant and integer constant. */
static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!host_integerp (offset_node, 1)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
}
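/* Illustrative behaviour, not part of the original source: for an argument
   that folds to "hello" + 2 this returns a host pointer to "llo"; if the
   offset is non-constant or past the end of the string constant, 0 is
   returned and the caller must handle the general case.  */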
528 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
529 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
531 static rtx
532 c_readstr (const char *str, enum machine_mode mode)
534 HOST_WIDE_INT c[2];
535 HOST_WIDE_INT ch;
536 unsigned int i, j;
538 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
540 c[0] = 0;
541 c[1] = 0;
542 ch = 1;
543 for (i = 0; i < GET_MODE_SIZE (mode); i++)
545 j = i;
546 if (WORDS_BIG_ENDIAN)
547 j = GET_MODE_SIZE (mode) - i - 1;
548 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
549 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
550 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
551 j *= BITS_PER_UNIT;
552 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
554 if (ch)
555 ch = (unsigned char) str[i];
556 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
558 return immed_double_const (c[0], c[1], mode);
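/* Worked example, not part of the original source: reading "abcd" in a
   32-bit integer mode on a little-endian target (neither BYTES_ nor
   WORDS_BIG_ENDIAN) stores 'a' at bit 0, giving the constant 0x64636261;
   on a big-endian target the loop maps byte i to position 3 - i and the
   result is 0x61626364.  Either way the constant's memory image matches
   the string bytes the target would load.  */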
561 /* Cast a target constant CST to target CHAR and if that value fits into
562 host char type, return zero and put that value into variable pointed to by
563 P. */
static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (!host_integerp (cst, 1)
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  val = tree_low_cst (cst, 1);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
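/* Illustrative behaviour, not part of the original source: with the usual
   8-bit target and host chars, a constant 'A' (65) is accepted, *P is set
   to 'A' and 0 is returned.  If the target char were wider than the host
   char (say 16 bits), a value such as 300 would survive the target mask
   but not the host mask, VAL != HOSTVAL, and 1 is returned to signal
   failure.  */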
589 /* Similar to save_expr, but assumes that arbitrary code is not executed
590 in between the multiple evaluations. In particular, we assume that a
591 non-addressable local variable will not be modified. */
static tree
builtin_save_expr (tree exp)
{
  if (TREE_ADDRESSABLE (exp) == 0
      && (TREE_CODE (exp) == PARM_DECL
	  || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
    return exp;

  return save_expr (exp);
}
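/* Illustrative behaviour, not part of the original source: a non-addressable
   PARM_DECL or automatic VAR_DECL is returned unchanged, since nothing can
   modify it between the evaluations the builtin expanders make; anything
   else (a global, an addressable local, *p, a call) still gets the usual
   SAVE_EXPR wrapper.  */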
604 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
605 times to get the address of either a higher stack frame, or a return
606 address located within it (depending on FNDECL_CODE). */
608 static rtx
609 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
611 int i;
613 #ifdef INITIAL_FRAME_ADDRESS_RTX
614 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
615 #else
616 rtx tem;
618 /* For a zero count with __builtin_return_address, we don't care what
619 frame address we return, because target-specific definitions will
620 override us. Therefore frame pointer elimination is OK, and using
621 the soft frame pointer is OK.
623 For a nonzero count, or a zero count with __builtin_frame_address,
624 we require a stable offset from the current frame pointer to the
625 previous one, so we must use the hard frame pointer, and
626 we must disable frame pointer elimination. */
627 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
628 tem = frame_pointer_rtx;
629 else
631 tem = hard_frame_pointer_rtx;
633 /* Tell reload not to eliminate the frame pointer. */
634 crtl->accesses_prior_frames = 1;
636 #endif
638 /* Some machines need special handling before we can access
639 arbitrary frames. For example, on the SPARC, we must first flush
640 all register windows to the stack. */
641 #ifdef SETUP_FRAME_ADDRESSES
642 if (count > 0)
643 SETUP_FRAME_ADDRESSES ();
644 #endif
646 /* On the SPARC, the return address is not in the frame, it is in a
647 register. There is no way to access it off of the current frame
648 pointer, but it can be accessed off the previous frame pointer by
649 reading the value from the register window save area. */
650 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
651 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
652 count--;
653 #endif
655 /* Scan back COUNT frames to the specified frame. */
656 for (i = 0; i < count; i++)
658 /* Assume the dynamic chain pointer is in the word that the
659 frame address points to, unless otherwise specified. */
660 #ifdef DYNAMIC_CHAIN_ADDRESS
661 tem = DYNAMIC_CHAIN_ADDRESS (tem);
662 #endif
663 tem = memory_address (Pmode, tem);
664 tem = gen_frame_mem (Pmode, tem);
665 tem = copy_to_reg (tem);
668 /* For __builtin_frame_address, return what we've got. But, on
669 the SPARC for example, we may have to add a bias. */
670 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
671 #ifdef FRAME_ADDR_RTX
672 return FRAME_ADDR_RTX (tem);
673 #else
674 return tem;
675 #endif
677 /* For __builtin_return_address, get the return address from that frame. */
678 #ifdef RETURN_ADDR_RTX
679 tem = RETURN_ADDR_RTX (count, tem);
680 #else
681 tem = memory_address (Pmode,
682 plus_constant (tem, GET_MODE_SIZE (Pmode)));
683 tem = gen_frame_mem (Pmode, tem);
684 #endif
685 return tem;
688 /* Alias set used for setjmp buffer. */
689 static alias_set_type setjmp_alias_set = -1;
691 /* Construct the leading half of a __builtin_setjmp call. Control will
692 return to RECEIVER_LABEL. This is also called directly by the SJLJ
693 exception handling code. */
695 void
696 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
698 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
699 rtx stack_save;
700 rtx mem;
702 if (setjmp_alias_set == -1)
703 setjmp_alias_set = new_alias_set ();
705 buf_addr = convert_memory_address (Pmode, buf_addr);
707 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
709 /* We store the frame pointer and the address of receiver_label in
710 the buffer and use the rest of it for the stack save area, which
711 is machine-dependent. */
713 mem = gen_rtx_MEM (Pmode, buf_addr);
714 set_mem_alias_set (mem, setjmp_alias_set);
715 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
717 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
718 set_mem_alias_set (mem, setjmp_alias_set);
720 emit_move_insn (validize_mem (mem),
721 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
723 stack_save = gen_rtx_MEM (sa_mode,
724 plus_constant (buf_addr,
725 2 * GET_MODE_SIZE (Pmode)));
726 set_mem_alias_set (stack_save, setjmp_alias_set);
727 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
729 /* If there is further processing to do, do it. */
730 #ifdef HAVE_builtin_setjmp_setup
731 if (HAVE_builtin_setjmp_setup)
732 emit_insn (gen_builtin_setjmp_setup (buf_addr));
733 #endif
735 /* Tell optimize_save_area_alloca that extra work is going to
736 need to go on during alloca. */
737 cfun->calls_setjmp = 1;
739 /* We have a nonlocal label. */
740 cfun->has_nonlocal_label = 1;
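/* Resulting layout of the setjmp buffer (illustrative summary of the stores
   above): word 0 holds the frame value from
   targetm.builtin_setjmp_frame_value (), word 1 holds the address of
   RECEIVER_LABEL, and the bytes from offset 2 * GET_MODE_SIZE (Pmode)
   onwards hold the machine-dependent nonlocal stack save area.  */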
743 /* Construct the trailing part of a __builtin_setjmp call. This is
744 also called directly by the SJLJ exception handling code. */
746 void
747 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
749 /* Clobber the FP when we get here, so we have to make sure it's
750 marked as used by this function. */
751 emit_use (hard_frame_pointer_rtx);
753 /* Mark the static chain as clobbered here so life information
754 doesn't get messed up for it. */
755 emit_clobber (static_chain_rtx);
757 /* Now put in the code to restore the frame pointer, and argument
758 pointer, if needed. */
759 #ifdef HAVE_nonlocal_goto
760 if (! HAVE_nonlocal_goto)
761 #endif
763 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
764 /* This might change the hard frame pointer in ways that aren't
765 apparent to early optimization passes, so force a clobber. */
766 emit_clobber (hard_frame_pointer_rtx);
769 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
770 if (fixed_regs[ARG_POINTER_REGNUM])
772 #ifdef ELIMINABLE_REGS
773 size_t i;
774 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
776 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
777 if (elim_regs[i].from == ARG_POINTER_REGNUM
778 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
779 break;
781 if (i == ARRAY_SIZE (elim_regs))
782 #endif
784 /* Now restore our arg pointer from the address at which it
785 was saved in our stack frame. */
786 emit_move_insn (crtl->args.internal_arg_pointer,
787 copy_to_reg (get_arg_pointer_save_area ()));
790 #endif
792 #ifdef HAVE_builtin_setjmp_receiver
793 if (HAVE_builtin_setjmp_receiver)
794 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
795 else
796 #endif
797 #ifdef HAVE_nonlocal_goto_receiver
798 if (HAVE_nonlocal_goto_receiver)
799 emit_insn (gen_nonlocal_goto_receiver ());
800 else
801 #endif
802 { /* Nothing */ }
804 /* We must not allow the code we just generated to be reordered by
805 scheduling. Specifically, the update of the frame pointer must
806 happen immediately, not later. */
807 emit_insn (gen_blockage ());
810 /* __builtin_longjmp is passed a pointer to an array of five words (not
811 all will be used on all machines). It operates similarly to the C
812 library function of the same name, but is more efficient. Much of
813 the code below is copied from the handling of non-local gotos. */
815 static void
816 expand_builtin_longjmp (rtx buf_addr, rtx value)
818 rtx fp, lab, stack, insn, last;
819 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
821 /* DRAP is needed for stack realign if longjmp is expanded to current
822 function */
823 if (SUPPORTS_STACK_ALIGNMENT)
824 crtl->need_drap = true;
826 if (setjmp_alias_set == -1)
827 setjmp_alias_set = new_alias_set ();
829 buf_addr = convert_memory_address (Pmode, buf_addr);
831 buf_addr = force_reg (Pmode, buf_addr);
833 /* We used to store value in static_chain_rtx, but that fails if pointers
834 are smaller than integers. We instead require that the user must pass
835 a second argument of 1, because that is what builtin_setjmp will
836 return. This also makes EH slightly more efficient, since we are no
837 longer copying around a value that we don't care about. */
838 gcc_assert (value == const1_rtx);
840 last = get_last_insn ();
841 #ifdef HAVE_builtin_longjmp
842 if (HAVE_builtin_longjmp)
843 emit_insn (gen_builtin_longjmp (buf_addr));
844 else
845 #endif
847 fp = gen_rtx_MEM (Pmode, buf_addr);
848 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
849 GET_MODE_SIZE (Pmode)));
851 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
852 2 * GET_MODE_SIZE (Pmode)));
853 set_mem_alias_set (fp, setjmp_alias_set);
854 set_mem_alias_set (lab, setjmp_alias_set);
855 set_mem_alias_set (stack, setjmp_alias_set);
857 /* Pick up FP, label, and SP from the block and jump. This code is
858 from expand_goto in stmt.c; see there for detailed comments. */
859 #ifdef HAVE_nonlocal_goto
860 if (HAVE_nonlocal_goto)
861 /* We have to pass a value to the nonlocal_goto pattern that will
862 get copied into the static_chain pointer, but it does not matter
863 what that value is, because builtin_setjmp does not use it. */
864 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
865 else
866 #endif
868 lab = copy_to_reg (lab);
870 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
871 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
873 emit_move_insn (hard_frame_pointer_rtx, fp);
874 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
876 emit_use (hard_frame_pointer_rtx);
877 emit_use (stack_pointer_rtx);
878 emit_indirect_jump (lab);
882 /* Search backwards and mark the jump insn as a non-local goto.
883 Note that this precludes the use of __builtin_longjmp to a
884 __builtin_setjmp target in the same function. However, we've
885 already cautioned the user that these functions are for
886 internal exception handling use only. */
887 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
889 gcc_assert (insn != last);
891 if (JUMP_P (insn))
893 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
894 break;
896 else if (CALL_P (insn))
897 break;
901 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
902 and the address of the save area. */
904 static rtx
905 expand_builtin_nonlocal_goto (tree exp)
907 tree t_label, t_save_area;
908 rtx r_label, r_save_area, r_fp, r_sp, insn;
910 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
911 return NULL_RTX;
913 t_label = CALL_EXPR_ARG (exp, 0);
914 t_save_area = CALL_EXPR_ARG (exp, 1);
916 r_label = expand_normal (t_label);
917 r_label = convert_memory_address (Pmode, r_label);
918 r_save_area = expand_normal (t_save_area);
919 r_save_area = convert_memory_address (Pmode, r_save_area);
920 /* Copy the address of the save location to a register just in case it was based
921 on the frame pointer. */
922 r_save_area = copy_to_reg (r_save_area);
923 r_fp = gen_rtx_MEM (Pmode, r_save_area);
924 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
925 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
927 crtl->has_nonlocal_goto = 1;
929 #ifdef HAVE_nonlocal_goto
930 /* ??? We no longer need to pass the static chain value, afaik. */
931 if (HAVE_nonlocal_goto)
932 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
933 else
934 #endif
936 r_label = copy_to_reg (r_label);
938 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
939 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
941 /* Restore frame pointer for containing function.
942 This sets the actual hard register used for the frame pointer
943 to the location of the function's incoming static chain info.
944 The non-local goto handler will then adjust it to contain the
945 proper value and reload the argument pointer, if needed. */
946 emit_move_insn (hard_frame_pointer_rtx, r_fp);
947 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
949 /* USE of hard_frame_pointer_rtx added for consistency;
950 not clear if really needed. */
951 emit_use (hard_frame_pointer_rtx);
952 emit_use (stack_pointer_rtx);
954 /* If the architecture is using a GP register, we must
955 conservatively assume that the target function makes use of it.
956 The prologue of functions with nonlocal gotos must therefore
957 initialize the GP register to the appropriate value, and we
958 must then make sure that this value is live at the point
959 of the jump. (Note that this doesn't necessarily apply
960 to targets with a nonlocal_goto pattern; they are free
961 to implement it in their own way. Note also that this is
962 a no-op if the GP register is a global invariant.) */
963 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
964 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
965 emit_use (pic_offset_table_rtx);
967 emit_indirect_jump (r_label);
970 /* Search backwards to the jump insn and mark it as a
971 non-local goto. */
972 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
974 if (JUMP_P (insn))
976 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
977 break;
979 else if (CALL_P (insn))
980 break;
983 return const0_rtx;
986 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
987 (not all will be used on all machines) that was passed to __builtin_setjmp.
988 It updates the stack pointer in that block to correspond to the current
989 stack pointer. */
991 static void
992 expand_builtin_update_setjmp_buf (rtx buf_addr)
994 enum machine_mode sa_mode = Pmode;
995 rtx stack_save;
998 #ifdef HAVE_save_stack_nonlocal
999 if (HAVE_save_stack_nonlocal)
1000 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1001 #endif
1002 #ifdef STACK_SAVEAREA_MODE
1003 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1004 #endif
1006 stack_save
1007 = gen_rtx_MEM (sa_mode,
1008 memory_address
1009 (sa_mode,
1010 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1012 #ifdef HAVE_setjmp
1013 if (HAVE_setjmp)
1014 emit_insn (gen_setjmp ());
1015 #endif
1017 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1020 /* Expand a call to __builtin_prefetch. For a target that does not support
1021 data prefetch, evaluate the memory address argument in case it has side
1022 effects. */
1024 static void
1025 expand_builtin_prefetch (tree exp)
1027 tree arg0, arg1, arg2;
1028 int nargs;
1029 rtx op0, op1, op2;
1031 if (!validate_arglist (exp, POINTER_TYPE, 0))
1032 return;
1034 arg0 = CALL_EXPR_ARG (exp, 0);
1036 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1037 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1038 locality). */
1039 nargs = call_expr_nargs (exp);
1040 if (nargs > 1)
1041 arg1 = CALL_EXPR_ARG (exp, 1);
1042 else
1043 arg1 = integer_zero_node;
1044 if (nargs > 2)
1045 arg2 = CALL_EXPR_ARG (exp, 2);
1046 else
1047 arg2 = build_int_cst (NULL_TREE, 3);
1049 /* Argument 0 is an address. */
1050 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1052 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1053 if (TREE_CODE (arg1) != INTEGER_CST)
1055 error ("second argument to %<__builtin_prefetch%> must be a constant");
1056 arg1 = integer_zero_node;
1058 op1 = expand_normal (arg1);
1059 /* Argument 1 must be either zero or one. */
1060 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1062 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1063 " using zero");
1064 op1 = const0_rtx;
1067 /* Argument 2 (locality) must be a compile-time constant int. */
1068 if (TREE_CODE (arg2) != INTEGER_CST)
1070 error ("third argument to %<__builtin_prefetch%> must be a constant");
1071 arg2 = integer_zero_node;
1073 op2 = expand_normal (arg2);
1074 /* Argument 2 must be 0, 1, 2, or 3. */
1075 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1077 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1078 op2 = const0_rtx;
1081 #ifdef HAVE_prefetch
1082 if (HAVE_prefetch)
1084 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1085 (op0,
1086 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1087 || (GET_MODE (op0) != Pmode))
1089 op0 = convert_memory_address (Pmode, op0);
1090 op0 = force_reg (Pmode, op0);
1092 emit_insn (gen_prefetch (op0, op1, op2));
1094 #endif
1096 /* Don't do anything with direct references to volatile memory, but
1097 generate code to handle other side effects. */
1098 if (!MEM_P (op0) && side_effects_p (op0))
1099 emit_insn (op0);
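/* Illustrative calls, not part of the original source, showing how the
   defaults above apply:

     __builtin_prefetch (p);        read prefetch, locality 3
     __builtin_prefetch (p, 1);     write prefetch, locality 3
     __builtin_prefetch (p, 0, 1);  read prefetch, low temporal locality

   A non-constant second or third argument is diagnosed and replaced with
   zero, as is an out-of-range value.  */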
1102 /* Get a MEM rtx for expression EXP which is the address of an operand
1103 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1104 the maximum length of the block of memory that might be accessed or
1105 NULL if unknown. */
1107 static rtx
1108 get_memory_rtx (tree exp, tree len)
1110 tree orig_exp = exp;
1111 rtx addr, mem;
1112 HOST_WIDE_INT off;
1114 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1115 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1116 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1117 exp = TREE_OPERAND (exp, 0);
1119 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1120 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1122 /* Get an expression we can use to find the attributes to assign to MEM.
1123 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1124 we can. First remove any nops. */
1125 while (CONVERT_EXPR_P (exp)
1126 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1127 exp = TREE_OPERAND (exp, 0);
1129 off = 0;
1130 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1131 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1132 && host_integerp (TREE_OPERAND (exp, 1), 0)
1133 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1134 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1135 else if (TREE_CODE (exp) == ADDR_EXPR)
1136 exp = TREE_OPERAND (exp, 0);
1137 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1138 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1139 else
1140 exp = NULL;
1142 /* Honor attributes derived from exp, except for the alias set
1143 (as builtin stringops may alias with anything) and the size
1144 (as stringops may access multiple array elements). */
1145 if (exp)
1147 set_mem_attributes (mem, exp, 0);
1149 if (off)
1150 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1152 /* Allow the string and memory builtins to overflow from one
1153 field into another, see http://gcc.gnu.org/PR23561.
1154 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1155 memory accessed by the string or memory builtin will fit
1156 within the field. */
1157 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1159 tree mem_expr = MEM_EXPR (mem);
1160 HOST_WIDE_INT offset = -1, length = -1;
1161 tree inner = exp;
1163 while (TREE_CODE (inner) == ARRAY_REF
1164 || CONVERT_EXPR_P (inner)
1165 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1166 || TREE_CODE (inner) == SAVE_EXPR)
1167 inner = TREE_OPERAND (inner, 0);
1169 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1171 if (MEM_OFFSET (mem)
1172 && CONST_INT_P (MEM_OFFSET (mem)))
1173 offset = INTVAL (MEM_OFFSET (mem));
1175 if (offset >= 0 && len && host_integerp (len, 0))
1176 length = tree_low_cst (len, 0);
1178 while (TREE_CODE (inner) == COMPONENT_REF)
1180 tree field = TREE_OPERAND (inner, 1);
1181 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1182 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1184 /* Bitfields are generally not byte-addressable. */
1185 gcc_assert (!DECL_BIT_FIELD (field)
1186 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1187 % BITS_PER_UNIT) == 0
1188 && host_integerp (DECL_SIZE (field), 0)
1189 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1190 % BITS_PER_UNIT) == 0));
1192 /* If we can prove that the memory starting at XEXP (mem, 0) and
1193 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1194 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1195 fields without DECL_SIZE_UNIT like flexible array members. */
1196 if (length >= 0
1197 && DECL_SIZE_UNIT (field)
1198 && host_integerp (DECL_SIZE_UNIT (field), 0))
1200 HOST_WIDE_INT size
1201 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1202 if (offset <= size
1203 && length <= size
1204 && offset + length <= size)
1205 break;
1208 if (offset >= 0
1209 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1210 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1211 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1212 / BITS_PER_UNIT;
1213 else
1215 offset = -1;
1216 length = -1;
1219 mem_expr = TREE_OPERAND (mem_expr, 0);
1220 inner = TREE_OPERAND (inner, 0);
1223 if (mem_expr == NULL)
1224 offset = -1;
1225 if (mem_expr != MEM_EXPR (mem))
1227 set_mem_expr (mem, mem_expr);
1228 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1231 set_mem_alias_set (mem, 0);
1232 set_mem_size (mem, NULL_RTX);
1235 return mem;
1238 /* Built-in functions to perform an untyped call and return. */
1240 /* For each register that may be used for calling a function, this
1241 gives a mode used to copy the register's value. VOIDmode indicates
1242 the register is not used for calling a function. If the machine
1243 has register windows, this gives only the outbound registers.
1244 INCOMING_REGNO gives the corresponding inbound register. */
1245 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1247 /* For each register that may be used for returning values, this gives
1248 a mode used to copy the register's value. VOIDmode indicates the
1249 register is not used for returning values. If the machine has
1250 register windows, this gives only the outbound registers.
1251 INCOMING_REGNO gives the corresponding inbound register. */
1252 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1254 /* For each register that may be used for calling a function, this
1255 gives the offset of that register into the block returned by
1256 __builtin_apply_args. 0 indicates that the register is not
1257 used for calling a function. */
1258 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1260 /* Return the size required for the block returned by __builtin_apply_args,
1261 and initialize apply_args_mode. */
1263 static int
1264 apply_args_size (void)
1266 static int size = -1;
1267 int align;
1268 unsigned int regno;
1269 enum machine_mode mode;
1271 /* The values computed by this function never change. */
1272 if (size < 0)
1274 /* The first value is the incoming arg-pointer. */
1275 size = GET_MODE_SIZE (Pmode);
1277 /* The second value is the structure value address unless this is
1278 passed as an "invisible" first argument. */
1279 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1280 size += GET_MODE_SIZE (Pmode);
1282 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1283 if (FUNCTION_ARG_REGNO_P (regno))
1285 mode = reg_raw_mode[regno];
1287 gcc_assert (mode != VOIDmode);
1289 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1290 if (size % align != 0)
1291 size = CEIL (size, align) * align;
1292 apply_args_reg_offset[regno] = size;
1293 size += GET_MODE_SIZE (mode);
1294 apply_args_mode[regno] = mode;
1296 else
1298 apply_args_mode[regno] = VOIDmode;
1299 apply_args_reg_offset[regno] = 0;
1302 return size;
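/* Worked example, not part of the original source: suppose SIZE is 12 when
   an 8-byte register with 8-byte alignment is reached.  12 % 8 != 0, so
   SIZE is first rounded up with CEIL (12, 8) * 8 = 16; the register is
   recorded at offset 16 and SIZE advances to 24.  The same rounding is
   used by apply_result_size and result_vector below.  */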
1305 /* Return the size required for the block returned by __builtin_apply,
1306 and initialize apply_result_mode. */
1308 static int
1309 apply_result_size (void)
1311 static int size = -1;
1312 int align, regno;
1313 enum machine_mode mode;
1315 /* The values computed by this function never change. */
1316 if (size < 0)
1318 size = 0;
1320 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1321 if (FUNCTION_VALUE_REGNO_P (regno))
1323 mode = reg_raw_mode[regno];
1325 gcc_assert (mode != VOIDmode);
1327 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1328 if (size % align != 0)
1329 size = CEIL (size, align) * align;
1330 size += GET_MODE_SIZE (mode);
1331 apply_result_mode[regno] = mode;
1333 else
1334 apply_result_mode[regno] = VOIDmode;
1336 /* Allow targets that use untyped_call and untyped_return to override
1337 the size so that machine-specific information can be stored here. */
1338 #ifdef APPLY_RESULT_SIZE
1339 size = APPLY_RESULT_SIZE;
1340 #endif
1342 return size;
1345 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1346 /* Create a vector describing the result block RESULT. If SAVEP is true,
1347 the result block is used to save the values; otherwise it is used to
1348 restore the values. */
1350 static rtx
1351 result_vector (int savep, rtx result)
1353 int regno, size, align, nelts;
1354 enum machine_mode mode;
1355 rtx reg, mem;
1356 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1358 size = nelts = 0;
1359 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1360 if ((mode = apply_result_mode[regno]) != VOIDmode)
1362 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1363 if (size % align != 0)
1364 size = CEIL (size, align) * align;
1365 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1366 mem = adjust_address (result, mode, size);
1367 savevec[nelts++] = (savep
1368 ? gen_rtx_SET (VOIDmode, mem, reg)
1369 : gen_rtx_SET (VOIDmode, reg, mem));
1370 size += GET_MODE_SIZE (mode);
1372 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1374 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1376 /* Save the state required to perform an untyped call with the same
1377 arguments as were passed to the current function. */
1379 static rtx
1380 expand_builtin_apply_args_1 (void)
1382 rtx registers, tem;
1383 int size, align, regno;
1384 enum machine_mode mode;
1385 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1387 /* Create a block where the arg-pointer, structure value address,
1388 and argument registers can be saved. */
1389 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1391 /* Walk past the arg-pointer and structure value address. */
1392 size = GET_MODE_SIZE (Pmode);
1393 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1394 size += GET_MODE_SIZE (Pmode);
1396 /* Save each register used in calling a function to the block. */
1397 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1398 if ((mode = apply_args_mode[regno]) != VOIDmode)
1400 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1401 if (size % align != 0)
1402 size = CEIL (size, align) * align;
1404 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1406 emit_move_insn (adjust_address (registers, mode, size), tem);
1407 size += GET_MODE_SIZE (mode);
1410 /* Save the arg pointer to the block. */
1411 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1412 #ifdef STACK_GROWS_DOWNWARD
1413 /* We need the pointer as the caller actually passed them to us, not
1414 as we might have pretended they were passed. Make sure it's a valid
1415 operand, as emit_move_insn isn't expected to handle a PLUS. */
  tem = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
		       NULL_RTX);
1419 #endif
1420 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1422 size = GET_MODE_SIZE (Pmode);
1424 /* Save the structure value address unless this is passed as an
1425 "invisible" first argument. */
1426 if (struct_incoming_value)
1428 emit_move_insn (adjust_address (registers, Pmode, size),
1429 copy_to_reg (struct_incoming_value));
1430 size += GET_MODE_SIZE (Pmode);
1433 /* Return the address of the block. */
1434 return copy_addr_to_reg (XEXP (registers, 0));
1437 /* __builtin_apply_args returns block of memory allocated on
1438 the stack into which is stored the arg pointer, structure
1439 value address, static chain, and all the registers that might
1440 possibly be used in performing a function call. The code is
1441 moved to the start of the function so the incoming values are
1442 saved. */
1444 static rtx
1445 expand_builtin_apply_args (void)
1447 /* Don't do __builtin_apply_args more than once in a function.
1448 Save the result of the first call and reuse it. */
1449 if (apply_args_value != 0)
1450 return apply_args_value;
1452 /* When this function is called, it means that registers must be
1453 saved on entry to this function. So we migrate the
1454 call to the first insn of this function. */
1455 rtx temp;
1456 rtx seq;
1458 start_sequence ();
1459 temp = expand_builtin_apply_args_1 ();
1460 seq = get_insns ();
1461 end_sequence ();
1463 apply_args_value = temp;
1465 /* Put the insns after the NOTE that starts the function.
1466 If this is inside a start_sequence, make the outer-level insn
1467 chain current, so the code is placed at the start of the
1468 function. If internal_arg_pointer is a non-virtual pseudo,
1469 it needs to be placed after the function that initializes
1470 that pseudo. */
1471 push_topmost_sequence ();
1472 if (REG_P (crtl->args.internal_arg_pointer)
1473 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1474 emit_insn_before (seq, parm_birth_insn);
1475 else
1476 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1477 pop_topmost_sequence ();
1478 return temp;
1482 /* Perform an untyped call and save the state required to perform an
1483 untyped return of whatever value was returned by the given function. */
1485 static rtx
1486 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1488 int size, align, regno;
1489 enum machine_mode mode;
1490 rtx incoming_args, result, reg, dest, src, call_insn;
1491 rtx old_stack_level = 0;
1492 rtx call_fusage = 0;
1493 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1495 arguments = convert_memory_address (Pmode, arguments);
1497 /* Create a block where the return registers can be saved. */
1498 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1500 /* Fetch the arg pointer from the ARGUMENTS block. */
1501 incoming_args = gen_reg_rtx (Pmode);
1502 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1503 #ifndef STACK_GROWS_DOWNWARD
1504 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1505 incoming_args, 0, OPTAB_LIB_WIDEN);
1506 #endif
1508 /* Push a new argument block and copy the arguments. Do not allow
1509 the (potential) memcpy call below to interfere with our stack
1510 manipulations. */
1511 do_pending_stack_adjust ();
1512 NO_DEFER_POP;
1514 /* Save the stack with nonlocal if available. */
1515 #ifdef HAVE_save_stack_nonlocal
1516 if (HAVE_save_stack_nonlocal)
1517 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1518 else
1519 #endif
1520 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1522 /* Allocate a block of memory onto the stack and copy the memory
1523 arguments to the outgoing arguments address. */
1524 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1526 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1527 may have already set current_function_calls_alloca to true.
1528 current_function_calls_alloca won't be set if argsize is zero,
1529 so we have to guarantee need_drap is true here. */
1530 if (SUPPORTS_STACK_ALIGNMENT)
1531 crtl->need_drap = true;
1533 dest = virtual_outgoing_args_rtx;
1534 #ifndef STACK_GROWS_DOWNWARD
1535 if (CONST_INT_P (argsize))
1536 dest = plus_constant (dest, -INTVAL (argsize));
1537 else
1538 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1539 #endif
1540 dest = gen_rtx_MEM (BLKmode, dest);
1541 set_mem_align (dest, PARM_BOUNDARY);
1542 src = gen_rtx_MEM (BLKmode, incoming_args);
1543 set_mem_align (src, PARM_BOUNDARY);
1544 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1546 /* Refer to the argument block. */
1547 apply_args_size ();
1548 arguments = gen_rtx_MEM (BLKmode, arguments);
1549 set_mem_align (arguments, PARM_BOUNDARY);
1551 /* Walk past the arg-pointer and structure value address. */
1552 size = GET_MODE_SIZE (Pmode);
1553 if (struct_value)
1554 size += GET_MODE_SIZE (Pmode);
1556 /* Restore each of the registers previously saved. Make USE insns
1557 for each of these registers for use in making the call. */
1558 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1559 if ((mode = apply_args_mode[regno]) != VOIDmode)
1561 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1562 if (size % align != 0)
1563 size = CEIL (size, align) * align;
1564 reg = gen_rtx_REG (mode, regno);
1565 emit_move_insn (reg, adjust_address (arguments, mode, size));
1566 use_reg (&call_fusage, reg);
1567 size += GET_MODE_SIZE (mode);
1570 /* Restore the structure value address unless this is passed as an
1571 "invisible" first argument. */
1572 size = GET_MODE_SIZE (Pmode);
1573 if (struct_value)
1575 rtx value = gen_reg_rtx (Pmode);
1576 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1577 emit_move_insn (struct_value, value);
1578 if (REG_P (struct_value))
1579 use_reg (&call_fusage, struct_value);
1580 size += GET_MODE_SIZE (Pmode);
1583 /* All arguments and registers used for the call are set up by now! */
1584 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1586 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1587 and we don't want to load it into a register as an optimization,
1588 because prepare_call_address already did it if it should be done. */
1589 if (GET_CODE (function) != SYMBOL_REF)
1590 function = memory_address (FUNCTION_MODE, function);
1592 /* Generate the actual call instruction and save the return value. */
1593 #ifdef HAVE_untyped_call
1594 if (HAVE_untyped_call)
1595 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1596 result, result_vector (1, result)));
1597 else
1598 #endif
1599 #ifdef HAVE_call_value
1600 if (HAVE_call_value)
1602 rtx valreg = 0;
1604 /* Locate the unique return register. It is not possible to
1605 express a call that sets more than one return register using
1606 call_value; use untyped_call for that. In fact, untyped_call
1607 only needs to save the return registers in the given block. */
1608 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1609 if ((mode = apply_result_mode[regno]) != VOIDmode)
1611 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1613 valreg = gen_rtx_REG (mode, regno);
1616 emit_call_insn (GEN_CALL_VALUE (valreg,
1617 gen_rtx_MEM (FUNCTION_MODE, function),
1618 const0_rtx, NULL_RTX, const0_rtx));
1620 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1622 else
1623 #endif
1624 gcc_unreachable ();
1626 /* Find the CALL insn we just emitted, and attach the register usage
1627 information. */
1628 call_insn = last_call_insn ();
1629 add_function_usage_to (call_insn, call_fusage);
1631 /* Restore the stack. */
1632 #ifdef HAVE_save_stack_nonlocal
1633 if (HAVE_save_stack_nonlocal)
1634 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1635 else
1636 #endif
1637 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1639 OK_DEFER_POP;
1641 /* Return the address of the result block. */
1642 result = copy_addr_to_reg (XEXP (result, 0));
1643 return convert_memory_address (ptr_mode, result);
1646 /* Perform an untyped return. */
1648 static void
1649 expand_builtin_return (rtx result)
1651 int size, align, regno;
1652 enum machine_mode mode;
1653 rtx reg;
1654 rtx call_fusage = 0;
1656 result = convert_memory_address (Pmode, result);
1658 apply_result_size ();
1659 result = gen_rtx_MEM (BLKmode, result);
1661 #ifdef HAVE_untyped_return
1662 if (HAVE_untyped_return)
1664 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1665 emit_barrier ();
1666 return;
1668 #endif
1670 /* Restore the return value and note that each value is used. */
1671 size = 0;
1672 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1673 if ((mode = apply_result_mode[regno]) != VOIDmode)
1675 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1676 if (size % align != 0)
1677 size = CEIL (size, align) * align;
1678 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1679 emit_move_insn (reg, adjust_address (result, mode, size));
1681 push_to_sequence (call_fusage);
1682 emit_use (reg);
1683 call_fusage = get_insns ();
1684 end_sequence ();
1685 size += GET_MODE_SIZE (mode);
1688 /* Put the USE insns before the return. */
1689 emit_insn (call_fusage);
1691 /* Return whatever value was restored by jumping directly to the end
1692 of the function. */
1693 expand_naked_return ();
1696 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1698 static enum type_class
1699 type_to_class (tree type)
1701 switch (TREE_CODE (type))
1703 case VOID_TYPE: return void_type_class;
1704 case INTEGER_TYPE: return integer_type_class;
1705 case ENUMERAL_TYPE: return enumeral_type_class;
1706 case BOOLEAN_TYPE: return boolean_type_class;
1707 case POINTER_TYPE: return pointer_type_class;
1708 case REFERENCE_TYPE: return reference_type_class;
1709 case OFFSET_TYPE: return offset_type_class;
1710 case REAL_TYPE: return real_type_class;
1711 case COMPLEX_TYPE: return complex_type_class;
1712 case FUNCTION_TYPE: return function_type_class;
1713 case METHOD_TYPE: return method_type_class;
1714 case RECORD_TYPE: return record_type_class;
1715 case UNION_TYPE:
1716 case QUAL_UNION_TYPE: return union_type_class;
1717 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1718 ? string_type_class : array_type_class);
1719 case LANG_TYPE: return lang_type_class;
1720 default: return no_type_class;
1724 /* Expand a call EXP to __builtin_classify_type. */
1726 static rtx
1727 expand_builtin_classify_type (tree exp)
1729 if (call_expr_nargs (exp))
1730 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1731 return GEN_INT (no_type_class);
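/* Illustrative note (not in the original code): a call such as
   __builtin_classify_type (1.5) expands to the integer constant for
   real_type_class, while a call with no arguments expands to the
   constant for no_type_class. */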
1734 /* This helper macro, meant to be used in mathfn_built_in below,
1735 determines which among a set of three builtin math functions is
1736 appropriate for a given type mode. The `F' and `L' cases are
1737 automatically generated from the `double' case. */
1738 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1739 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1740 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1741 fcodel = BUILT_IN_MATHFN##L ; break;
1742 /* Similar to above, but appends _R after any F/L suffix. */
1743 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1744 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1745 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1746 fcodel = BUILT_IN_MATHFN##L_R ; break;
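/* For illustration only, CASE_MATHFN (BUILT_IN_SQRT) expands to

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   so a single CASE_MATHFN line covers the double, float and long double
   variants of the same math builtin. */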
1748 /* Return the mathematical function equivalent to FN but operating directly
1749 on TYPE, if available. If IMPLICIT is true, find the function in
1750 implicit_built_in_decls[]; otherwise use built_in_decls[]. If we
1751 can't do the conversion, return zero. */
1753 static tree
1754 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1756 tree const *const fn_arr
1757 = implicit ? implicit_built_in_decls : built_in_decls;
1758 enum built_in_function fcode, fcodef, fcodel;
1760 switch (fn)
1762 CASE_MATHFN (BUILT_IN_ACOS)
1763 CASE_MATHFN (BUILT_IN_ACOSH)
1764 CASE_MATHFN (BUILT_IN_ASIN)
1765 CASE_MATHFN (BUILT_IN_ASINH)
1766 CASE_MATHFN (BUILT_IN_ATAN)
1767 CASE_MATHFN (BUILT_IN_ATAN2)
1768 CASE_MATHFN (BUILT_IN_ATANH)
1769 CASE_MATHFN (BUILT_IN_CBRT)
1770 CASE_MATHFN (BUILT_IN_CEIL)
1771 CASE_MATHFN (BUILT_IN_CEXPI)
1772 CASE_MATHFN (BUILT_IN_COPYSIGN)
1773 CASE_MATHFN (BUILT_IN_COS)
1774 CASE_MATHFN (BUILT_IN_COSH)
1775 CASE_MATHFN (BUILT_IN_DREM)
1776 CASE_MATHFN (BUILT_IN_ERF)
1777 CASE_MATHFN (BUILT_IN_ERFC)
1778 CASE_MATHFN (BUILT_IN_EXP)
1779 CASE_MATHFN (BUILT_IN_EXP10)
1780 CASE_MATHFN (BUILT_IN_EXP2)
1781 CASE_MATHFN (BUILT_IN_EXPM1)
1782 CASE_MATHFN (BUILT_IN_FABS)
1783 CASE_MATHFN (BUILT_IN_FDIM)
1784 CASE_MATHFN (BUILT_IN_FLOOR)
1785 CASE_MATHFN (BUILT_IN_FMA)
1786 CASE_MATHFN (BUILT_IN_FMAX)
1787 CASE_MATHFN (BUILT_IN_FMIN)
1788 CASE_MATHFN (BUILT_IN_FMOD)
1789 CASE_MATHFN (BUILT_IN_FREXP)
1790 CASE_MATHFN (BUILT_IN_GAMMA)
1791 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1792 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1793 CASE_MATHFN (BUILT_IN_HYPOT)
1794 CASE_MATHFN (BUILT_IN_ILOGB)
1795 CASE_MATHFN (BUILT_IN_INF)
1796 CASE_MATHFN (BUILT_IN_ISINF)
1797 CASE_MATHFN (BUILT_IN_J0)
1798 CASE_MATHFN (BUILT_IN_J1)
1799 CASE_MATHFN (BUILT_IN_JN)
1800 CASE_MATHFN (BUILT_IN_LCEIL)
1801 CASE_MATHFN (BUILT_IN_LDEXP)
1802 CASE_MATHFN (BUILT_IN_LFLOOR)
1803 CASE_MATHFN (BUILT_IN_LGAMMA)
1804 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1805 CASE_MATHFN (BUILT_IN_LLCEIL)
1806 CASE_MATHFN (BUILT_IN_LLFLOOR)
1807 CASE_MATHFN (BUILT_IN_LLRINT)
1808 CASE_MATHFN (BUILT_IN_LLROUND)
1809 CASE_MATHFN (BUILT_IN_LOG)
1810 CASE_MATHFN (BUILT_IN_LOG10)
1811 CASE_MATHFN (BUILT_IN_LOG1P)
1812 CASE_MATHFN (BUILT_IN_LOG2)
1813 CASE_MATHFN (BUILT_IN_LOGB)
1814 CASE_MATHFN (BUILT_IN_LRINT)
1815 CASE_MATHFN (BUILT_IN_LROUND)
1816 CASE_MATHFN (BUILT_IN_MODF)
1817 CASE_MATHFN (BUILT_IN_NAN)
1818 CASE_MATHFN (BUILT_IN_NANS)
1819 CASE_MATHFN (BUILT_IN_NEARBYINT)
1820 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1821 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1822 CASE_MATHFN (BUILT_IN_POW)
1823 CASE_MATHFN (BUILT_IN_POWI)
1824 CASE_MATHFN (BUILT_IN_POW10)
1825 CASE_MATHFN (BUILT_IN_REMAINDER)
1826 CASE_MATHFN (BUILT_IN_REMQUO)
1827 CASE_MATHFN (BUILT_IN_RINT)
1828 CASE_MATHFN (BUILT_IN_ROUND)
1829 CASE_MATHFN (BUILT_IN_SCALB)
1830 CASE_MATHFN (BUILT_IN_SCALBLN)
1831 CASE_MATHFN (BUILT_IN_SCALBN)
1832 CASE_MATHFN (BUILT_IN_SIGNBIT)
1833 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1834 CASE_MATHFN (BUILT_IN_SIN)
1835 CASE_MATHFN (BUILT_IN_SINCOS)
1836 CASE_MATHFN (BUILT_IN_SINH)
1837 CASE_MATHFN (BUILT_IN_SQRT)
1838 CASE_MATHFN (BUILT_IN_TAN)
1839 CASE_MATHFN (BUILT_IN_TANH)
1840 CASE_MATHFN (BUILT_IN_TGAMMA)
1841 CASE_MATHFN (BUILT_IN_TRUNC)
1842 CASE_MATHFN (BUILT_IN_Y0)
1843 CASE_MATHFN (BUILT_IN_Y1)
1844 CASE_MATHFN (BUILT_IN_YN)
1846 default:
1847 return NULL_TREE;
1850 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1851 return fn_arr[fcode];
1852 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1853 return fn_arr[fcodef];
1854 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1855 return fn_arr[fcodel];
1856 else
1857 return NULL_TREE;
1860 /* Like mathfn_built_in_1(), but always use the implicit array. */
1862 tree
1863 mathfn_built_in (tree type, enum built_in_function fn)
1865 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
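/* Usage sketch (illustrative): mathfn_built_in (float_type_node, BUILT_IN_SQRT)
   returns the implicit declaration of sqrtf when the target provides one,
   and NULL_TREE otherwise. */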
1868 /* If errno must be maintained, expand the RTL to check if the result,
1869 TARGET, of a built-in function call, EXP, is NaN, and if so set
1870 errno to EDOM. */
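/* When errno can be set directly (TARGET_EDOM defined and the call cannot
   throw), the code below conceptually emits the equivalent of

     if (result != result)
       errno = EDOM;

   relying on the fact that only a NaN compares unequal to itself;
   otherwise the library call is re-emitted so that it can set errno.
   This is an explanatory sketch only. */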
1872 static void
1873 expand_errno_check (tree exp, rtx target)
1875 rtx lab = gen_label_rtx ();
1877 /* Test the result; if it is NaN, set errno=EDOM because
1878 the argument was not in the domain. */
1879 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1880 NULL_RTX, NULL_RTX, lab);
1882 #ifdef TARGET_EDOM
1883 /* If this built-in doesn't throw an exception, set errno directly. */
1884 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1886 #ifdef GEN_ERRNO_RTX
1887 rtx errno_rtx = GEN_ERRNO_RTX;
1888 #else
1889 rtx errno_rtx
1890 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1891 #endif
1892 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1893 emit_label (lab);
1894 return;
1896 #endif
1898 /* Make sure the library call isn't expanded as a tail call. */
1899 CALL_EXPR_TAILCALL (exp) = 0;
1901 /* We can't set errno=EDOM directly; let the library call do it.
1902 Pop the arguments right away in case the call gets deleted. */
1903 NO_DEFER_POP;
1904 expand_call (exp, target, 0);
1905 OK_DEFER_POP;
1906 emit_label (lab);
1909 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1910 Return NULL_RTX if a normal call should be emitted rather than expanding
1911 the function in-line. EXP is the expression that is a call to the builtin
1912 function; if convenient, the result should be placed in TARGET.
1913 SUBTARGET may be used as the target for computing one of EXP's operands. */
1915 static rtx
1916 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1918 optab builtin_optab;
1919 rtx op0, insns, before_call;
1920 tree fndecl = get_callee_fndecl (exp);
1921 enum machine_mode mode;
1922 bool errno_set = false;
1923 tree arg;
1925 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1926 return NULL_RTX;
1928 arg = CALL_EXPR_ARG (exp, 0);
1930 switch (DECL_FUNCTION_CODE (fndecl))
1932 CASE_FLT_FN (BUILT_IN_SQRT):
1933 errno_set = ! tree_expr_nonnegative_p (arg);
1934 builtin_optab = sqrt_optab;
1935 break;
1936 CASE_FLT_FN (BUILT_IN_EXP):
1937 errno_set = true; builtin_optab = exp_optab; break;
1938 CASE_FLT_FN (BUILT_IN_EXP10):
1939 CASE_FLT_FN (BUILT_IN_POW10):
1940 errno_set = true; builtin_optab = exp10_optab; break;
1941 CASE_FLT_FN (BUILT_IN_EXP2):
1942 errno_set = true; builtin_optab = exp2_optab; break;
1943 CASE_FLT_FN (BUILT_IN_EXPM1):
1944 errno_set = true; builtin_optab = expm1_optab; break;
1945 CASE_FLT_FN (BUILT_IN_LOGB):
1946 errno_set = true; builtin_optab = logb_optab; break;
1947 CASE_FLT_FN (BUILT_IN_LOG):
1948 errno_set = true; builtin_optab = log_optab; break;
1949 CASE_FLT_FN (BUILT_IN_LOG10):
1950 errno_set = true; builtin_optab = log10_optab; break;
1951 CASE_FLT_FN (BUILT_IN_LOG2):
1952 errno_set = true; builtin_optab = log2_optab; break;
1953 CASE_FLT_FN (BUILT_IN_LOG1P):
1954 errno_set = true; builtin_optab = log1p_optab; break;
1955 CASE_FLT_FN (BUILT_IN_ASIN):
1956 builtin_optab = asin_optab; break;
1957 CASE_FLT_FN (BUILT_IN_ACOS):
1958 builtin_optab = acos_optab; break;
1959 CASE_FLT_FN (BUILT_IN_TAN):
1960 builtin_optab = tan_optab; break;
1961 CASE_FLT_FN (BUILT_IN_ATAN):
1962 builtin_optab = atan_optab; break;
1963 CASE_FLT_FN (BUILT_IN_FLOOR):
1964 builtin_optab = floor_optab; break;
1965 CASE_FLT_FN (BUILT_IN_CEIL):
1966 builtin_optab = ceil_optab; break;
1967 CASE_FLT_FN (BUILT_IN_TRUNC):
1968 builtin_optab = btrunc_optab; break;
1969 CASE_FLT_FN (BUILT_IN_ROUND):
1970 builtin_optab = round_optab; break;
1971 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1972 builtin_optab = nearbyint_optab;
1973 if (flag_trapping_math)
1974 break;
1975 /* Else fall through and expand as rint. */
1976 CASE_FLT_FN (BUILT_IN_RINT):
1977 builtin_optab = rint_optab; break;
1978 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1979 builtin_optab = significand_optab; break;
1980 default:
1981 gcc_unreachable ();
1984 /* Make a suitable register to place result in. */
1985 mode = TYPE_MODE (TREE_TYPE (exp));
1987 if (! flag_errno_math || ! HONOR_NANS (mode))
1988 errno_set = false;
1990 /* Before working hard, check whether the instruction is available. */
1991 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1993 target = gen_reg_rtx (mode);
1995 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1996 need to expand the argument again. This way, we will not perform
1997 side effects more than once. */
1998 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2000 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2002 start_sequence ();
2004 /* Compute into TARGET.
2005 Set TARGET to wherever the result comes back. */
2006 target = expand_unop (mode, builtin_optab, op0, target, 0);
2008 if (target != 0)
2010 if (errno_set)
2011 expand_errno_check (exp, target);
2013 /* Output the entire sequence. */
2014 insns = get_insns ();
2015 end_sequence ();
2016 emit_insn (insns);
2017 return target;
2020 /* If we were unable to expand via the builtin, stop the sequence
2021 (without outputting the insns) and call the library function
2022 with the stabilized argument list. */
2023 end_sequence ();
2026 before_call = get_last_insn ();
2028 return expand_call (exp, target, target == const0_rtx);
2031 /* Expand a call to the builtin binary math functions (pow and atan2).
2032 Return NULL_RTX if a normal call should be emitted rather than expanding the
2033 function in-line. EXP is the expression that is a call to the builtin
2034 function; if convenient, the result should be placed in TARGET.
2035 SUBTARGET may be used as the target for computing one of EXP's
2036 operands. */
2038 static rtx
2039 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2041 optab builtin_optab;
2042 rtx op0, op1, insns;
2043 int op1_type = REAL_TYPE;
2044 tree fndecl = get_callee_fndecl (exp);
2045 tree arg0, arg1;
2046 enum machine_mode mode;
2047 bool errno_set = true;
2049 switch (DECL_FUNCTION_CODE (fndecl))
2051 CASE_FLT_FN (BUILT_IN_SCALBN):
2052 CASE_FLT_FN (BUILT_IN_SCALBLN):
2053 CASE_FLT_FN (BUILT_IN_LDEXP):
2054 op1_type = INTEGER_TYPE;
2055 default:
2056 break;
2059 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2060 return NULL_RTX;
2062 arg0 = CALL_EXPR_ARG (exp, 0);
2063 arg1 = CALL_EXPR_ARG (exp, 1);
2065 switch (DECL_FUNCTION_CODE (fndecl))
2067 CASE_FLT_FN (BUILT_IN_POW):
2068 builtin_optab = pow_optab; break;
2069 CASE_FLT_FN (BUILT_IN_ATAN2):
2070 builtin_optab = atan2_optab; break;
2071 CASE_FLT_FN (BUILT_IN_SCALB):
2072 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2073 return 0;
2074 builtin_optab = scalb_optab; break;
2075 CASE_FLT_FN (BUILT_IN_SCALBN):
2076 CASE_FLT_FN (BUILT_IN_SCALBLN):
2077 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2078 return 0;
2079 /* Fall through... */
2080 CASE_FLT_FN (BUILT_IN_LDEXP):
2081 builtin_optab = ldexp_optab; break;
2082 CASE_FLT_FN (BUILT_IN_FMOD):
2083 builtin_optab = fmod_optab; break;
2084 CASE_FLT_FN (BUILT_IN_REMAINDER):
2085 CASE_FLT_FN (BUILT_IN_DREM):
2086 builtin_optab = remainder_optab; break;
2087 default:
2088 gcc_unreachable ();
2091 /* Make a suitable register to place result in. */
2092 mode = TYPE_MODE (TREE_TYPE (exp));
2094 /* Before working hard, check whether the instruction is available. */
2095 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2096 return NULL_RTX;
2098 target = gen_reg_rtx (mode);
2100 if (! flag_errno_math || ! HONOR_NANS (mode))
2101 errno_set = false;
2103 /* Always stabilize the argument list. */
2104 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2105 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2107 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2108 op1 = expand_normal (arg1);
2110 start_sequence ();
2112 /* Compute into TARGET.
2113 Set TARGET to wherever the result comes back. */
2114 target = expand_binop (mode, builtin_optab, op0, op1,
2115 target, 0, OPTAB_DIRECT);
2117 /* If we were unable to expand via the builtin, stop the sequence
2118 (without outputting the insns) and call the library function
2119 with the stabilized argument list. */
2120 if (target == 0)
2122 end_sequence ();
2123 return expand_call (exp, target, target == const0_rtx);
2126 if (errno_set)
2127 expand_errno_check (exp, target);
2129 /* Output the entire sequence. */
2130 insns = get_insns ();
2131 end_sequence ();
2132 emit_insn (insns);
2134 return target;
2137 /* Expand a call to the builtin sin and cos math functions.
2138 Return NULL_RTX if a normal call should be emitted rather than expanding the
2139 function in-line. EXP is the expression that is a call to the builtin
2140 function; if convenient, the result should be placed in TARGET.
2141 SUBTARGET may be used as the target for computing one of EXP's
2142 operands. */
2144 static rtx
2145 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2147 optab builtin_optab;
2148 rtx op0, insns;
2149 tree fndecl = get_callee_fndecl (exp);
2150 enum machine_mode mode;
2151 tree arg;
2153 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2154 return NULL_RTX;
2156 arg = CALL_EXPR_ARG (exp, 0);
2158 switch (DECL_FUNCTION_CODE (fndecl))
2160 CASE_FLT_FN (BUILT_IN_SIN):
2161 CASE_FLT_FN (BUILT_IN_COS):
2162 builtin_optab = sincos_optab; break;
2163 default:
2164 gcc_unreachable ();
2167 /* Make a suitable register to place result in. */
2168 mode = TYPE_MODE (TREE_TYPE (exp));
2170 /* Check if the sincos insn is available; otherwise fall back
2171 to the sin or cos insn. */
2172 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2173 switch (DECL_FUNCTION_CODE (fndecl))
2175 CASE_FLT_FN (BUILT_IN_SIN):
2176 builtin_optab = sin_optab; break;
2177 CASE_FLT_FN (BUILT_IN_COS):
2178 builtin_optab = cos_optab; break;
2179 default:
2180 gcc_unreachable ();
2183 /* Before working hard, check whether the instruction is available. */
2184 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2186 target = gen_reg_rtx (mode);
2188 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2189 need to expand the argument again. This way, we will not perform
2190 side effects more than once. */
2191 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2193 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2195 start_sequence ();
2197 /* Compute into TARGET.
2198 Set TARGET to wherever the result comes back. */
2199 if (builtin_optab == sincos_optab)
2201 int result;
2203 switch (DECL_FUNCTION_CODE (fndecl))
2205 CASE_FLT_FN (BUILT_IN_SIN):
2206 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2207 break;
2208 CASE_FLT_FN (BUILT_IN_COS):
2209 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2210 break;
2211 default:
2212 gcc_unreachable ();
2214 gcc_assert (result);
2216 else
2218 target = expand_unop (mode, builtin_optab, op0, target, 0);
2221 if (target != 0)
2223 /* Output the entire sequence. */
2224 insns = get_insns ();
2225 end_sequence ();
2226 emit_insn (insns);
2227 return target;
2230 /* If we were unable to expand via the builtin, stop the sequence
2231 (without outputting the insns) and call the library function
2232 with the stabilized argument list. */
2233 end_sequence ();
2236 target = expand_call (exp, target, target == const0_rtx);
2238 return target;
2241 /* Expand a call to one of the builtin math functions that operate on
2242 a floating point argument and output an integer result (ilogb, isinf,
2243 isnan, etc).
2244 Return 0 if a normal call should be emitted rather than expanding the
2245 function in-line. EXP is the expression that is a call to the builtin
2246 function; if convenient, the result should be placed in TARGET.
2247 SUBTARGET may be used as the target for computing one of EXP's operands. */
2249 static rtx
2250 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2252 optab builtin_optab = 0;
2253 enum insn_code icode = CODE_FOR_nothing;
2254 rtx op0;
2255 tree fndecl = get_callee_fndecl (exp);
2256 enum machine_mode mode;
2257 bool errno_set = false;
2258 tree arg;
2260 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2261 return NULL_RTX;
2263 arg = CALL_EXPR_ARG (exp, 0);
2265 switch (DECL_FUNCTION_CODE (fndecl))
2267 CASE_FLT_FN (BUILT_IN_ILOGB):
2268 errno_set = true; builtin_optab = ilogb_optab; break;
2269 CASE_FLT_FN (BUILT_IN_ISINF):
2270 builtin_optab = isinf_optab; break;
2271 case BUILT_IN_ISNORMAL:
2272 case BUILT_IN_ISFINITE:
2273 CASE_FLT_FN (BUILT_IN_FINITE):
2274 /* These builtins have no optabs (yet). */
2275 break;
2276 default:
2277 gcc_unreachable ();
2280 /* There's no easy way to detect the case in which we need to set EDOM. */
2281 if (flag_errno_math && errno_set)
2282 return NULL_RTX;
2284 /* Optab mode depends on the mode of the input argument. */
2285 mode = TYPE_MODE (TREE_TYPE (arg));
2287 if (builtin_optab)
2288 icode = optab_handler (builtin_optab, mode)->insn_code;
2290 /* Before working hard, check whether the instruction is available. */
2291 if (icode != CODE_FOR_nothing)
2293 /* Make a suitable register to place result in. */
2294 if (!target
2295 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2296 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2298 gcc_assert (insn_data[icode].operand[0].predicate
2299 (target, GET_MODE (target)));
2301 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2302 need to expand the argument again. This way, we will not perform
2303 side effects more than once. */
2304 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2306 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2308 if (mode != GET_MODE (op0))
2309 op0 = convert_to_mode (mode, op0, 0);
2311 /* Compute into TARGET.
2312 Set TARGET to wherever the result comes back. */
2313 emit_unop_insn (icode, target, op0, UNKNOWN);
2314 return target;
2317 /* If there is no optab, try generic code. */
2318 switch (DECL_FUNCTION_CODE (fndecl))
2320 tree result;
2322 CASE_FLT_FN (BUILT_IN_ISINF):
2324 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2325 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2326 tree const type = TREE_TYPE (arg);
2327 REAL_VALUE_TYPE r;
2328 char buf[128];
2330 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2331 real_from_string (&r, buf);
2332 result = build_call_expr (isgr_fn, 2,
2333 fold_build1 (ABS_EXPR, type, arg),
2334 build_real (type, r));
2335 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2337 CASE_FLT_FN (BUILT_IN_FINITE):
2338 case BUILT_IN_ISFINITE:
2340 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2341 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2342 tree const type = TREE_TYPE (arg);
2343 REAL_VALUE_TYPE r;
2344 char buf[128];
2346 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2347 real_from_string (&r, buf);
2348 result = build_call_expr (isle_fn, 2,
2349 fold_build1 (ABS_EXPR, type, arg),
2350 build_real (type, r));
2351 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2353 case BUILT_IN_ISNORMAL:
2355 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2356 islessequal(fabs(x),DBL_MAX). */
2357 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2358 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2359 tree const type = TREE_TYPE (arg);
2360 REAL_VALUE_TYPE rmax, rmin;
2361 char buf[128];
2363 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2364 real_from_string (&rmax, buf);
2365 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2366 real_from_string (&rmin, buf);
2367 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2368 result = build_call_expr (isle_fn, 2, arg,
2369 build_real (type, rmax));
2370 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2371 build_call_expr (isge_fn, 2, arg,
2372 build_real (type, rmin)));
2373 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2375 default:
2376 break;
2379 target = expand_call (exp, target, target == const0_rtx);
2381 return target;
2384 /* Expand a call to the builtin sincos math function.
2385 Return NULL_RTX if a normal call should be emitted rather than expanding the
2386 function in-line. EXP is the expression that is a call to the builtin
2387 function. */
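/* As a sketch of the semantics being expanded: sincos (x, &s, &c) stores
   sin (x) into *s and cos (x) into *c, and below this is mapped onto a
   single sincos instruction when the target provides one. */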
2389 static rtx
2390 expand_builtin_sincos (tree exp)
2392 rtx op0, op1, op2, target1, target2;
2393 enum machine_mode mode;
2394 tree arg, sinp, cosp;
2395 int result;
2397 if (!validate_arglist (exp, REAL_TYPE,
2398 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2399 return NULL_RTX;
2401 arg = CALL_EXPR_ARG (exp, 0);
2402 sinp = CALL_EXPR_ARG (exp, 1);
2403 cosp = CALL_EXPR_ARG (exp, 2);
2405 /* Make a suitable register to place result in. */
2406 mode = TYPE_MODE (TREE_TYPE (arg));
2408 /* Check if sincos insn is available, otherwise emit the call. */
2409 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2410 return NULL_RTX;
2412 target1 = gen_reg_rtx (mode);
2413 target2 = gen_reg_rtx (mode);
2415 op0 = expand_normal (arg);
2416 op1 = expand_normal (build_fold_indirect_ref (sinp));
2417 op2 = expand_normal (build_fold_indirect_ref (cosp));
2419 /* Compute into target1 and target2.
2420 Set TARGET to wherever the result comes back. */
2421 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2422 gcc_assert (result);
2424 /* Move target1 and target2 to the memory locations indicated
2425 by op1 and op2. */
2426 emit_move_insn (op1, target1);
2427 emit_move_insn (op2, target2);
2429 return const0_rtx;
2432 /* Expand a call to the internal cexpi builtin to the sincos math function.
2433 EXP is the expression that is a call to the builtin function; if convenient,
2434 the result should be placed in TARGET. SUBTARGET may be used as the target
2435 for computing one of EXP's operands. */
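/* As a rough sketch of the semantics: __builtin_cexpi (x) computes
   cexp (I * x), i.e. cos (x) + I * sin (x), which is why it can be
   lowered below either to a sincos call or to a cexp call. */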
2437 static rtx
2438 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2440 tree fndecl = get_callee_fndecl (exp);
2441 tree arg, type;
2442 enum machine_mode mode;
2443 rtx op0, op1, op2;
2445 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2446 return NULL_RTX;
2448 arg = CALL_EXPR_ARG (exp, 0);
2449 type = TREE_TYPE (arg);
2450 mode = TYPE_MODE (TREE_TYPE (arg));
2452 /* Try expanding via a sincos optab; fall back to emitting a libcall
2453 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2454 is only generated from sincos or cexp, or when either of them is available. */
2455 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2457 op1 = gen_reg_rtx (mode);
2458 op2 = gen_reg_rtx (mode);
2460 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2462 /* Compute into op1 and op2. */
2463 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2465 else if (TARGET_HAS_SINCOS)
2467 tree call, fn = NULL_TREE;
2468 tree top1, top2;
2469 rtx op1a, op2a;
2471 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2472 fn = built_in_decls[BUILT_IN_SINCOSF];
2473 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2474 fn = built_in_decls[BUILT_IN_SINCOS];
2475 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2476 fn = built_in_decls[BUILT_IN_SINCOSL];
2477 else
2478 gcc_unreachable ();
2480 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2481 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2482 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2483 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2484 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2485 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2487 /* Make sure not to fold the sincos call again. */
2488 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2489 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2490 call, 3, arg, top1, top2));
2492 else
2494 tree call, fn = NULL_TREE, narg;
2495 tree ctype = build_complex_type (type);
2497 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2498 fn = built_in_decls[BUILT_IN_CEXPF];
2499 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2500 fn = built_in_decls[BUILT_IN_CEXP];
2501 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2502 fn = built_in_decls[BUILT_IN_CEXPL];
2503 else
2504 gcc_unreachable ();
2506 /* If we don't have a decl for cexp, create one. This is the
2507 friendliest fallback if the user calls __builtin_cexpi
2508 on a target without full C99 function support. */
2509 if (fn == NULL_TREE)
2511 tree fntype;
2512 const char *name = NULL;
2514 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2515 name = "cexpf";
2516 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2517 name = "cexp";
2518 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2519 name = "cexpl";
2521 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2522 fn = build_fn_decl (name, fntype);
2525 narg = fold_build2 (COMPLEX_EXPR, ctype,
2526 build_real (type, dconst0), arg);
2528 /* Make sure not to fold the cexp call again. */
2529 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2530 return expand_expr (build_call_nary (ctype, call, 1, narg),
2531 target, VOIDmode, EXPAND_NORMAL);
2534 /* Now build the proper return type. */
2535 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2536 make_tree (TREE_TYPE (arg), op2),
2537 make_tree (TREE_TYPE (arg), op1)),
2538 target, VOIDmode, EXPAND_NORMAL);
2541 /* Expand a call to one of the builtin rounding functions gcc defines
2542 as an extension (lfloor and lceil). As these are gcc extensions we
2543 do not need to worry about setting errno to EDOM.
2544 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2545 EXP is the expression that is a call to the builtin function;
2546 if convenient, the result should be placed in TARGET. */
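/* For example (illustrative only): on a target without an lceil optab,
   a call to __builtin_lceilf (x) is lowered via the fallback path below
   to roughly (long int) ceilf (x), where the final conversion is done by
   expand_fix and is exact because ceilf already returns an integral
   value. */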
2548 static rtx
2549 expand_builtin_int_roundingfn (tree exp, rtx target)
2551 convert_optab builtin_optab;
2552 rtx op0, insns, tmp;
2553 tree fndecl = get_callee_fndecl (exp);
2554 enum built_in_function fallback_fn;
2555 tree fallback_fndecl;
2556 enum machine_mode mode;
2557 tree arg;
2559 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2560 gcc_unreachable ();
2562 arg = CALL_EXPR_ARG (exp, 0);
2564 switch (DECL_FUNCTION_CODE (fndecl))
2566 CASE_FLT_FN (BUILT_IN_LCEIL):
2567 CASE_FLT_FN (BUILT_IN_LLCEIL):
2568 builtin_optab = lceil_optab;
2569 fallback_fn = BUILT_IN_CEIL;
2570 break;
2572 CASE_FLT_FN (BUILT_IN_LFLOOR):
2573 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2574 builtin_optab = lfloor_optab;
2575 fallback_fn = BUILT_IN_FLOOR;
2576 break;
2578 default:
2579 gcc_unreachable ();
2582 /* Make a suitable register to place result in. */
2583 mode = TYPE_MODE (TREE_TYPE (exp));
2585 target = gen_reg_rtx (mode);
2587 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2588 need to expand the argument again. This way, we will not perform
2589 side effects more than once. */
2590 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2592 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2594 start_sequence ();
2596 /* Compute into TARGET. */
2597 if (expand_sfix_optab (target, op0, builtin_optab))
2599 /* Output the entire sequence. */
2600 insns = get_insns ();
2601 end_sequence ();
2602 emit_insn (insns);
2603 return target;
2606 /* If we were unable to expand via the builtin, stop the sequence
2607 (without outputting the insns). */
2608 end_sequence ();
2610 /* Fall back to floating point rounding optab. */
2611 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2613 /* For non-C99 targets we may end up without a fallback fndecl here
2614 if the user called __builtin_lfloor directly. In this case emit
2615 a call to the floor/ceil variants nevertheless. This should give
2616 the best user experience on targets without full C99 support. */
2617 if (fallback_fndecl == NULL_TREE)
2619 tree fntype;
2620 const char *name = NULL;
2622 switch (DECL_FUNCTION_CODE (fndecl))
2624 case BUILT_IN_LCEIL:
2625 case BUILT_IN_LLCEIL:
2626 name = "ceil";
2627 break;
2628 case BUILT_IN_LCEILF:
2629 case BUILT_IN_LLCEILF:
2630 name = "ceilf";
2631 break;
2632 case BUILT_IN_LCEILL:
2633 case BUILT_IN_LLCEILL:
2634 name = "ceill";
2635 break;
2636 case BUILT_IN_LFLOOR:
2637 case BUILT_IN_LLFLOOR:
2638 name = "floor";
2639 break;
2640 case BUILT_IN_LFLOORF:
2641 case BUILT_IN_LLFLOORF:
2642 name = "floorf";
2643 break;
2644 case BUILT_IN_LFLOORL:
2645 case BUILT_IN_LLFLOORL:
2646 name = "floorl";
2647 break;
2648 default:
2649 gcc_unreachable ();
2652 fntype = build_function_type_list (TREE_TYPE (arg),
2653 TREE_TYPE (arg), NULL_TREE);
2654 fallback_fndecl = build_fn_decl (name, fntype);
2657 exp = build_call_expr (fallback_fndecl, 1, arg);
2659 tmp = expand_normal (exp);
2661 /* Truncate the result of the floating point optab to an integer
2662 via expand_fix (). */
2663 target = gen_reg_rtx (mode);
2664 expand_fix (target, tmp, 0);
2666 return target;
2669 /* Expand a call to one of the builtin math functions doing integer
2670 conversion (lrint).
2671 Return 0 if a normal call should be emitted rather than expanding the
2672 function in-line. EXP is the expression that is a call to the builtin
2673 function; if convenient, the result should be placed in TARGET. */
2675 static rtx
2676 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2678 convert_optab builtin_optab;
2679 rtx op0, insns;
2680 tree fndecl = get_callee_fndecl (exp);
2681 tree arg;
2682 enum machine_mode mode;
2684 /* There's no easy way to detect the case in which we need to set EDOM. */
2685 if (flag_errno_math)
2686 return NULL_RTX;
2688 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2689 gcc_unreachable ();
2691 arg = CALL_EXPR_ARG (exp, 0);
2693 switch (DECL_FUNCTION_CODE (fndecl))
2695 CASE_FLT_FN (BUILT_IN_LRINT):
2696 CASE_FLT_FN (BUILT_IN_LLRINT):
2697 builtin_optab = lrint_optab; break;
2698 CASE_FLT_FN (BUILT_IN_LROUND):
2699 CASE_FLT_FN (BUILT_IN_LLROUND):
2700 builtin_optab = lround_optab; break;
2701 default:
2702 gcc_unreachable ();
2705 /* Make a suitable register to place result in. */
2706 mode = TYPE_MODE (TREE_TYPE (exp));
2708 target = gen_reg_rtx (mode);
2710 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2711 need to expand the argument again. This way, we will not perform
2712 side effects more than once. */
2713 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2715 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2717 start_sequence ();
2719 if (expand_sfix_optab (target, op0, builtin_optab))
2721 /* Output the entire sequence. */
2722 insns = get_insns ();
2723 end_sequence ();
2724 emit_insn (insns);
2725 return target;
2728 /* If we were unable to expand via the builtin, stop the sequence
2729 (without outputting the insns) and call the library function
2730 with the stabilized argument list. */
2731 end_sequence ();
2733 target = expand_call (exp, target, target == const0_rtx);
2735 return target;
2738 /* To evaluate powi(x,n), the floating point value x raised to the
2739 constant integer exponent n, we use a hybrid algorithm that
2740 combines the "window method" with look-up tables. For an
2741 introduction to exponentiation algorithms and "addition chains",
2742 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2743 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2744 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2745 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2747 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2748 multiplications to inline before calling the system library's pow
2749 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2750 so this default never requires calling pow, powf or powl. */
2752 #ifndef POWI_MAX_MULTS
2753 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2754 #endif
2756 /* The size of the "optimal power tree" lookup table. All
2757 exponents less than this value are simply looked up in the
2758 powi_table below. This threshold is also used to size the
2759 cache of pseudo registers that hold intermediate results. */
2760 #define POWI_TABLE_SIZE 256
2762 /* The size, in bits, of the window used in the "window method"
2763 exponentiation algorithm. This is equivalent to a radix of
2764 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2765 #define POWI_WINDOW_SIZE 3
2767 /* The following table is an efficient representation of an
2768 "optimal power tree". For each value, i, the corresponding
2769 value, j, in the table states that an optimal evaluation
2770 sequence for calculating pow(x,i) can be found by evaluating
2771 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2772 100 integers is given in Knuth's "Seminumerical algorithms". */
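/* A worked example (for illustration): powi_table[15] is 9, so x**15 is
   evaluated as x**9 * x**6; recursing in the same way gives the chain

     x2 = x*x; x3 = x2*x; x6 = x3*x3; x9 = x6*x3; x15 = x9*x6;

   i.e. five multiplications, compared with six for the plain binary
   square-and-multiply method. */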
2774 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2776 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2777 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2778 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2779 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2780 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2781 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2782 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2783 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2784 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2785 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2786 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2787 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2788 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2789 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2790 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2791 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2792 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2793 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2794 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2795 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2796 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2797 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2798 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2799 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2800 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2801 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2802 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2803 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2804 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2805 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2806 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2807 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2811 /* Return the number of multiplications required to calculate
2812 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2813 subroutine of powi_cost. CACHE is an array indicating
2814 which exponents have already been calculated. */
2816 static int
2817 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2819 /* If we've already calculated this exponent, then this evaluation
2820 doesn't require any additional multiplications. */
2821 if (cache[n])
2822 return 0;
2824 cache[n] = true;
2825 return powi_lookup_cost (n - powi_table[n], cache)
2826 + powi_lookup_cost (powi_table[n], cache) + 1;
2829 /* Return the number of multiplications required to calculate
2830 powi(x,n) for an arbitrary x, given the exponent N. This
2831 function needs to be kept in sync with expand_powi below. */
2833 static int
2834 powi_cost (HOST_WIDE_INT n)
2836 bool cache[POWI_TABLE_SIZE];
2837 unsigned HOST_WIDE_INT digit;
2838 unsigned HOST_WIDE_INT val;
2839 int result;
2841 if (n == 0)
2842 return 0;
2844 /* Ignore the reciprocal when calculating the cost. */
2845 val = (n < 0) ? -n : n;
2847 /* Initialize the exponent cache. */
2848 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2849 cache[1] = true;
2851 result = 0;
2853 while (val >= POWI_TABLE_SIZE)
2855 if (val & 1)
2857 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2858 result += powi_lookup_cost (digit, cache)
2859 + POWI_WINDOW_SIZE + 1;
2860 val >>= POWI_WINDOW_SIZE;
2862 else
2864 val >>= 1;
2865 result++;
2869 return result + powi_lookup_cost (val, cache);
2872 /* Recursive subroutine of expand_powi. This function takes the array,
2873 CACHE, of already calculated exponents and an exponent N and returns
2874 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2876 static rtx
2877 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2879 unsigned HOST_WIDE_INT digit;
2880 rtx target, result;
2881 rtx op0, op1;
2883 if (n < POWI_TABLE_SIZE)
2885 if (cache[n])
2886 return cache[n];
2888 target = gen_reg_rtx (mode);
2889 cache[n] = target;
2891 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2892 op1 = expand_powi_1 (mode, powi_table[n], cache);
2894 else if (n & 1)
2896 target = gen_reg_rtx (mode);
2897 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2898 op0 = expand_powi_1 (mode, n - digit, cache);
2899 op1 = expand_powi_1 (mode, digit, cache);
2901 else
2903 target = gen_reg_rtx (mode);
2904 op0 = expand_powi_1 (mode, n >> 1, cache);
2905 op1 = op0;
2908 result = expand_mult (mode, op0, op1, target, 0);
2909 if (result != target)
2910 emit_move_insn (target, result);
2911 return target;
2914 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2915 floating point operand in mode MODE, and N is the exponent. This
2916 function needs to be kept in sync with powi_cost above. */
2918 static rtx
2919 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2921 unsigned HOST_WIDE_INT val;
2922 rtx cache[POWI_TABLE_SIZE];
2923 rtx result;
2925 if (n == 0)
2926 return CONST1_RTX (mode);
2928 val = (n < 0) ? -n : n;
2930 memset (cache, 0, sizeof (cache));
2931 cache[1] = x;
2933 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2935 /* If the original exponent was negative, reciprocate the result. */
2936 if (n < 0)
2937 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2938 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2940 return result;
2943 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2944 a normal call should be emitted rather than expanding the function
2945 in-line. EXP is the expression that is a call to the builtin
2946 function; if convenient, the result should be placed in TARGET. */
2948 static rtx
2949 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2951 tree arg0, arg1;
2952 tree fn, narg0;
2953 tree type = TREE_TYPE (exp);
2954 REAL_VALUE_TYPE cint, c, c2;
2955 HOST_WIDE_INT n;
2956 rtx op, op2;
2957 enum machine_mode mode = TYPE_MODE (type);
2959 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2960 return NULL_RTX;
2962 arg0 = CALL_EXPR_ARG (exp, 0);
2963 arg1 = CALL_EXPR_ARG (exp, 1);
2965 if (TREE_CODE (arg1) != REAL_CST
2966 || TREE_OVERFLOW (arg1))
2967 return expand_builtin_mathfn_2 (exp, target, subtarget);
2969 /* Handle constant exponents. */
2971 /* For integer valued exponents we can expand to an optimal multiplication
2972 sequence using expand_powi. */
2973 c = TREE_REAL_CST (arg1);
2974 n = real_to_integer (&c);
2975 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2976 if (real_identical (&c, &cint)
2977 && ((n >= -1 && n <= 2)
2978 || (flag_unsafe_math_optimizations
2979 && optimize_insn_for_speed_p ()
2980 && powi_cost (n) <= POWI_MAX_MULTS)))
2982 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2983 if (n != 1)
2985 op = force_reg (mode, op);
2986 op = expand_powi (op, mode, n);
2988 return op;
2991 narg0 = builtin_save_expr (arg0);
2993 /* If the exponent is not integer valued, check if it is half of an integer.
2994 In this case we can expand to sqrt (x) * x**(n/2). */
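/* For example (assuming -funsafe-math-optimizations and optimizing for
   speed), pow (x, 2.5) can be expanded here as sqrt (x) * x*x, and
   pow (x, -2.5) as 1 / (sqrt (x) * x*x). */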
2995 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2996 if (fn != NULL_TREE)
2998 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2999 n = real_to_integer (&c2);
3000 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3001 if (real_identical (&c2, &cint)
3002 && ((flag_unsafe_math_optimizations
3003 && optimize_insn_for_speed_p ()
3004 && powi_cost (n/2) <= POWI_MAX_MULTS)
3005 || n == 1))
3007 tree call_expr = build_call_expr (fn, 1, narg0);
3008 /* Use expand_expr in case the newly built call expression
3009 was folded to a non-call. */
3010 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3011 if (n != 1)
3013 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3014 op2 = force_reg (mode, op2);
3015 op2 = expand_powi (op2, mode, abs (n / 2));
3016 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3017 0, OPTAB_LIB_WIDEN);
3018 /* If the original exponent was negative, reciprocate the
3019 result. */
3020 if (n < 0)
3021 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3022 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3024 return op;
3028 /* Check whether the exponent is a third of an integer. In this case
3029 we can expand to x**(n/3) * cbrt(x)**(n%3). Because cbrt (x) differs
3030 from pow (x, 1./3.) due to rounding and its behavior with negative x,
3031 we need to constrain this transformation to unsafe math and to
3032 nonnegative x or finite math. */
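/* A worked illustration (assuming the constraints above hold and the
   constant survives the rounding checks): for pow (x, 5./3.) we get
   n == 5, so the expansion below is roughly cbrt (x) * cbrt (x) * x,
   i.e. x**(2/3) * x == x**(5/3). */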
3033 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3034 if (fn != NULL_TREE
3035 && flag_unsafe_math_optimizations
3036 && (tree_expr_nonnegative_p (arg0)
3037 || !HONOR_NANS (mode)))
3039 REAL_VALUE_TYPE dconst3;
3040 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3041 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3042 real_round (&c2, mode, &c2);
3043 n = real_to_integer (&c2);
3044 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3045 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3046 real_convert (&c2, mode, &c2);
3047 if (real_identical (&c2, &c)
3048 && ((optimize_insn_for_speed_p ()
3049 && powi_cost (n/3) <= POWI_MAX_MULTS)
3050 || n == 1))
3052 tree call_expr = build_call_expr (fn, 1,narg0);
3053 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3054 if (abs (n) % 3 == 2)
3055 op = expand_simple_binop (mode, MULT, op, op, op,
3056 0, OPTAB_LIB_WIDEN);
3057 if (n != 1)
3059 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3060 op2 = force_reg (mode, op2);
3061 op2 = expand_powi (op2, mode, abs (n / 3));
3062 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3063 0, OPTAB_LIB_WIDEN);
3064 /* If the original exponent was negative, reciprocate the
3065 result. */
3066 if (n < 0)
3067 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3068 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3070 return op;
3074 /* Fall back to optab expansion. */
3075 return expand_builtin_mathfn_2 (exp, target, subtarget);
3078 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3079 a normal call should be emitted rather than expanding the function
3080 in-line. EXP is the expression that is a call to the builtin
3081 function; if convenient, the result should be placed in TARGET. */
3083 static rtx
3084 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3086 tree arg0, arg1;
3087 rtx op0, op1;
3088 enum machine_mode mode;
3089 enum machine_mode mode2;
3091 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3092 return NULL_RTX;
3094 arg0 = CALL_EXPR_ARG (exp, 0);
3095 arg1 = CALL_EXPR_ARG (exp, 1);
3096 mode = TYPE_MODE (TREE_TYPE (exp));
3098 /* Handle constant power. */
3100 if (TREE_CODE (arg1) == INTEGER_CST
3101 && !TREE_OVERFLOW (arg1))
3103 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3105 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3106 Otherwise, check the number of multiplications required. */
3107 if ((TREE_INT_CST_HIGH (arg1) == 0
3108 || TREE_INT_CST_HIGH (arg1) == -1)
3109 && ((n >= -1 && n <= 2)
3110 || (optimize_insn_for_speed_p ()
3111 && powi_cost (n) <= POWI_MAX_MULTS)))
3113 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3114 op0 = force_reg (mode, op0);
3115 return expand_powi (op0, mode, n);
3119 /* Emit a libcall to libgcc. */
3121 /* Mode of the 2nd argument must match that of an int. */
3122 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3124 if (target == NULL_RTX)
3125 target = gen_reg_rtx (mode);
3127 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3128 if (GET_MODE (op0) != mode)
3129 op0 = convert_to_mode (mode, op0, 0);
3130 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3131 if (GET_MODE (op1) != mode2)
3132 op1 = convert_to_mode (mode2, op1, 0);
3134 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3135 target, LCT_CONST, mode, 2,
3136 op0, mode, op1, mode2);
3138 return target;
3141 /* Expand expression EXP, which is a call to the strlen builtin. Return
3142 NULL_RTX if we failed and the caller should emit a normal call; otherwise
3143 try to get the result in TARGET, if convenient. */
3145 static rtx
3146 expand_builtin_strlen (tree exp, rtx target,
3147 enum machine_mode target_mode)
3149 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3150 return NULL_RTX;
3151 else
3153 rtx pat;
3154 tree len;
3155 tree src = CALL_EXPR_ARG (exp, 0);
3156 rtx result, src_reg, char_rtx, before_strlen;
3157 enum machine_mode insn_mode = target_mode, char_mode;
3158 enum insn_code icode = CODE_FOR_nothing;
3159 int align;
3161 /* If the length can be computed at compile-time, return it. */
3162 len = c_strlen (src, 0);
3163 if (len)
3164 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3166 /* If the length can be computed at compile-time and is a constant
3167 integer, but there are side effects in src, evaluate
3168 src for its side effects, then return len.
3169 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3170 can be optimized into: i++; x = 3; */
3171 len = c_strlen (src, 1);
3172 if (len && TREE_CODE (len) == INTEGER_CST)
3174 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3175 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3178 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3180 /* If SRC is not a pointer type, don't do this operation inline. */
3181 if (align == 0)
3182 return NULL_RTX;
3184 /* Bail out if we can't compute strlen in the right mode. */
3185 while (insn_mode != VOIDmode)
3187 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3188 if (icode != CODE_FOR_nothing)
3189 break;
3191 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3193 if (insn_mode == VOIDmode)
3194 return NULL_RTX;
3196 /* Make a place to write the result of the instruction. */
3197 result = target;
3198 if (! (result != 0
3199 && REG_P (result)
3200 && GET_MODE (result) == insn_mode
3201 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3202 result = gen_reg_rtx (insn_mode);
3204 /* Make a place to hold the source address. We will not expand
3205 the actual source until we are sure that the expansion will
3206 not fail -- there are trees that cannot be expanded twice. */
3207 src_reg = gen_reg_rtx (Pmode);
3209 /* Mark the beginning of the strlen sequence so we can emit the
3210 source operand later. */
3211 before_strlen = get_last_insn ();
3213 char_rtx = const0_rtx;
3214 char_mode = insn_data[(int) icode].operand[2].mode;
3215 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3216 char_mode))
3217 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3219 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3220 char_rtx, GEN_INT (align));
3221 if (! pat)
3222 return NULL_RTX;
3223 emit_insn (pat);
3225 /* Now that we are assured of success, expand the source. */
3226 start_sequence ();
3227 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3228 if (pat != src_reg)
3229 emit_move_insn (src_reg, pat);
3230 pat = get_insns ();
3231 end_sequence ();
3233 if (before_strlen)
3234 emit_insn_after (pat, before_strlen);
3235 else
3236 emit_insn_before (pat, get_insns ());
3238 /* Return the value in the proper mode for this function. */
3239 if (GET_MODE (result) == target_mode)
3240 target = result;
3241 else if (target != 0)
3242 convert_move (target, result, 0);
3243 else
3244 target = convert_to_mode (target_mode, result, 0);
3246 return target;
3250 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed and the
3251 caller should emit a normal call; otherwise try to get the result
3252 in TARGET, if convenient (and in mode MODE if that's convenient). */
3254 static rtx
3255 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3257 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3259 tree type = TREE_TYPE (exp);
3260 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3261 CALL_EXPR_ARG (exp, 1), type);
3262 if (result)
3263 return expand_expr (result, target, mode, EXPAND_NORMAL);
3265 return NULL_RTX;
3268 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed and the
3269 caller should emit a normal call; otherwise try to get the result
3270 in TARGET, if convenient (and in mode MODE if that's convenient). */
3272 static rtx
3273 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3275 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3277 tree type = TREE_TYPE (exp);
3278 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3279 CALL_EXPR_ARG (exp, 1), type);
3280 if (result)
3281 return expand_expr (result, target, mode, EXPAND_NORMAL);
3283 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3285 return NULL_RTX;
3288 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed and the
3289 caller should emit a normal call; otherwise try to get the result
3290 in TARGET, if convenient (and in mode MODE if that's convenient). */
3292 static rtx
3293 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3295 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3297 tree type = TREE_TYPE (exp);
3298 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3299 CALL_EXPR_ARG (exp, 1), type);
3300 if (result)
3301 return expand_expr (result, target, mode, EXPAND_NORMAL);
3303 return NULL_RTX;
3306 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed and the
3307 caller should emit a normal call; otherwise try to get the result
3308 in TARGET, if convenient (and in mode MODE if that's convenient). */
3310 static rtx
3311 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3313 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3315 tree type = TREE_TYPE (exp);
3316 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3317 CALL_EXPR_ARG (exp, 1), type);
3318 if (result)
3319 return expand_expr (result, target, mode, EXPAND_NORMAL);
3321 return NULL_RTX;
3324 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3325 bytes from the constant string DATA + OFFSET and return it as a target
3326 constant. */
3328 static rtx
3329 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3330 enum machine_mode mode)
3332 const char *str = (const char *) data;
3334 gcc_assert (offset >= 0
3335 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3336 <= strlen (str) + 1));
3338 return c_readstr (str + offset, mode);
3341 /* Expand a call EXP to the memcpy builtin.
3342 Return NULL_RTX if we failed; the caller should emit a normal call.
3343 Otherwise try to get the result in TARGET, if convenient (and in
3344 mode MODE if that's convenient). */
3346 static rtx
3347 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3349 tree fndecl = get_callee_fndecl (exp);
3351 if (!validate_arglist (exp,
3352 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3353 return NULL_RTX;
3354 else
3356 tree dest = CALL_EXPR_ARG (exp, 0);
3357 tree src = CALL_EXPR_ARG (exp, 1);
3358 tree len = CALL_EXPR_ARG (exp, 2);
3359 const char *src_str;
3360 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3361 unsigned int dest_align
3362 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3363 rtx dest_mem, src_mem, dest_addr, len_rtx;
3364 tree result = fold_builtin_memory_op (dest, src, len,
3365 TREE_TYPE (TREE_TYPE (fndecl)),
3366 false, /*endp=*/0);
3367 HOST_WIDE_INT expected_size = -1;
3368 unsigned int expected_align = 0;
3369 tree_ann_common_t ann;
3371 if (result)
3373 while (TREE_CODE (result) == COMPOUND_EXPR)
3375 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3376 EXPAND_NORMAL);
3377 result = TREE_OPERAND (result, 1);
3379 return expand_expr (result, target, mode, EXPAND_NORMAL);
3382 /* If DEST is not a pointer type, call the normal function. */
3383 if (dest_align == 0)
3384 return NULL_RTX;
3386 /* If SRC is not a pointer type, don't do this
3387 operation in-line. */
3388 if (src_align == 0)
3389 return NULL_RTX;
3391 ann = tree_common_ann (exp);
3392 if (ann)
3393 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3395 if (expected_align < dest_align)
3396 expected_align = dest_align;
3397 dest_mem = get_memory_rtx (dest, len);
3398 set_mem_align (dest_mem, dest_align);
3399 len_rtx = expand_normal (len);
3400 src_str = c_getstr (src);
3402 /* If SRC is a string constant and block move would be done
3403 by pieces, we can avoid loading the string from memory
3404 and only store the computed constants. */
3405 if (src_str
3406 && CONST_INT_P (len_rtx)
3407 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3408 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3409 CONST_CAST (char *, src_str),
3410 dest_align, false))
3412 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3413 builtin_memcpy_read_str,
3414 CONST_CAST (char *, src_str),
3415 dest_align, false, 0);
3416 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3417 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3418 return dest_mem;
3421 src_mem = get_memory_rtx (src, len);
3422 set_mem_align (src_mem, src_align);
3424 /* Copy the block in the most expedient way available. */
3425 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3426 CALL_EXPR_TAILCALL (exp)
3427 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3428 expected_align, expected_size);
3430 if (dest_addr == 0)
3432 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3433 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3435 return dest_addr;
3439 /* Expand a call EXP to the mempcpy builtin.
3440 Return NULL_RTX if we failed; the caller should emit a normal call,
3441 otherwise try to get the result in TARGET, if convenient (and in
3442 mode MODE if that's convenient). If ENDP is 0 return the
3443 destination pointer, if ENDP is 1 return the end pointer ala
3444 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3445 stpcpy. */
3447 static rtx
3448 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3450 if (!validate_arglist (exp,
3451 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3452 return NULL_RTX;
3453 else
3455 tree dest = CALL_EXPR_ARG (exp, 0);
3456 tree src = CALL_EXPR_ARG (exp, 1);
3457 tree len = CALL_EXPR_ARG (exp, 2);
3458 return expand_builtin_mempcpy_args (dest, src, len,
3459 TREE_TYPE (exp),
3460 target, mode, /*endp=*/ 1);
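/* A minimal sketch, with a hypothetical helper, of the three ENDP return
   conventions documented above; the byte loop stands in for whatever
   mechanism (store_by_pieces, move_by_pieces, a library call) actually
   performs the copy.  */
static char *
example_endp_return (char *dest, const char *src, unsigned long n, int endp)
{
  unsigned long i;

  for (i = 0; i < n; i++)
    dest[i] = src[i];

  if (endp == 0)
    return dest;           /* memcpy style: the destination pointer.  */
  if (endp == 1)
    return dest + n;       /* mempcpy style: one past the last byte.  */
  return dest + n - 1;     /* stpcpy style: the copied NUL terminator.  */
}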
3464 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3465 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3466 so that this can also be called without constructing an actual CALL_EXPR.
3467 TYPE is the return type of the call. The other arguments and return value
3468 are the same as for expand_builtin_mempcpy. */
3470 static rtx
3471 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3472 rtx target, enum machine_mode mode, int endp)
3474 /* If return value is ignored, transform mempcpy into memcpy. */
3475 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3477 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3478 tree result = build_call_expr (fn, 3, dest, src, len);
3480 while (TREE_CODE (result) == COMPOUND_EXPR)
3482 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3483 EXPAND_NORMAL);
3484 result = TREE_OPERAND (result, 1);
3486 return expand_expr (result, target, mode, EXPAND_NORMAL);
3488 else
3490 const char *src_str;
3491 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3492 unsigned int dest_align
3493 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3494 rtx dest_mem, src_mem, len_rtx;
3495 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3497 if (result)
3499 while (TREE_CODE (result) == COMPOUND_EXPR)
3501 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3502 EXPAND_NORMAL);
3503 result = TREE_OPERAND (result, 1);
3505 return expand_expr (result, target, mode, EXPAND_NORMAL);
3508 /* If either SRC or DEST is not a pointer type, don't do this
3509 operation in-line. */
3510 if (dest_align == 0 || src_align == 0)
3511 return NULL_RTX;
3513 /* If LEN is not constant, call the normal function. */
3514 if (! host_integerp (len, 1))
3515 return NULL_RTX;
3517 len_rtx = expand_normal (len);
3518 src_str = c_getstr (src);
3520 /* If SRC is a string constant and block move would be done
3521 by pieces, we can avoid loading the string from memory
3522 and only store the computed constants. */
3523 if (src_str
3524 && CONST_INT_P (len_rtx)
3525 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3526 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3527 CONST_CAST (char *, src_str),
3528 dest_align, false))
3530 dest_mem = get_memory_rtx (dest, len);
3531 set_mem_align (dest_mem, dest_align);
3532 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3533 builtin_memcpy_read_str,
3534 CONST_CAST (char *, src_str),
3535 dest_align, false, endp);
3536 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3537 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3538 return dest_mem;
3541 if (CONST_INT_P (len_rtx)
3542 && can_move_by_pieces (INTVAL (len_rtx),
3543 MIN (dest_align, src_align)))
3545 dest_mem = get_memory_rtx (dest, len);
3546 set_mem_align (dest_mem, dest_align);
3547 src_mem = get_memory_rtx (src, len);
3548 set_mem_align (src_mem, src_align);
3549 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3550 MIN (dest_align, src_align), endp);
3551 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3552 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3553 return dest_mem;
3556 return NULL_RTX;
3560 /* Expand expression EXP, which is a call to the memmove builtin. Return
3561 NULL_RTX if we failed; the caller should emit a normal call. */
3563 static rtx
3564 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3566 if (!validate_arglist (exp,
3567 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3568 return NULL_RTX;
3569 else
3571 tree dest = CALL_EXPR_ARG (exp, 0);
3572 tree src = CALL_EXPR_ARG (exp, 1);
3573 tree len = CALL_EXPR_ARG (exp, 2);
3574 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3575 target, mode, ignore);
3579 /* Helper function to do the actual work for expand_builtin_memmove. The
3580 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3581 so that this can also be called without constructing an actual CALL_EXPR.
3582 TYPE is the return type of the call. The other arguments and return value
3583 are the same as for expand_builtin_memmove. */
3585 static rtx
3586 expand_builtin_memmove_args (tree dest, tree src, tree len,
3587 tree type, rtx target, enum machine_mode mode,
3588 int ignore)
3590 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3592 if (result)
3594 STRIP_TYPE_NOPS (result);
3595 while (TREE_CODE (result) == COMPOUND_EXPR)
3597 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3598 EXPAND_NORMAL);
3599 result = TREE_OPERAND (result, 1);
3601 return expand_expr (result, target, mode, EXPAND_NORMAL);
3604 /* Otherwise, call the normal function. */
3605 return NULL_RTX;
3608 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3609 NULL_RTX if we failed; the caller should emit a normal call. */
3611 static rtx
3612 expand_builtin_bcopy (tree exp, int ignore)
3614 tree type = TREE_TYPE (exp);
3615 tree src, dest, size;
3617 if (!validate_arglist (exp,
3618 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3619 return NULL_RTX;
3621 src = CALL_EXPR_ARG (exp, 0);
3622 dest = CALL_EXPR_ARG (exp, 1);
3623 size = CALL_EXPR_ARG (exp, 2);
3625 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3626 This is done this way so that if it isn't expanded inline, we fall
3627 back to calling bcopy instead of memmove. */
3628 return expand_builtin_memmove_args (dest, src,
3629 fold_convert (sizetype, size),
3630 type, const0_rtx, VOIDmode,
3631 ignore);
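/* A minimal sketch of the rewrite described above, using a hypothetical
   helper: bcopy's (src, dest, len) argument order becomes memmove's
   (dest, src, len), and like memmove the copy must tolerate overlap.  */
static void
example_bcopy_as_memmove (const void *src, void *dest, unsigned long len)
{
  unsigned char *d = (unsigned char *) dest;
  const unsigned char *s = (const unsigned char *) src;
  unsigned long i;

  if (d > s && d < s + len)
    /* Destination starts inside the source: copy backwards.  */
    for (i = len; i-- > 0; )
      d[i] = s[i];
  else
    for (i = 0; i < len; i++)
      d[i] = s[i];
}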
3634 #ifndef HAVE_movstr
3635 # define HAVE_movstr 0
3636 # define CODE_FOR_movstr CODE_FOR_nothing
3637 #endif
3639 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3640 we failed; the caller should emit a normal call, otherwise try to
3641 get the result in TARGET, if convenient. If ENDP is 0 return the
3642 destination pointer, if ENDP is 1 return the end pointer ala
3643 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3644 stpcpy. */
3646 static rtx
3647 expand_movstr (tree dest, tree src, rtx target, int endp)
3649 rtx end;
3650 rtx dest_mem;
3651 rtx src_mem;
3652 rtx insn;
3653 const struct insn_data * data;
3655 if (!HAVE_movstr)
3656 return NULL_RTX;
3658 dest_mem = get_memory_rtx (dest, NULL);
3659 src_mem = get_memory_rtx (src, NULL);
3660 if (!endp)
3662 target = force_reg (Pmode, XEXP (dest_mem, 0));
3663 dest_mem = replace_equiv_address (dest_mem, target);
3664 end = gen_reg_rtx (Pmode);
3666 else
3668 if (target == 0 || target == const0_rtx)
3670 end = gen_reg_rtx (Pmode);
3671 if (target == 0)
3672 target = end;
3674 else
3675 end = target;
3678 data = insn_data + CODE_FOR_movstr;
3680 if (data->operand[0].mode != VOIDmode)
3681 end = gen_lowpart (data->operand[0].mode, end);
3683 insn = data->genfun (end, dest_mem, src_mem);
3685 gcc_assert (insn);
3687 emit_insn (insn);
3689 /* movstr is supposed to set end to the address of the NUL
3690 terminator. If the caller requested a mempcpy-like return value,
3691 adjust it. */
3692 if (endp == 1 && target != const0_rtx)
3694 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3695 emit_move_insn (target, force_operand (tem, NULL_RTX));
3698 return target;
3701 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3702 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3703 try to get the result in TARGET, if convenient (and in mode MODE if that's
3704 convenient). */
3706 static rtx
3707 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3709 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3711 tree dest = CALL_EXPR_ARG (exp, 0);
3712 tree src = CALL_EXPR_ARG (exp, 1);
3713 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3715 return NULL_RTX;
3718 /* Helper function to do the actual work for expand_builtin_strcpy. The
3719 arguments to the builtin_strcpy call DEST and SRC are broken out
3720 so that this can also be called without constructing an actual CALL_EXPR.
3721 The other arguments and return value are the same as for
3722 expand_builtin_strcpy. */
3724 static rtx
3725 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3726 rtx target, enum machine_mode mode)
3728 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3729 if (result)
3730 return expand_expr (result, target, mode, EXPAND_NORMAL);
3731 return expand_movstr (dest, src, target, /*endp=*/0);
3735 /* Expand a call EXP to the stpcpy builtin.
3736 Return NULL_RTX if we failed; the caller should emit a normal call,
3737 otherwise try to get the result in TARGET, if convenient (and in
3738 mode MODE if that's convenient). */
3740 static rtx
3741 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3743 tree dst, src;
3745 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3746 return NULL_RTX;
3748 dst = CALL_EXPR_ARG (exp, 0);
3749 src = CALL_EXPR_ARG (exp, 1);
3751 /* If return value is ignored, transform stpcpy into strcpy. */
3752 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3754 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3755 tree result = build_call_expr (fn, 2, dst, src);
3757 STRIP_NOPS (result);
3758 while (TREE_CODE (result) == COMPOUND_EXPR)
3760 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3761 EXPAND_NORMAL);
3762 result = TREE_OPERAND (result, 1);
3764 return expand_expr (result, target, mode, EXPAND_NORMAL);
3766 else
3768 tree len, lenp1;
3769 rtx ret;
3771 /* Ensure we get an actual string whose length can be evaluated at
3772 compile-time, not an expression containing a string. This is
3773 because the latter will potentially produce pessimized code
3774 when used to produce the return value. */
3775 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3776 return expand_movstr (dst, src, target, /*endp=*/2);
3778 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3779 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3780 target, mode, /*endp=*/2);
3782 if (ret)
3783 return ret;
3785 if (TREE_CODE (len) == INTEGER_CST)
3787 rtx len_rtx = expand_normal (len);
3789 if (CONST_INT_P (len_rtx))
3791 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3792 dst, src, target, mode);
3794 if (ret)
3796 if (! target)
3798 if (mode != VOIDmode)
3799 target = gen_reg_rtx (mode);
3800 else
3801 target = gen_reg_rtx (GET_MODE (ret));
3803 if (GET_MODE (target) != GET_MODE (ret))
3804 ret = gen_lowpart (GET_MODE (target), ret);
3806 ret = plus_constant (ret, INTVAL (len_rtx));
3807 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3808 gcc_assert (ret);
3810 return target;
3815 return expand_movstr (dst, src, target, /*endp=*/2);
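/* A minimal sketch (hypothetical helper) of the identity the expansion
   above relies on: stpcpy (d, s) copies strlen (s) + 1 bytes and returns
   d + strlen (s), a pointer to the copied NUL, which is why mempcpy with
   length strlen (s) + 1 and ENDP == 2 produces the same value.  */
static char *
example_stpcpy (char *d, const char *s)
{
  unsigned long n = 0;

  while ((d[n] = s[n]) != '\0')
    n++;
  return d + n;
}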
3819 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3820 bytes from constant string DATA + OFFSET and return it as target
3821 constant. */
3823 static rtx
3824 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3825 enum machine_mode mode)
3827 const char *str = (const char *) data;
3829 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3830 return const0_rtx;
3832 return c_readstr (str + offset, mode);
3835 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3836 NULL_RTX if we failed; the caller should emit a normal call. */
3838 static rtx
3839 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3841 tree fndecl = get_callee_fndecl (exp);
3843 if (validate_arglist (exp,
3844 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3846 tree dest = CALL_EXPR_ARG (exp, 0);
3847 tree src = CALL_EXPR_ARG (exp, 1);
3848 tree len = CALL_EXPR_ARG (exp, 2);
3849 tree slen = c_strlen (src, 1);
3850 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3852 if (result)
3854 while (TREE_CODE (result) == COMPOUND_EXPR)
3856 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3857 EXPAND_NORMAL);
3858 result = TREE_OPERAND (result, 1);
3860 return expand_expr (result, target, mode, EXPAND_NORMAL);
3863 /* We must be passed a constant len and src parameter. */
3864 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3865 return NULL_RTX;
3867 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3869 /* We're required to pad with trailing zeros if the requested
3870 len is greater than strlen(s2)+1. In that case try to
3871 use store_by_pieces; if that fails, punt. */
3872 if (tree_int_cst_lt (slen, len))
3874 unsigned int dest_align
3875 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3876 const char *p = c_getstr (src);
3877 rtx dest_mem;
3879 if (!p || dest_align == 0 || !host_integerp (len, 1)
3880 || !can_store_by_pieces (tree_low_cst (len, 1),
3881 builtin_strncpy_read_str,
3882 CONST_CAST (char *, p),
3883 dest_align, false))
3884 return NULL_RTX;
3886 dest_mem = get_memory_rtx (dest, len);
3887 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3888 builtin_strncpy_read_str,
3889 CONST_CAST (char *, p), dest_align, false, 0);
3890 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3891 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3892 return dest_mem;
3895 return NULL_RTX;
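/* A minimal sketch (hypothetical helper) of the padding rule mentioned
   above: strncpy must write exactly LEN bytes, zero-filling once the
   source is exhausted, which is why builtin_strncpy_read_str returns
   const0_rtx for offsets past the end of the string.  */
static void
example_strncpy_pad (char *dest, const char *src, unsigned long len)
{
  unsigned long i = 0;

  for (; i < len && src[i] != '\0'; i++)
    dest[i] = src[i];
  for (; i < len; i++)
    dest[i] = '\0';
}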
3898 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3899 bytes from constant string DATA + OFFSET and return it as target
3900 constant. */
3902 static rtx
3903 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3904 enum machine_mode mode)
3906 const char *c = (const char *) data;
3907 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3909 memset (p, *c, GET_MODE_SIZE (mode));
3911 return c_readstr (p, mode);
3914 /* Callback routine for store_by_pieces. Return the RTL of a register
3915 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3916 char value given in the RTL register data. For example, if mode is
3917 4 bytes wide, return the RTL for 0x01010101*data. */
3919 static rtx
3920 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3921 enum machine_mode mode)
3923 rtx target, coeff;
3924 size_t size;
3925 char *p;
3927 size = GET_MODE_SIZE (mode);
3928 if (size == 1)
3929 return (rtx) data;
3931 p = XALLOCAVEC (char, size);
3932 memset (p, 1, size);
3933 coeff = c_readstr (p, mode);
3935 target = convert_to_mode (mode, (rtx) data, 1);
3936 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3937 return force_reg (mode, target);
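/* A minimal sketch of the multiplication trick described above, for a
   4-byte chunk and assuming a 32-bit unsigned int: multiplying the byte
   by 0x01010101 replicates it into every byte position, which is what
   builtin_memset_gen_str does in MODE via expand_mult.  */
static unsigned int
example_splat_byte (unsigned char c)
{
  return (unsigned int) c * 0x01010101u;
}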
3940 /* Expand expression EXP, which is a call to the memset builtin. Return
3941 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3942 try to get the result in TARGET, if convenient (and in mode MODE if that's
3943 convenient). */
3945 static rtx
3946 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3948 if (!validate_arglist (exp,
3949 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3950 return NULL_RTX;
3951 else
3953 tree dest = CALL_EXPR_ARG (exp, 0);
3954 tree val = CALL_EXPR_ARG (exp, 1);
3955 tree len = CALL_EXPR_ARG (exp, 2);
3956 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3960 /* Helper function to do the actual work for expand_builtin_memset. The
3961 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3962 so that this can also be called without constructing an actual CALL_EXPR.
3963 The other arguments and return value are the same as for
3964 expand_builtin_memset. */
3966 static rtx
3967 expand_builtin_memset_args (tree dest, tree val, tree len,
3968 rtx target, enum machine_mode mode, tree orig_exp)
3970 tree fndecl, fn;
3971 enum built_in_function fcode;
3972 char c;
3973 unsigned int dest_align;
3974 rtx dest_mem, dest_addr, len_rtx;
3975 HOST_WIDE_INT expected_size = -1;
3976 unsigned int expected_align = 0;
3977 tree_ann_common_t ann;
3979 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3981 /* If DEST is not a pointer type, don't do this operation in-line. */
3982 if (dest_align == 0)
3983 return NULL_RTX;
3985 ann = tree_common_ann (orig_exp);
3986 if (ann)
3987 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3989 if (expected_align < dest_align)
3990 expected_align = dest_align;
3992 /* If the LEN parameter is zero, return DEST. */
3993 if (integer_zerop (len))
3995 /* Evaluate and ignore VAL in case it has side-effects. */
3996 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3997 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4000 /* Stabilize the arguments in case we fail. */
4001 dest = builtin_save_expr (dest);
4002 val = builtin_save_expr (val);
4003 len = builtin_save_expr (len);
4005 len_rtx = expand_normal (len);
4006 dest_mem = get_memory_rtx (dest, len);
4008 if (TREE_CODE (val) != INTEGER_CST)
4010 rtx val_rtx;
4012 val_rtx = expand_normal (val);
4013 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
4014 val_rtx, 0);
4016 /* Assume that we can memset by pieces if we can store
4017 the coefficients by pieces (in the required modes).
4018 We can't pass builtin_memset_gen_str as that emits RTL. */
4019 c = 1;
4020 if (host_integerp (len, 1)
4021 && can_store_by_pieces (tree_low_cst (len, 1),
4022 builtin_memset_read_str, &c, dest_align,
4023 true))
4025 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
4026 val_rtx);
4027 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4028 builtin_memset_gen_str, val_rtx, dest_align,
4029 true, 0);
4031 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4032 dest_align, expected_align,
4033 expected_size))
4034 goto do_libcall;
4036 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4037 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4038 return dest_mem;
4041 if (target_char_cast (val, &c))
4042 goto do_libcall;
4044 if (c)
4046 if (host_integerp (len, 1)
4047 && can_store_by_pieces (tree_low_cst (len, 1),
4048 builtin_memset_read_str, &c, dest_align,
4049 true))
4050 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4051 builtin_memset_read_str, &c, dest_align, true, 0);
4052 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4053 dest_align, expected_align,
4054 expected_size))
4055 goto do_libcall;
4057 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4058 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4059 return dest_mem;
4062 set_mem_align (dest_mem, dest_align);
4063 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4064 CALL_EXPR_TAILCALL (orig_exp)
4065 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4066 expected_align, expected_size);
4068 if (dest_addr == 0)
4070 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4071 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4074 return dest_addr;
4076 do_libcall:
4077 fndecl = get_callee_fndecl (orig_exp);
4078 fcode = DECL_FUNCTION_CODE (fndecl);
4079 if (fcode == BUILT_IN_MEMSET)
4080 fn = build_call_expr (fndecl, 3, dest, val, len);
4081 else if (fcode == BUILT_IN_BZERO)
4082 fn = build_call_expr (fndecl, 2, dest, len);
4083 else
4084 gcc_unreachable ();
4085 if (TREE_CODE (fn) == CALL_EXPR)
4086 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4087 return expand_call (fn, target, target == const0_rtx);
4090 /* Expand expression EXP, which is a call to the bzero builtin. Return
4091 NULL_RTX if we failed; the caller should emit a normal call. */
4093 static rtx
4094 expand_builtin_bzero (tree exp)
4096 tree dest, size;
4098 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4099 return NULL_RTX;
4101 dest = CALL_EXPR_ARG (exp, 0);
4102 size = CALL_EXPR_ARG (exp, 1);
4104 /* New argument list transforming bzero(ptr x, int y) to
4105 memset(ptr x, int 0, size_t y). This is done this way
4106 so that if it isn't expanded inline, we fall back to
4107 calling bzero instead of memset. */
4109 return expand_builtin_memset_args (dest, integer_zero_node,
4110 fold_convert (sizetype, size),
4111 const0_rtx, VOIDmode, exp);
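/* A minimal sketch (hypothetical helper) of the rewrite described above:
   bzero (p, n) behaves as memset (p, 0, n) with the result discarded,
   which is why the expansion passes integer_zero_node as the value and
   const0_rtx as the target.  */
static void
example_bzero_as_memset (void *p, unsigned long n)
{
  unsigned char *q = (unsigned char *) p;
  unsigned long i;

  for (i = 0; i < n; i++)
    q[i] = 0;
}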
4114 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed; the
4115 caller should emit a normal call, otherwise try to get the result
4116 in TARGET, if convenient (and in mode MODE if that's convenient). */
4118 static rtx
4119 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4121 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4122 INTEGER_TYPE, VOID_TYPE))
4124 tree type = TREE_TYPE (exp);
4125 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4126 CALL_EXPR_ARG (exp, 1),
4127 CALL_EXPR_ARG (exp, 2), type);
4128 if (result)
4129 return expand_expr (result, target, mode, EXPAND_NORMAL);
4131 return NULL_RTX;
4134 /* Expand expression EXP, which is a call to the memcmp built-in function.
4135 Return NULL_RTX if we failed and the
4136 caller should emit a normal call, otherwise try to get the result in
4137 TARGET, if convenient (and in mode MODE, if that's convenient). */
4139 static rtx
4140 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4142 if (!validate_arglist (exp,
4143 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4144 return NULL_RTX;
4145 else
4147 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4148 CALL_EXPR_ARG (exp, 1),
4149 CALL_EXPR_ARG (exp, 2));
4150 if (result)
4151 return expand_expr (result, target, mode, EXPAND_NORMAL);
4154 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4156 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4157 rtx result;
4158 rtx insn;
4159 tree arg1 = CALL_EXPR_ARG (exp, 0);
4160 tree arg2 = CALL_EXPR_ARG (exp, 1);
4161 tree len = CALL_EXPR_ARG (exp, 2);
4163 int arg1_align
4164 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4165 int arg2_align
4166 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4167 enum machine_mode insn_mode;
4169 #ifdef HAVE_cmpmemsi
4170 if (HAVE_cmpmemsi)
4171 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4172 else
4173 #endif
4174 #ifdef HAVE_cmpstrnsi
4175 if (HAVE_cmpstrnsi)
4176 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4177 else
4178 #endif
4179 return NULL_RTX;
4181 /* If either argument's alignment is unknown, call the library function. */
4182 if (arg1_align == 0 || arg2_align == 0)
4183 return NULL_RTX;
4185 /* Make a place to write the result of the instruction. */
4186 result = target;
4187 if (! (result != 0
4188 && REG_P (result) && GET_MODE (result) == insn_mode
4189 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4190 result = gen_reg_rtx (insn_mode);
4192 arg1_rtx = get_memory_rtx (arg1, len);
4193 arg2_rtx = get_memory_rtx (arg2, len);
4194 arg3_rtx = expand_normal (fold_convert (sizetype, len));
4196 /* Set MEM_SIZE as appropriate. */
4197 if (CONST_INT_P (arg3_rtx))
4199 set_mem_size (arg1_rtx, arg3_rtx);
4200 set_mem_size (arg2_rtx, arg3_rtx);
4203 #ifdef HAVE_cmpmemsi
4204 if (HAVE_cmpmemsi)
4205 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4206 GEN_INT (MIN (arg1_align, arg2_align)));
4207 else
4208 #endif
4209 #ifdef HAVE_cmpstrnsi
4210 if (HAVE_cmpstrnsi)
4211 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4212 GEN_INT (MIN (arg1_align, arg2_align)));
4213 else
4214 #endif
4215 gcc_unreachable ();
4217 if (insn)
4218 emit_insn (insn);
4219 else
4220 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4221 TYPE_MODE (integer_type_node), 3,
4222 XEXP (arg1_rtx, 0), Pmode,
4223 XEXP (arg2_rtx, 0), Pmode,
4224 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4225 TYPE_UNSIGNED (sizetype)),
4226 TYPE_MODE (sizetype));
4228 /* Return the value in the proper mode for this function. */
4229 mode = TYPE_MODE (TREE_TYPE (exp));
4230 if (GET_MODE (result) == mode)
4231 return result;
4232 else if (target != 0)
4234 convert_move (target, result, 0);
4235 return target;
4237 else
4238 return convert_to_mode (mode, result, 0);
4240 #endif
4242 return NULL_RTX;
4245 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4246 if we failed; the caller should emit a normal call, otherwise try to get
4247 the result in TARGET, if convenient. */
4249 static rtx
4250 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4252 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4253 return NULL_RTX;
4254 else
4256 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4257 CALL_EXPR_ARG (exp, 1));
4258 if (result)
4259 return expand_expr (result, target, mode, EXPAND_NORMAL);
4262 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4263 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4264 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4266 rtx arg1_rtx, arg2_rtx;
4267 rtx result, insn = NULL_RTX;
4268 tree fndecl, fn;
4269 tree arg1 = CALL_EXPR_ARG (exp, 0);
4270 tree arg2 = CALL_EXPR_ARG (exp, 1);
4272 int arg1_align
4273 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4274 int arg2_align
4275 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4277 /* If either argument's alignment is unknown, call the library function. */
4278 if (arg1_align == 0 || arg2_align == 0)
4279 return NULL_RTX;
4281 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4282 arg1 = builtin_save_expr (arg1);
4283 arg2 = builtin_save_expr (arg2);
4285 arg1_rtx = get_memory_rtx (arg1, NULL);
4286 arg2_rtx = get_memory_rtx (arg2, NULL);
4288 #ifdef HAVE_cmpstrsi
4289 /* Try to call cmpstrsi. */
4290 if (HAVE_cmpstrsi)
4292 enum machine_mode insn_mode
4293 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4295 /* Make a place to write the result of the instruction. */
4296 result = target;
4297 if (! (result != 0
4298 && REG_P (result) && GET_MODE (result) == insn_mode
4299 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4300 result = gen_reg_rtx (insn_mode);
4302 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4303 GEN_INT (MIN (arg1_align, arg2_align)));
4305 #endif
4306 #ifdef HAVE_cmpstrnsi
4307 /* Try to determine at least one length and call cmpstrnsi. */
4308 if (!insn && HAVE_cmpstrnsi)
4310 tree len;
4311 rtx arg3_rtx;
4313 enum machine_mode insn_mode
4314 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4315 tree len1 = c_strlen (arg1, 1);
4316 tree len2 = c_strlen (arg2, 1);
4318 if (len1)
4319 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4320 if (len2)
4321 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4323 /* If we don't have a constant length for the first, use the length
4324 of the second, if we know it. We don't require a constant for
4325 this case; some cost analysis could be done if both are available
4326 but neither is constant. For now, assume they're equally cheap,
4327 unless one has side effects. If both strings have constant lengths,
4328 use the smaller. */
4330 if (!len1)
4331 len = len2;
4332 else if (!len2)
4333 len = len1;
4334 else if (TREE_SIDE_EFFECTS (len1))
4335 len = len2;
4336 else if (TREE_SIDE_EFFECTS (len2))
4337 len = len1;
4338 else if (TREE_CODE (len1) != INTEGER_CST)
4339 len = len2;
4340 else if (TREE_CODE (len2) != INTEGER_CST)
4341 len = len1;
4342 else if (tree_int_cst_lt (len1, len2))
4343 len = len1;
4344 else
4345 len = len2;
4347 /* If both arguments have side effects, we cannot optimize. */
4348 if (!len || TREE_SIDE_EFFECTS (len))
4349 goto do_libcall;
4351 arg3_rtx = expand_normal (len);
4353 /* Make a place to write the result of the instruction. */
4354 result = target;
4355 if (! (result != 0
4356 && REG_P (result) && GET_MODE (result) == insn_mode
4357 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4358 result = gen_reg_rtx (insn_mode);
4360 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4361 GEN_INT (MIN (arg1_align, arg2_align)));
4363 #endif
4365 if (insn)
4367 emit_insn (insn);
4369 /* Return the value in the proper mode for this function. */
4370 mode = TYPE_MODE (TREE_TYPE (exp));
4371 if (GET_MODE (result) == mode)
4372 return result;
4373 if (target == 0)
4374 return convert_to_mode (mode, result, 0);
4375 convert_move (target, result, 0);
4376 return target;
4379 /* Expand the library call ourselves using a stabilized argument
4380 list to avoid evaluating the function's arguments twice. */
4381 #ifdef HAVE_cmpstrnsi
4382 do_libcall:
4383 #endif
4384 fndecl = get_callee_fndecl (exp);
4385 fn = build_call_expr (fndecl, 2, arg1, arg2);
4386 if (TREE_CODE (fn) == CALL_EXPR)
4387 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4388 return expand_call (fn, target, target == const0_rtx);
4390 #endif
4391 return NULL_RTX;
4394 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4395 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4396 the result in TARGET, if convenient. */
4398 static rtx
4399 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4401 if (!validate_arglist (exp,
4402 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4403 return NULL_RTX;
4404 else
4406 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4407 CALL_EXPR_ARG (exp, 1),
4408 CALL_EXPR_ARG (exp, 2));
4409 if (result)
4410 return expand_expr (result, target, mode, EXPAND_NORMAL);
4413 /* If c_strlen can determine an expression for one of the string
4414 lengths, and it doesn't have side effects, then emit cmpstrnsi
4415 using length MIN(strlen(string)+1, arg3). */
4416 #ifdef HAVE_cmpstrnsi
4417 if (HAVE_cmpstrnsi)
4419 tree len, len1, len2;
4420 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4421 rtx result, insn;
4422 tree fndecl, fn;
4423 tree arg1 = CALL_EXPR_ARG (exp, 0);
4424 tree arg2 = CALL_EXPR_ARG (exp, 1);
4425 tree arg3 = CALL_EXPR_ARG (exp, 2);
4427 int arg1_align
4428 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4429 int arg2_align
4430 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4431 enum machine_mode insn_mode
4432 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4434 len1 = c_strlen (arg1, 1);
4435 len2 = c_strlen (arg2, 1);
4437 if (len1)
4438 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4439 if (len2)
4440 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4442 /* If we don't have a constant length for the first, use the length
4443 of the second, if we know it. We don't require a constant for
4444 this case; some cost analysis could be done if both are available
4445 but neither is constant. For now, assume they're equally cheap,
4446 unless one has side effects. If both strings have constant lengths,
4447 use the smaller. */
4449 if (!len1)
4450 len = len2;
4451 else if (!len2)
4452 len = len1;
4453 else if (TREE_SIDE_EFFECTS (len1))
4454 len = len2;
4455 else if (TREE_SIDE_EFFECTS (len2))
4456 len = len1;
4457 else if (TREE_CODE (len1) != INTEGER_CST)
4458 len = len2;
4459 else if (TREE_CODE (len2) != INTEGER_CST)
4460 len = len1;
4461 else if (tree_int_cst_lt (len1, len2))
4462 len = len1;
4463 else
4464 len = len2;
4466 /* If both arguments have side effects, we cannot optimize. */
4467 if (!len || TREE_SIDE_EFFECTS (len))
4468 return NULL_RTX;
4470 /* The actual new length parameter is MIN(len,arg3). */
4471 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4472 fold_convert (TREE_TYPE (len), arg3));
4474 /* If either argument's alignment is unknown, call the library function. */
4475 if (arg1_align == 0 || arg2_align == 0)
4476 return NULL_RTX;
4478 /* Make a place to write the result of the instruction. */
4479 result = target;
4480 if (! (result != 0
4481 && REG_P (result) && GET_MODE (result) == insn_mode
4482 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4483 result = gen_reg_rtx (insn_mode);
4485 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4486 arg1 = builtin_save_expr (arg1);
4487 arg2 = builtin_save_expr (arg2);
4488 len = builtin_save_expr (len);
4490 arg1_rtx = get_memory_rtx (arg1, len);
4491 arg2_rtx = get_memory_rtx (arg2, len);
4492 arg3_rtx = expand_normal (len);
4493 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4494 GEN_INT (MIN (arg1_align, arg2_align)));
4495 if (insn)
4497 emit_insn (insn);
4499 /* Return the value in the proper mode for this function. */
4500 mode = TYPE_MODE (TREE_TYPE (exp));
4501 if (GET_MODE (result) == mode)
4502 return result;
4503 if (target == 0)
4504 return convert_to_mode (mode, result, 0);
4505 convert_move (target, result, 0);
4506 return target;
4509 /* Expand the library call ourselves using a stabilized argument
4510 list to avoid evaluating the function's arguments twice. */
4511 fndecl = get_callee_fndecl (exp);
4512 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4513 if (TREE_CODE (fn) == CALL_EXPR)
4514 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4515 return expand_call (fn, target, target == const0_rtx);
4517 #endif
4518 return NULL_RTX;
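/* A minimal sketch (hypothetical helper) of why MIN (strlen (s) + 1, n)
   is a safe length bound for the cmpstrnsi expansion above: strncmp can
   never look past the first NUL in either string, nor past the caller's
   limit, so shrinking the limit to a known length plus one byte changes
   nothing.  */
static int
example_strncmp (const char *a, const char *b, unsigned long n)
{
  unsigned long i;

  for (i = 0; i < n; i++)
    {
      if (a[i] != b[i])
        return (unsigned char) a[i] < (unsigned char) b[i] ? -1 : 1;
      if (a[i] == '\0')
        return 0;
    }
  return 0;
}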
4521 /* Expand expression EXP, which is a call to the strcat builtin.
4522 Return NULL_RTX if we failed; the caller should emit a normal call,
4523 otherwise try to get the result in TARGET, if convenient. */
4525 static rtx
4526 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4528 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4529 return NULL_RTX;
4530 else
4532 tree dst = CALL_EXPR_ARG (exp, 0);
4533 tree src = CALL_EXPR_ARG (exp, 1);
4534 const char *p = c_getstr (src);
4536 /* If the string length is zero, return the dst parameter. */
4537 if (p && *p == '\0')
4538 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4540 if (optimize_insn_for_speed_p ())
4542 /* See if we can store by pieces into (dst + strlen(dst)). */
4543 tree newsrc, newdst,
4544 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4545 rtx insns;
4547 /* Stabilize the argument list. */
4548 newsrc = builtin_save_expr (src);
4549 dst = builtin_save_expr (dst);
4551 start_sequence ();
4553 /* Create strlen (dst). */
4554 newdst = build_call_expr (strlen_fn, 1, dst);
4555 /* Create (dst p+ strlen (dst)). */
4557 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4558 newdst = builtin_save_expr (newdst);
4560 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4562 end_sequence (); /* Stop sequence. */
4563 return NULL_RTX;
4566 /* Output the entire sequence. */
4567 insns = get_insns ();
4568 end_sequence ();
4569 emit_insn (insns);
4571 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4574 return NULL_RTX;
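/* A minimal sketch (hypothetical helper) of the rewrite built above:
   strcat (dst, src) becomes a strcpy into dst + strlen (dst), with dst
   itself returned unchanged.  */
static char *
example_strcat_via_strcpy (char *dst, const char *src)
{
  unsigned long n = 0;
  unsigned long i = 0;

  while (dst[n] != '\0')        /* strlen (dst) */
    n++;
  do                            /* strcpy (dst + n, src) */
    dst[n + i] = src[i];
  while (src[i++] != '\0');
  return dst;
}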
4578 /* Expand expression EXP, which is a call to the strncat builtin.
4579 Return NULL_RTX if we failed; the caller should emit a normal call,
4580 otherwise try to get the result in TARGET, if convenient. */
4582 static rtx
4583 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4585 if (validate_arglist (exp,
4586 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4588 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4589 CALL_EXPR_ARG (exp, 1),
4590 CALL_EXPR_ARG (exp, 2));
4591 if (result)
4592 return expand_expr (result, target, mode, EXPAND_NORMAL);
4594 return NULL_RTX;
4597 /* Expand expression EXP, which is a call to the strspn builtin.
4598 Return NULL_RTX if we failed; the caller should emit a normal call,
4599 otherwise try to get the result in TARGET, if convenient. */
4601 static rtx
4602 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4604 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4606 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4607 CALL_EXPR_ARG (exp, 1));
4608 if (result)
4609 return expand_expr (result, target, mode, EXPAND_NORMAL);
4611 return NULL_RTX;
4614 /* Expand expression EXP, which is a call to the strcspn builtin.
4615 Return NULL_RTX if we failed; the caller should emit a normal call,
4616 otherwise try to get the result in TARGET, if convenient. */
4618 static rtx
4619 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4621 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4623 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4624 CALL_EXPR_ARG (exp, 1));
4625 if (result)
4626 return expand_expr (result, target, mode, EXPAND_NORMAL);
4628 return NULL_RTX;
4631 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4632 if that's convenient. */
4634 rtx
4635 expand_builtin_saveregs (void)
4637 rtx val, seq;
4639 /* Don't do __builtin_saveregs more than once in a function.
4640 Save the result of the first call and reuse it. */
4641 if (saveregs_value != 0)
4642 return saveregs_value;
4644 /* When this function is called, it means that registers must be
4645 saved on entry to this function. So we migrate the call to the
4646 first insn of this function. */
4648 start_sequence ();
4650 /* Do whatever the machine needs done in this case. */
4651 val = targetm.calls.expand_builtin_saveregs ();
4653 seq = get_insns ();
4654 end_sequence ();
4656 saveregs_value = val;
4658 /* Put the insns after the NOTE that starts the function. If this
4659 is inside a start_sequence, make the outer-level insn chain current, so
4660 the code is placed at the start of the function. */
4661 push_topmost_sequence ();
4662 emit_insn_after (seq, entry_of_function ());
4663 pop_topmost_sequence ();
4665 return val;
4668 /* __builtin_args_info (N) returns word N of the arg space info
4669 for the current function. The number and meanings of words
4670 are controlled by the definition of CUMULATIVE_ARGS. */
4672 static rtx
4673 expand_builtin_args_info (tree exp)
4675 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4676 int *word_ptr = (int *) &crtl->args.info;
4678 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4680 if (call_expr_nargs (exp) != 0)
4682 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4683 error ("argument of %<__builtin_args_info%> must be constant");
4684 else
4686 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4688 if (wordnum < 0 || wordnum >= nwords)
4689 error ("argument of %<__builtin_args_info%> out of range");
4690 else
4691 return GEN_INT (word_ptr[wordnum]);
4694 else
4695 error ("missing argument in %<__builtin_args_info%>");
4697 return const0_rtx;
4700 /* Expand a call to __builtin_next_arg. */
4702 static rtx
4703 expand_builtin_next_arg (void)
4705 /* Checking arguments is already done in fold_builtin_next_arg
4706 that must be called before this function. */
4707 return expand_binop (ptr_mode, add_optab,
4708 crtl->args.internal_arg_pointer,
4709 crtl->args.arg_offset_rtx,
4710 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4713 /* Make it easier for the backends by protecting the valist argument
4714 from multiple evaluations. */
4716 static tree
4717 stabilize_va_list (tree valist, int needs_lvalue)
4719 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4721 gcc_assert (vatype != NULL_TREE);
4723 if (TREE_CODE (vatype) == ARRAY_TYPE)
4725 if (TREE_SIDE_EFFECTS (valist))
4726 valist = save_expr (valist);
4728 /* For this case, the backends will be expecting a pointer to
4729 vatype, but it's possible we've actually been given an array
4730 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4731 So fix it. */
4732 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4734 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4735 valist = build_fold_addr_expr_with_type (valist, p1);
4738 else
4740 tree pt;
4742 if (! needs_lvalue)
4744 if (! TREE_SIDE_EFFECTS (valist))
4745 return valist;
4747 pt = build_pointer_type (vatype);
4748 valist = fold_build1 (ADDR_EXPR, pt, valist);
4749 TREE_SIDE_EFFECTS (valist) = 1;
4752 if (TREE_SIDE_EFFECTS (valist))
4753 valist = save_expr (valist);
4754 valist = build_fold_indirect_ref (valist);
4757 return valist;
4760 /* The "standard" definition of va_list is void*. */
4762 tree
4763 std_build_builtin_va_list (void)
4765 return ptr_type_node;
4768 /* The "standard" abi va_list is va_list_type_node. */
4770 tree
4771 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4773 return va_list_type_node;
4776 /* The "standard" type of va_list is va_list_type_node. */
4778 tree
4779 std_canonical_va_list_type (tree type)
4781 tree wtype, htype;
4783 if (INDIRECT_REF_P (type))
4784 type = TREE_TYPE (type);
4785 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4786 type = TREE_TYPE (type);
4787 wtype = va_list_type_node;
4788 htype = type;
4789 /* Treat structure va_list types. */
4790 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4791 htype = TREE_TYPE (htype);
4792 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4794 /* If va_list is an array type, the argument may have decayed
4795 to a pointer type, e.g. by being passed to another function.
4796 In that case, unwrap both types so that we can compare the
4797 underlying records. */
4798 if (TREE_CODE (htype) == ARRAY_TYPE
4799 || POINTER_TYPE_P (htype))
4801 wtype = TREE_TYPE (wtype);
4802 htype = TREE_TYPE (htype);
4805 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4806 return va_list_type_node;
4808 return NULL_TREE;
4811 /* The "standard" implementation of va_start: just assign `nextarg' to
4812 the variable. */
4814 void
4815 std_expand_builtin_va_start (tree valist, rtx nextarg)
4817 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4818 convert_move (va_r, nextarg, 0);
4821 /* Expand EXP, a call to __builtin_va_start. */
4823 static rtx
4824 expand_builtin_va_start (tree exp)
4826 rtx nextarg;
4827 tree valist;
4829 if (call_expr_nargs (exp) < 2)
4831 error ("too few arguments to function %<va_start%>");
4832 return const0_rtx;
4835 if (fold_builtin_next_arg (exp, true))
4836 return const0_rtx;
4838 nextarg = expand_builtin_next_arg ();
4839 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4841 if (targetm.expand_builtin_va_start)
4842 targetm.expand_builtin_va_start (valist, nextarg);
4843 else
4844 std_expand_builtin_va_start (valist, nextarg);
4846 return const0_rtx;
4849 /* The "standard" implementation of va_arg: read the value from the
4850 current (padded) address and increment by the (padded) size. */
4852 tree
4853 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4854 gimple_seq *post_p)
4856 tree addr, t, type_size, rounded_size, valist_tmp;
4857 unsigned HOST_WIDE_INT align, boundary;
4858 bool indirect;
4860 #ifdef ARGS_GROW_DOWNWARD
4861 /* All of the alignment and movement below is for args-grow-up machines.
4862 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4863 implement their own specialized gimplify_va_arg_expr routines. */
4864 gcc_unreachable ();
4865 #endif
4867 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4868 if (indirect)
4869 type = build_pointer_type (type);
4871 align = PARM_BOUNDARY / BITS_PER_UNIT;
4872 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4874 /* When we align a parameter on the stack for the caller, alignment
4875 beyond MAX_SUPPORTED_STACK_ALIGNMENT is clamped to
4876 MAX_SUPPORTED_STACK_ALIGNMENT, so make the callee's view here match
4877 the caller's. */
4878 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4879 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4881 boundary /= BITS_PER_UNIT;
4883 /* Hoist the valist value into a temporary for the moment. */
4884 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4886 /* The va_list pointer is aligned to PARM_BOUNDARY. If the argument actually
4887 requires greater alignment, we must perform dynamic alignment. */
4888 if (boundary > align
4889 && !integer_zerop (TYPE_SIZE (type)))
4891 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4892 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4893 valist_tmp, size_int (boundary - 1)));
4894 gimplify_and_add (t, pre_p);
4896 t = fold_convert (sizetype, valist_tmp);
4897 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4898 fold_convert (TREE_TYPE (valist),
4899 fold_build2 (BIT_AND_EXPR, sizetype, t,
4900 size_int (-boundary))));
4901 gimplify_and_add (t, pre_p);
4903 else
4904 boundary = align;
4906 /* If the actual alignment is less than the alignment of the type,
4907 adjust the type accordingly so that we don't assume strict alignment
4908 when dereferencing the pointer. */
4909 boundary *= BITS_PER_UNIT;
4910 if (boundary < TYPE_ALIGN (type))
4912 type = build_variant_type_copy (type);
4913 TYPE_ALIGN (type) = boundary;
4916 /* Compute the rounded size of the type. */
4917 type_size = size_in_bytes (type);
4918 rounded_size = round_up (type_size, align);
4920 /* Reduce rounded_size so it's sharable with the postqueue. */
4921 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4923 /* Get AP. */
4924 addr = valist_tmp;
4925 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4927 /* Small args are padded downward. */
4928 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4929 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4930 size_binop (MINUS_EXPR, rounded_size, type_size));
4931 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4934 /* Compute new value for AP. */
4935 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4936 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4937 gimplify_and_add (t, pre_p);
4939 addr = fold_convert (build_pointer_type (type), addr);
4941 if (indirect)
4942 addr = build_va_arg_indirect_ref (addr);
4944 return build_va_arg_indirect_ref (addr);
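/* A minimal sketch of the dynamic alignment step above, assuming
   BOUNDARY is a power of two: adding BOUNDARY - 1 and masking with
   -BOUNDARY rounds the va_list pointer up to the next BOUNDARY-aligned
   address, which is what the two MODIFY_EXPRs build in tree form.  */
static unsigned long
example_align_up (unsigned long addr, unsigned long boundary)
{
  return (addr + boundary - 1) & -boundary;
}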
4947 /* Build an indirect-ref expression over the given TREE, which represents a
4948 piece of a va_arg() expansion. */
4949 tree
4950 build_va_arg_indirect_ref (tree addr)
4952 addr = build_fold_indirect_ref (addr);
4954 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4955 mf_mark (addr);
4957 return addr;
4960 /* Return a dummy expression of type TYPE in order to keep going after an
4961 error. */
4963 static tree
4964 dummy_object (tree type)
4966 tree t = build_int_cst (build_pointer_type (type), 0);
4967 return build1 (INDIRECT_REF, type, t);
4970 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4971 builtin function, but a very special sort of operator. */
4973 enum gimplify_status
4974 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4976 tree promoted_type, have_va_type;
4977 tree valist = TREE_OPERAND (*expr_p, 0);
4978 tree type = TREE_TYPE (*expr_p);
4979 tree t;
4980 location_t loc = EXPR_HAS_LOCATION (*expr_p) ? EXPR_LOCATION (*expr_p) :
4981 UNKNOWN_LOCATION;
4983 /* Verify that valist is of the proper type. */
4984 have_va_type = TREE_TYPE (valist);
4985 if (have_va_type == error_mark_node)
4986 return GS_ERROR;
4987 have_va_type = targetm.canonical_va_list_type (have_va_type);
4989 if (have_va_type == NULL_TREE)
4991 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4992 return GS_ERROR;
4995 /* Generate a diagnostic for requesting data of a type that cannot
4996 be passed through `...' due to type promotion at the call site. */
4997 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4998 != type)
5000 static bool gave_help;
5001 bool warned;
5003 /* Unfortunately, this is merely undefined, rather than a constraint
5004 violation, so we cannot make this an error. If this call is never
5005 executed, the program is still strictly conforming. */
5006 warned = warning_at (loc, 0,
5007 "%qT is promoted to %qT when passed through %<...%>",
5008 type, promoted_type);
5009 if (!gave_help && warned)
5011 gave_help = true;
5012 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
5013 promoted_type, type);
5016 /* We can, however, treat "undefined" any way we please.
5017 Call abort to encourage the user to fix the program. */
5018 if (warned)
5019 inform (loc, "if this code is reached, the program will abort");
5020 /* Before the abort, allow the evaluation of the va_list
5021 expression to exit or longjmp. */
5022 gimplify_and_add (valist, pre_p);
5023 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
5024 gimplify_and_add (t, pre_p);
5026 /* This is dead code, but go ahead and finish so that the
5027 mode of the result comes out right. */
5028 *expr_p = dummy_object (type);
5029 return GS_ALL_DONE;
5031 else
5033 /* Make it easier for the backends by protecting the valist argument
5034 from multiple evaluations. */
5035 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
5037 /* For this case, the backends will be expecting a pointer to
5038 TREE_TYPE (abi), but it's possible we've
5039 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
5040 So fix it. */
5041 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5043 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
5044 valist = fold_convert (p1, build_fold_addr_expr (valist));
5047 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
5049 else
5050 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
5052 if (!targetm.gimplify_va_arg_expr)
5053 /* FIXME: Once most targets are converted we should merely
5054 assert this is non-null. */
5055 return GS_ALL_DONE;
5057 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
5058 return GS_OK;
5062 /* Expand EXP, a call to __builtin_va_end. */
5064 static rtx
5065 expand_builtin_va_end (tree exp)
5067 tree valist = CALL_EXPR_ARG (exp, 0);
5069 /* Evaluate for side effects, if needed. I hate macros that don't
5070 do that. */
5071 if (TREE_SIDE_EFFECTS (valist))
5072 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5074 return const0_rtx;
5077 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5078 builtin rather than just as an assignment in stdarg.h because of the
5079 nastiness of array-type va_list types. */
5081 static rtx
5082 expand_builtin_va_copy (tree exp)
5084 tree dst, src, t;
5086 dst = CALL_EXPR_ARG (exp, 0);
5087 src = CALL_EXPR_ARG (exp, 1);
5089 dst = stabilize_va_list (dst, 1);
5090 src = stabilize_va_list (src, 0);
5092 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5094 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5096 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5097 TREE_SIDE_EFFECTS (t) = 1;
5098 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5100 else
5102 rtx dstb, srcb, size;
5104 /* Evaluate to pointers. */
5105 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5106 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5107 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5108 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5110 dstb = convert_memory_address (Pmode, dstb);
5111 srcb = convert_memory_address (Pmode, srcb);
5113 /* "Dereference" to BLKmode memories. */
5114 dstb = gen_rtx_MEM (BLKmode, dstb);
5115 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5116 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5117 srcb = gen_rtx_MEM (BLKmode, srcb);
5118 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5119 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5121 /* Copy. */
5122 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5125 return const0_rtx;
5128 /* Expand a call to one of the builtin functions __builtin_frame_address or
5129 __builtin_return_address. */
5131 static rtx
5132 expand_builtin_frame_address (tree fndecl, tree exp)
5134 /* The argument must be a nonnegative integer constant.
5135 It counts the number of frames to scan up the stack.
5136 The value is the return address saved in that frame. */
5137 if (call_expr_nargs (exp) == 0)
5138 /* Warning about missing arg was already issued. */
5139 return const0_rtx;
5140 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5142 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5143 error ("invalid argument to %<__builtin_frame_address%>");
5144 else
5145 error ("invalid argument to %<__builtin_return_address%>");
5146 return const0_rtx;
5148 else
5150 rtx tem
5151 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5152 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5154 /* Some ports cannot access arbitrary stack frames. */
5155 if (tem == NULL)
5157 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5158 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5159 else
5160 warning (0, "unsupported argument to %<__builtin_return_address%>");
5161 return const0_rtx;
5164 /* For __builtin_frame_address, return what we've got. */
5165 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5166 return tem;
5168 if (!REG_P (tem)
5169 && ! CONSTANT_P (tem))
5170 tem = copy_to_mode_reg (Pmode, tem);
5171 return tem;
5175 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5176 we failed and the caller should emit a normal call, otherwise try to get
5177 the result in TARGET, if convenient. */
5179 static rtx
5180 expand_builtin_alloca (tree exp, rtx target)
5182 rtx op0;
5183 rtx result;
5185 /* Emit normal call if marked not-inlineable. */
5186 if (CALL_CANNOT_INLINE_P (exp))
5187 return NULL_RTX;
5189 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5190 return NULL_RTX;
5192 /* Compute the argument. */
5193 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5195 /* Allocate the desired space. */
5196 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5197 result = convert_memory_address (ptr_mode, result);
5199 return result;
5202 /* Expand a call to a bswap builtin with argument ARG0. MODE
5203 is the mode to expand with. */
5205 static rtx
5206 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5208 enum machine_mode mode;
5209 tree arg;
5210 rtx op0;
5212 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5213 return NULL_RTX;
5215 arg = CALL_EXPR_ARG (exp, 0);
5216 mode = TYPE_MODE (TREE_TYPE (arg));
5217 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5219 target = expand_unop (mode, bswap_optab, op0, target, 1);
5221 gcc_assert (target);
5223 return convert_to_mode (mode, target, 0);
5226 /* Expand a call to a unary builtin in EXP.
5227 Return NULL_RTX if a normal call should be emitted rather than expanding the
5228 function in-line. If convenient, the result should be placed in TARGET.
5229 SUBTARGET may be used as the target for computing one of EXP's operands. */
5231 static rtx
5232 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5233 rtx subtarget, optab op_optab)
5235 rtx op0;
5237 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5238 return NULL_RTX;
5240 /* Compute the argument. */
5241 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5242 VOIDmode, EXPAND_NORMAL);
5243 /* Compute op, into TARGET if possible.
5244 Set TARGET to wherever the result comes back. */
5245 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5246 op_optab, op0, target, 1);
5247 gcc_assert (target);
5249 return convert_to_mode (target_mode, target, 0);
5252 /* If the string passed to fputs is a constant and is one character
5253 long, we attempt to transform this call into __builtin_fputc(). */
5255 static rtx
5256 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5258 /* Verify the arguments in the original call. */
5259 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5261 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5262 CALL_EXPR_ARG (exp, 1),
5263 (target == const0_rtx),
5264 unlocked, NULL_TREE);
5265 if (result)
5266 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5268 return NULL_RTX;
5271 /* Expand a call to __builtin_expect. We just return our argument,
5272 since the __builtin_expect semantics should already have been handled
5273 by the tree branch prediction pass. */
5275 static rtx
5276 expand_builtin_expect (tree exp, rtx target)
5278 tree arg, c;
5280 if (call_expr_nargs (exp) < 2)
5281 return const0_rtx;
5282 arg = CALL_EXPR_ARG (exp, 0);
5283 c = CALL_EXPR_ARG (exp, 1);
5285 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5286 /* When guessing was done, the hints should already have been stripped away. */
5287 gcc_assert (!flag_guess_branch_prob
5288 || optimize == 0 || errorcount || sorrycount);
5289 return target;
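/* An illustrative sketch: by the time

     if (__builtin_expect (x == 0, 0)) ...

   reaches RTL expansion, the probability hint has already been consumed
   by the tree-level branch predictor, so only the first argument (the
   tested value) is expanded and returned here.  */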
5292 void
5293 expand_builtin_trap (void)
5295 #ifdef HAVE_trap
5296 if (HAVE_trap)
5297 emit_insn (gen_trap ());
5298 else
5299 #endif
5300 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5301 emit_barrier ();
5304 /* Expand a call to __builtin_unreachable. We do nothing except emit
5305 a barrier saying that control flow will not pass here.
5307 It is the responsibility of the program being compiled to ensure
5308 that control flow never reaches __builtin_unreachable. */
5309 static void
5310 expand_builtin_unreachable (void)
5312 emit_barrier ();
5315 /* Expand EXP, a call to fabs, fabsf or fabsl.
5316 Return NULL_RTX if a normal call should be emitted rather than expanding
5317 the function inline. If convenient, the result should be placed
5318 in TARGET. SUBTARGET may be used as the target for computing
5319 the operand. */
5321 static rtx
5322 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5324 enum machine_mode mode;
5325 tree arg;
5326 rtx op0;
5328 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5329 return NULL_RTX;
5331 arg = CALL_EXPR_ARG (exp, 0);
5332 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5333 mode = TYPE_MODE (TREE_TYPE (arg));
5334 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5335 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5338 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5339 Return NULL_RTX if a normal call should be emitted rather than expanding the
5340 function inline. If convenient, the result should be placed in TARGET.
5341 SUBTARGET may be used as the target for computing the operand. */
5343 static rtx
5344 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5346 rtx op0, op1;
5347 tree arg;
5349 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5350 return NULL_RTX;
5352 arg = CALL_EXPR_ARG (exp, 0);
5353 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5355 arg = CALL_EXPR_ARG (exp, 1);
5356 op1 = expand_normal (arg);
5358 return expand_copysign (op0, op1, target);
5361 /* Create a new constant string literal and return a char* pointer to it.
5362 The STRING_CST value is the LEN characters at STR. */
5363 tree
5364 build_string_literal (int len, const char *str)
5366 tree t, elem, index, type;
5368 t = build_string (len, str);
5369 elem = build_type_variant (char_type_node, 1, 0);
5370 index = build_index_type (size_int (len - 1));
5371 type = build_array_type (elem, index);
5372 TREE_TYPE (t) = type;
5373 TREE_CONSTANT (t) = 1;
5374 TREE_READONLY (t) = 1;
5375 TREE_STATIC (t) = 1;
5377 type = build_pointer_type (elem);
5378 t = build1 (ADDR_EXPR, type,
5379 build4 (ARRAY_REF, elem,
5380 t, integer_zero_node, NULL_TREE, NULL_TREE));
5381 return t;
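/* An illustrative sketch: build_string_literal (6, "hello") yields a
   tree equivalent to the C expression

     &"hello"[0]

   i.e. the address of element zero of a static, read-only const char
   array of LEN bytes (including the terminating NUL supplied by the
   caller).  */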
5384 /* Expand EXP, a call to printf or printf_unlocked.
5385 Return NULL_RTX if a normal call should be emitted rather than transforming
5386 the function inline. If convenient, the result should be placed in
5387 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5388 call. */
5389 static rtx
5390 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5391 bool unlocked)
5393 /* If we're using an unlocked function, assume the other unlocked
5394 functions exist explicitly. */
5395 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5396 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5397 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5398 : implicit_built_in_decls[BUILT_IN_PUTS];
5399 const char *fmt_str;
5400 tree fn = 0;
5401 tree fmt, arg;
5402 int nargs = call_expr_nargs (exp);
5404 /* If the return value is used, don't do the transformation. */
5405 if (target != const0_rtx)
5406 return NULL_RTX;
5408 /* Verify the required arguments in the original call. */
5409 if (nargs == 0)
5410 return NULL_RTX;
5411 fmt = CALL_EXPR_ARG (exp, 0);
5412 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5413 return NULL_RTX;
5415 /* Check whether the format is a literal string constant. */
5416 fmt_str = c_getstr (fmt);
5417 if (fmt_str == NULL)
5418 return NULL_RTX;
5420 if (!init_target_chars ())
5421 return NULL_RTX;
5423 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5424 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5426 if ((nargs != 2)
5427 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5428 return NULL_RTX;
5429 if (fn_puts)
5430 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5432 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5433 else if (strcmp (fmt_str, target_percent_c) == 0)
5435 if ((nargs != 2)
5436 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5437 return NULL_RTX;
5438 if (fn_putchar)
5439 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5441 else
5443 /* We can't handle anything else with % args or %% ... yet. */
5444 if (strchr (fmt_str, target_percent))
5445 return NULL_RTX;
5447 if (nargs > 1)
5448 return NULL_RTX;
5450 /* If the format specifier was "", printf does nothing. */
5451 if (fmt_str[0] == '\0')
5452 return const0_rtx;
5453 /* If the format specifier has length of 1, call putchar. */
5454 if (fmt_str[1] == '\0')
5456 /* Given printf ("c"), where c is any single character,
5457 convert "c"[0] to an int and pass that to the replacement
5458 function. */
5459 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5460 if (fn_putchar)
5461 fn = build_call_expr (fn_putchar, 1, arg);
5463 else
5465 /* If the format specifier was "string\n", call puts("string"). */
5466 size_t len = strlen (fmt_str);
5467 if ((unsigned char)fmt_str[len - 1] == target_newline)
5469 /* Create a NUL-terminated string that's one char shorter
5470 than the original, stripping off the trailing '\n'. */
5471 char *newstr = XALLOCAVEC (char, len);
5472 memcpy (newstr, fmt_str, len - 1);
5473 newstr[len - 1] = 0;
5474 arg = build_string_literal (len, newstr);
5475 if (fn_puts)
5476 fn = build_call_expr (fn_puts, 1, arg);
5478 else
5479 /* We'd like to arrange to call fputs(string,stdout) here,
5480 but we need stdout and don't have a way to get it yet. */
5481 return NULL_RTX;
5485 if (!fn)
5486 return NULL_RTX;
5487 if (TREE_CODE (fn) == CALL_EXPR)
5488 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5489 return expand_expr (fn, target, mode, EXPAND_NORMAL);
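/* An illustrative summary of the rewrites performed above when the
   printf return value is unused:

     printf ("%s\n", s);   =>  puts (s);
     printf ("%c", c);     =>  putchar (c);
     printf ("x");         =>  putchar ('x');
     printf ("hello\n");   =>  puts ("hello");
     printf ("");          =>  (no code)

   Any other format containing '%' is left as a real printf call.  */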
5492 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5493 Return NULL_RTX if a normal call should be emitted rather than transforming
5494 the function inline. If convenient, the result should be placed in
5495 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5496 call. */
5497 static rtx
5498 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5499 bool unlocked)
5501 /* If we're using an unlocked function, assume the other unlocked
5502 functions exist explicitly. */
5503 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5504 : implicit_built_in_decls[BUILT_IN_FPUTC];
5505 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5506 : implicit_built_in_decls[BUILT_IN_FPUTS];
5507 const char *fmt_str;
5508 tree fn = 0;
5509 tree fmt, fp, arg;
5510 int nargs = call_expr_nargs (exp);
5512 /* If the return value is used, don't do the transformation. */
5513 if (target != const0_rtx)
5514 return NULL_RTX;
5516 /* Verify the required arguments in the original call. */
5517 if (nargs < 2)
5518 return NULL_RTX;
5519 fp = CALL_EXPR_ARG (exp, 0);
5520 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5521 return NULL_RTX;
5522 fmt = CALL_EXPR_ARG (exp, 1);
5523 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5524 return NULL_RTX;
5526 /* Check whether the format is a literal string constant. */
5527 fmt_str = c_getstr (fmt);
5528 if (fmt_str == NULL)
5529 return NULL_RTX;
5531 if (!init_target_chars ())
5532 return NULL_RTX;
5534 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5535 if (strcmp (fmt_str, target_percent_s) == 0)
5537 if ((nargs != 3)
5538 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5539 return NULL_RTX;
5540 arg = CALL_EXPR_ARG (exp, 2);
5541 if (fn_fputs)
5542 fn = build_call_expr (fn_fputs, 2, arg, fp);
5544 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5545 else if (strcmp (fmt_str, target_percent_c) == 0)
5547 if ((nargs != 3)
5548 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5549 return NULL_RTX;
5550 arg = CALL_EXPR_ARG (exp, 2);
5551 if (fn_fputc)
5552 fn = build_call_expr (fn_fputc, 2, arg, fp);
5554 else
5556 /* We can't handle anything else with % args or %% ... yet. */
5557 if (strchr (fmt_str, target_percent))
5558 return NULL_RTX;
5560 if (nargs > 2)
5561 return NULL_RTX;
5563 /* If the format specifier was "", fprintf does nothing. */
5564 if (fmt_str[0] == '\0')
5566 /* Evaluate and ignore FILE* argument for side-effects. */
5567 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5568 return const0_rtx;
5571 /* When "string" doesn't contain %, replace all cases of
5572 fprintf(stream,string) with fputs(string,stream). The fputs
5573 builtin will take care of special cases like length == 1. */
5574 if (fn_fputs)
5575 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5578 if (!fn)
5579 return NULL_RTX;
5580 if (TREE_CODE (fn) == CALL_EXPR)
5581 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5582 return expand_expr (fn, target, mode, EXPAND_NORMAL);
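/* An illustrative summary of the rewrites performed above when the
   fprintf return value is unused:

     fprintf (fp, "%s", s);   =>  fputs (s, fp);
     fprintf (fp, "%c", c);   =>  fputc (c, fp);
     fprintf (fp, "hello");   =>  fputs ("hello", fp);
     fprintf (fp, "");        =>  (FP evaluated for side effects only)

   Formats containing any other '%' directive are left alone.  */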
5585 /* Expand a call EXP to sprintf. Return NULL_RTX if
5586 a normal call should be emitted rather than expanding the function
5587 inline. If convenient, the result should be placed in TARGET with
5588 mode MODE. */
5590 static rtx
5591 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5593 tree dest, fmt;
5594 const char *fmt_str;
5595 int nargs = call_expr_nargs (exp);
5597 /* Verify the required arguments in the original call. */
5598 if (nargs < 2)
5599 return NULL_RTX;
5600 dest = CALL_EXPR_ARG (exp, 0);
5601 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5602 return NULL_RTX;
5603 fmt = CALL_EXPR_ARG (exp, 1);
5604 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5605 return NULL_RTX;
5607 /* Check whether the format is a literal string constant. */
5608 fmt_str = c_getstr (fmt);
5609 if (fmt_str == NULL)
5610 return NULL_RTX;
5612 if (!init_target_chars ())
5613 return NULL_RTX;
5615 /* If the format doesn't contain % args or %%, use strcpy. */
5616 if (strchr (fmt_str, target_percent) == 0)
5618 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5619 tree exp;
5621 if ((nargs > 2) || ! fn)
5622 return NULL_RTX;
5623 expand_expr (build_call_expr (fn, 2, dest, fmt),
5624 const0_rtx, VOIDmode, EXPAND_NORMAL);
5625 if (target == const0_rtx)
5626 return const0_rtx;
5627 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5628 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5630 /* If the format is "%s", use strcpy if the result isn't used. */
5631 else if (strcmp (fmt_str, target_percent_s) == 0)
5633 tree fn, arg, len;
5634 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5636 if (! fn)
5637 return NULL_RTX;
5638 if (nargs != 3)
5639 return NULL_RTX;
5640 arg = CALL_EXPR_ARG (exp, 2);
5641 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5642 return NULL_RTX;
5644 if (target != const0_rtx)
5646 len = c_strlen (arg, 1);
5647 if (! len || TREE_CODE (len) != INTEGER_CST)
5648 return NULL_RTX;
5650 else
5651 len = NULL_TREE;
5653 expand_expr (build_call_expr (fn, 2, dest, arg),
5654 const0_rtx, VOIDmode, EXPAND_NORMAL);
5656 if (target == const0_rtx)
5657 return const0_rtx;
5658 return expand_expr (len, target, mode, EXPAND_NORMAL);
5661 return NULL_RTX;
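/* An illustrative summary of the rewrites performed above:

     sprintf (buf, "hello");   =>  strcpy (buf, "hello");  result 5
     sprintf (buf, "%s", s);   =>  strcpy (buf, s);        result strlen (s)

   The "%s" form is transformed only when the result is unused or the
   length of S is a compile-time constant.  */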
5664 /* Expand a call to either the entry or exit function profiler. */
5666 static rtx
5667 expand_builtin_profile_func (bool exitp)
5669 rtx this_rtx, which;
5671 this_rtx = DECL_RTL (current_function_decl);
5672 gcc_assert (MEM_P (this_rtx));
5673 this_rtx = XEXP (this_rtx, 0);
5675 if (exitp)
5676 which = profile_function_exit_libfunc;
5677 else
5678 which = profile_function_entry_libfunc;
5680 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5681 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5682 0),
5683 Pmode);
5685 return const0_rtx;
5688 /* Expand a call to __builtin___clear_cache. */
5690 static rtx
5691 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5693 #ifndef HAVE_clear_cache
5694 #ifdef CLEAR_INSN_CACHE
5695 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5696 does something. Just do the default expansion to a call to
5697 __clear_cache(). */
5698 return NULL_RTX;
5699 #else
5700 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5701 does nothing. There is no need to call it. Do nothing. */
5702 return const0_rtx;
5703 #endif /* CLEAR_INSN_CACHE */
5704 #else
5705 /* We have a "clear_cache" insn, and it will handle everything. */
5706 tree begin, end;
5707 rtx begin_rtx, end_rtx;
5708 enum insn_code icode;
5710 /* We must not expand to a library call. If we did, any
5711 fallback library function in libgcc that might contain a call to
5712 __builtin___clear_cache() would recurse infinitely. */
5713 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5715 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5716 return const0_rtx;
5719 if (HAVE_clear_cache)
5721 icode = CODE_FOR_clear_cache;
5723 begin = CALL_EXPR_ARG (exp, 0);
5724 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5725 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5726 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5727 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5729 end = CALL_EXPR_ARG (exp, 1);
5730 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5731 end_rtx = convert_memory_address (Pmode, end_rtx);
5732 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5733 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5735 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5737 return const0_rtx;
5738 #endif /* HAVE_clear_cache */
5741 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5743 static rtx
5744 round_trampoline_addr (rtx tramp)
5746 rtx temp, addend, mask;
5748 /* If we don't need too much alignment, we'll have been guaranteed
5749 proper alignment by get_trampoline_type. */
5750 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5751 return tramp;
5753 /* Round address up to desired boundary. */
5754 temp = gen_reg_rtx (Pmode);
5755 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5756 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5758 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5759 temp, 0, OPTAB_LIB_WIDEN);
5760 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5761 temp, 0, OPTAB_LIB_WIDEN);
5763 return tramp;
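/* An illustrative sketch of the align-up idiom emitted above, assuming
   TRAMPOLINE_ALIGNMENT of 64 bits (8 bytes):

     rounded = (tramp + 7) & -8;  */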
5766 static rtx
5767 expand_builtin_init_trampoline (tree exp)
5769 tree t_tramp, t_func, t_chain;
5770 rtx r_tramp, r_func, r_chain;
5771 #ifdef TRAMPOLINE_TEMPLATE
5772 rtx blktramp;
5773 #endif
5775 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5776 POINTER_TYPE, VOID_TYPE))
5777 return NULL_RTX;
5779 t_tramp = CALL_EXPR_ARG (exp, 0);
5780 t_func = CALL_EXPR_ARG (exp, 1);
5781 t_chain = CALL_EXPR_ARG (exp, 2);
5783 r_tramp = expand_normal (t_tramp);
5784 r_func = expand_normal (t_func);
5785 r_chain = expand_normal (t_chain);
5787 /* Generate insns to initialize the trampoline. */
5788 r_tramp = round_trampoline_addr (r_tramp);
5789 #ifdef TRAMPOLINE_TEMPLATE
5790 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5791 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5792 emit_block_move (blktramp, assemble_trampoline_template (),
5793 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5794 #endif
5795 trampolines_created = 1;
5796 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5798 return const0_rtx;
5801 static rtx
5802 expand_builtin_adjust_trampoline (tree exp)
5804 rtx tramp;
5806 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5807 return NULL_RTX;
5809 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5810 tramp = round_trampoline_addr (tramp);
5811 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5812 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5813 #endif
5815 return tramp;
5818 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5819 function. The function first checks whether the back end provides
5820 an insn to implement signbit for the respective mode. If not, it
5821 checks whether the floating point format of the value is such that
5822 the sign bit can be extracted. If that is not the case, the
5823 function returns NULL_RTX to indicate that a normal call should be
5824 emitted rather than expanding the function in-line. EXP is the
5825 expression that is a call to the builtin function; if convenient,
5826 the result should be placed in TARGET. */
5827 static rtx
5828 expand_builtin_signbit (tree exp, rtx target)
5830 const struct real_format *fmt;
5831 enum machine_mode fmode, imode, rmode;
5832 HOST_WIDE_INT hi, lo;
5833 tree arg;
5834 int word, bitpos;
5835 enum insn_code icode;
5836 rtx temp;
5838 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5839 return NULL_RTX;
5841 arg = CALL_EXPR_ARG (exp, 0);
5842 fmode = TYPE_MODE (TREE_TYPE (arg));
5843 rmode = TYPE_MODE (TREE_TYPE (exp));
5844 fmt = REAL_MODE_FORMAT (fmode);
5846 arg = builtin_save_expr (arg);
5848 /* Expand the argument yielding a RTX expression. */
5849 temp = expand_normal (arg);
5851 /* Check if the back end provides an insn that handles signbit for the
5852 argument's mode. */
5853 icode = signbit_optab->handlers [(int) fmode].insn_code;
5854 if (icode != CODE_FOR_nothing)
5856 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5857 emit_unop_insn (icode, target, temp, UNKNOWN);
5858 return target;
5861 /* For floating point formats without a sign bit, implement signbit
5862 as "ARG < 0.0". */
5863 bitpos = fmt->signbit_ro;
5864 if (bitpos < 0)
5866 /* But we can't do this if the format supports signed zero. */
5867 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5868 return NULL_RTX;
5870 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5871 build_real (TREE_TYPE (arg), dconst0));
5872 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5875 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5877 imode = int_mode_for_mode (fmode);
5878 if (imode == BLKmode)
5879 return NULL_RTX;
5880 temp = gen_lowpart (imode, temp);
5882 else
5884 imode = word_mode;
5885 /* Handle targets with different FP word orders. */
5886 if (FLOAT_WORDS_BIG_ENDIAN)
5887 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5888 else
5889 word = bitpos / BITS_PER_WORD;
5890 temp = operand_subword_force (temp, word, fmode);
5891 bitpos = bitpos % BITS_PER_WORD;
5894 /* Force the intermediate word_mode (or narrower) result into a
5895 register. This avoids attempting to create paradoxical SUBREGs
5896 of floating point modes below. */
5897 temp = force_reg (imode, temp);
5899 /* If the bitpos is within the "result mode" lowpart, the operation
5900 can be implemented with a single bitwise AND. Otherwise, we need
5901 a right shift and an AND. */
5903 if (bitpos < GET_MODE_BITSIZE (rmode))
5905 if (bitpos < HOST_BITS_PER_WIDE_INT)
5907 hi = 0;
5908 lo = (HOST_WIDE_INT) 1 << bitpos;
5910 else
5912 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5913 lo = 0;
5916 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5917 temp = gen_lowpart (rmode, temp);
5918 temp = expand_binop (rmode, and_optab, temp,
5919 immed_double_const (lo, hi, rmode),
5920 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5922 else
5924 /* Perform a logical right shift to place the signbit in the least
5925 significant bit, then truncate the result to the desired mode
5926 and mask just this bit. */
5927 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5928 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5929 temp = gen_lowpart (rmode, temp);
5930 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5931 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5934 return temp;
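/* An illustrative sketch, assuming IEEE double (signbit_ro == 63) and a
   32-bit int result: the expansion behaves roughly like

     high_word & 0x80000000      on 32-bit-word targets (single AND), or
     (bits >> 63) & 1            on 64-bit-word targets (shift then AND),

   where "bits" / "high_word" denote the argument reinterpreted as an
   integer.  */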
5937 /* Expand fork or exec calls. TARGET is the desired target of the
5938 call. EXP is the call. FN is the
5939 FUNCTION_DECL of the actual function. IGNORE is nonzero if the
5940 value is to be ignored. */
5942 static rtx
5943 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5945 tree id, decl;
5946 tree call;
5948 /* If we are not profiling, just call the function. */
5949 if (!profile_arc_flag)
5950 return NULL_RTX;
5952 /* Otherwise call the wrapper. This should be equivalent for the rest of
5953 the compiler, so the code does not diverge, and the wrapper may run the
5954 code necessary to keep the profiling sane. */
5956 switch (DECL_FUNCTION_CODE (fn))
5958 case BUILT_IN_FORK:
5959 id = get_identifier ("__gcov_fork");
5960 break;
5962 case BUILT_IN_EXECL:
5963 id = get_identifier ("__gcov_execl");
5964 break;
5966 case BUILT_IN_EXECV:
5967 id = get_identifier ("__gcov_execv");
5968 break;
5970 case BUILT_IN_EXECLP:
5971 id = get_identifier ("__gcov_execlp");
5972 break;
5974 case BUILT_IN_EXECLE:
5975 id = get_identifier ("__gcov_execle");
5976 break;
5978 case BUILT_IN_EXECVP:
5979 id = get_identifier ("__gcov_execvp");
5980 break;
5982 case BUILT_IN_EXECVE:
5983 id = get_identifier ("__gcov_execve");
5984 break;
5986 default:
5987 gcc_unreachable ();
5990 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5991 FUNCTION_DECL, id, TREE_TYPE (fn));
5992 DECL_EXTERNAL (decl) = 1;
5993 TREE_PUBLIC (decl) = 1;
5994 DECL_ARTIFICIAL (decl) = 1;
5995 TREE_NOTHROW (decl) = 1;
5996 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5997 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5998 call = rewrite_call_expr (exp, 0, decl, 0);
5999 return expand_call (call, target, ignore);
6004 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6005 the pointer in these functions is void*, the tree optimizers may remove
6006 casts. The mode computed in expand_builtin isn't reliable either, due
6007 to __sync_bool_compare_and_swap.
6009 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6010 group of builtins. This gives us log2 of the mode size. */
6012 static inline enum machine_mode
6013 get_builtin_sync_mode (int fcode_diff)
6015 /* The size is not negotiable, so ask not to get BLKmode in return
6016 if the target indicates that a smaller size would be better. */
6017 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
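/* An illustrative sketch, assuming 8-bit units and the usual integer
   modes: for the __sync_fetch_and_add family

     fcode - BUILT_IN_FETCH_AND_ADD_1 == 0  =>  QImode (1 byte)
                                         1  =>  HImode (2 bytes)
                                         2  =>  SImode (4 bytes)
                                         3  =>  DImode (8 bytes)  */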
6020 /* Expand the memory expression LOC and return the appropriate memory operand
6021 for the builtin_sync operations. */
6023 static rtx
6024 get_builtin_sync_mem (tree loc, enum machine_mode mode)
6026 rtx addr, mem;
6028 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
6030 /* Note that we explicitly do not want any alias information for this
6031 memory, so that we kill all other live memories. Otherwise we don't
6032 satisfy the full barrier semantics of the intrinsic. */
6033 mem = validize_mem (gen_rtx_MEM (mode, addr));
6035 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
6036 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6037 MEM_VOLATILE_P (mem) = 1;
6039 return mem;
6042 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6043 EXP is the CALL_EXPR. CODE is the rtx code
6044 that corresponds to the arithmetic or logical operation from the name;
6045 an exception here is that NOT actually means NAND. TARGET is an optional
6046 place for us to store the results; AFTER is true for the xxx_and_fetch
6047 form (return the updated value), false for the fetch_and_xxx form (return
6048 the original value). IGNORE is true if we don't care about the result of the operation at all. */
6050 static rtx
6051 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
6052 enum rtx_code code, bool after,
6053 rtx target, bool ignore)
6055 rtx val, mem;
6056 enum machine_mode old_mode;
6057 location_t loc = EXPR_LOCATION (exp);
6059 if (code == NOT && warn_sync_nand)
6061 tree fndecl = get_callee_fndecl (exp);
6062 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6064 static bool warned_f_a_n, warned_n_a_f;
6066 switch (fcode)
6068 case BUILT_IN_FETCH_AND_NAND_1:
6069 case BUILT_IN_FETCH_AND_NAND_2:
6070 case BUILT_IN_FETCH_AND_NAND_4:
6071 case BUILT_IN_FETCH_AND_NAND_8:
6072 case BUILT_IN_FETCH_AND_NAND_16:
6074 if (warned_f_a_n)
6075 break;
6077 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
6078 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6079 warned_f_a_n = true;
6080 break;
6082 case BUILT_IN_NAND_AND_FETCH_1:
6083 case BUILT_IN_NAND_AND_FETCH_2:
6084 case BUILT_IN_NAND_AND_FETCH_4:
6085 case BUILT_IN_NAND_AND_FETCH_8:
6086 case BUILT_IN_NAND_AND_FETCH_16:
6088 if (warned_n_a_f)
6089 break;
6091 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
6092 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6093 warned_n_a_f = true;
6094 break;
6096 default:
6097 gcc_unreachable ();
6101 /* Expand the operands. */
6102 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6104 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6105 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6106 of CONST_INTs, where we know the old_mode only from the call argument. */
6107 old_mode = GET_MODE (val);
6108 if (old_mode == VOIDmode)
6109 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6110 val = convert_modes (mode, old_mode, val, 1);
6112 if (ignore)
6113 return expand_sync_operation (mem, val, code);
6114 else
6115 return expand_sync_fetch_operation (mem, val, code, after, target);
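/* An illustrative sketch of the two flavors routed through this
   function, e.g. for addition on an int V:

     int old_val = __sync_fetch_and_add (&v, 1);   AFTER == false
     int new_val = __sync_add_and_fetch (&v, 1);   AFTER == true

   For the NAND variants CODE is NOT; since GCC 4.4 the operation is
   ~(v & val) rather than ~v & val, hence the warning above.  */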
6118 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6119 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6120 true if this is the boolean form. TARGET is a place for us to store the
6121 results; this is NOT optional if IS_BOOL is true. */
6123 static rtx
6124 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
6125 bool is_bool, rtx target)
6127 rtx old_val, new_val, mem;
6128 enum machine_mode old_mode;
6130 /* Expand the operands. */
6131 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6134 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6135 mode, EXPAND_NORMAL);
6136 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6137 of CONST_INTs, where we know the old_mode only from the call argument. */
6138 old_mode = GET_MODE (old_val);
6139 if (old_mode == VOIDmode)
6140 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6141 old_val = convert_modes (mode, old_mode, old_val, 1);
6143 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6144 mode, EXPAND_NORMAL);
6145 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6146 of CONST_INTs, where we know the old_mode only from the call argument. */
6147 old_mode = GET_MODE (new_val);
6148 if (old_mode == VOIDmode)
6149 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6150 new_val = convert_modes (mode, old_mode, new_val, 1);
6152 if (is_bool)
6153 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6154 else
6155 return expand_val_compare_and_swap (mem, old_val, new_val, target);
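/* An illustrative sketch of the two entry points served here:

     int ok  = __sync_bool_compare_and_swap (&v, expected, desired);
     int old = __sync_val_compare_and_swap (&v, expected, desired);

   Both atomically store DESIRED into the location only if it still
   holds EXPECTED; the bool form reports success, the val form returns
   the value that was observed.  */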
6158 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6159 general form is actually an atomic exchange, and some targets only
6160 support a reduced form with the second argument being a constant 1.
6161 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6162 the results. */
6164 static rtx
6165 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6166 rtx target)
6168 rtx val, mem;
6169 enum machine_mode old_mode;
6171 /* Expand the operands. */
6172 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6173 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6174 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6175 of CONST_INTs, where we know the old_mode only from the call argument. */
6176 old_mode = GET_MODE (val);
6177 if (old_mode == VOIDmode)
6178 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6179 val = convert_modes (mode, old_mode, val, 1);
6181 return expand_sync_lock_test_and_set (mem, val, target);
6184 /* Expand the __sync_synchronize intrinsic. */
6186 static void
6187 expand_builtin_synchronize (void)
6189 tree x;
6191 #ifdef HAVE_memory_barrier
6192 if (HAVE_memory_barrier)
6194 emit_insn (gen_memory_barrier ());
6195 return;
6197 #endif
6199 if (synchronize_libfunc != NULL_RTX)
6201 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6202 return;
6205 /* If no explicit memory barrier instruction is available, create an
6206 empty asm stmt with a memory clobber. */
6207 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6208 tree_cons (NULL, build_string (6, "memory"), NULL));
6209 ASM_VOLATILE_P (x) = 1;
6210 expand_asm_expr (x);
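/* An illustrative sketch: the fallback built above corresponds to the
   source-level construct

     __asm__ __volatile__ ("" : : : "memory");

   which emits no instruction of its own but keeps memory accesses from
   being moved across it.  */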
6213 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6215 static void
6216 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6218 enum insn_code icode;
6219 rtx mem, insn;
6220 rtx val = const0_rtx;
6222 /* Expand the operands. */
6223 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6225 /* If there is an explicit operation in the md file, use it. */
6226 icode = sync_lock_release[mode];
6227 if (icode != CODE_FOR_nothing)
6229 if (!insn_data[icode].operand[1].predicate (val, mode))
6230 val = force_reg (mode, val);
6232 insn = GEN_FCN (icode) (mem, val);
6233 if (insn)
6235 emit_insn (insn);
6236 return;
6240 /* Otherwise we can implement this operation by emitting a barrier
6241 followed by a store of zero. */
6242 expand_builtin_synchronize ();
6243 emit_move_insn (mem, val);
6246 /* Expand an expression EXP that calls a built-in function,
6247 with result going to TARGET if that's convenient
6248 (and in mode MODE if that's convenient).
6249 SUBTARGET may be used as the target for computing one of EXP's operands.
6250 IGNORE is nonzero if the value is to be ignored. */
6252 rtx
6253 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6254 int ignore)
6256 tree fndecl = get_callee_fndecl (exp);
6257 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6258 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6260 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6261 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6263 /* When not optimizing, generate calls to library functions for a certain
6264 set of builtins. */
6265 if (!optimize
6266 && !called_as_built_in (fndecl)
6267 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6268 && fcode != BUILT_IN_ALLOCA
6269 && fcode != BUILT_IN_FREE)
6270 return expand_call (exp, target, ignore);
6272 /* The built-in function expanders test for target == const0_rtx
6273 to determine whether the function's result will be ignored. */
6274 if (ignore)
6275 target = const0_rtx;
6277 /* If the result of a pure or const built-in function is ignored, and
6278 none of its arguments are volatile, we can avoid expanding the
6279 built-in call and just evaluate the arguments for side-effects. */
6280 if (target == const0_rtx
6281 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6283 bool volatilep = false;
6284 tree arg;
6285 call_expr_arg_iterator iter;
6287 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6288 if (TREE_THIS_VOLATILE (arg))
6290 volatilep = true;
6291 break;
6294 if (! volatilep)
6296 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6297 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6298 return const0_rtx;
6302 switch (fcode)
6304 CASE_FLT_FN (BUILT_IN_FABS):
6305 target = expand_builtin_fabs (exp, target, subtarget);
6306 if (target)
6307 return target;
6308 break;
6310 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6311 target = expand_builtin_copysign (exp, target, subtarget);
6312 if (target)
6313 return target;
6314 break;
6316 /* Just do a normal library call if we were unable to fold
6317 the values. */
6318 CASE_FLT_FN (BUILT_IN_CABS):
6319 break;
6321 CASE_FLT_FN (BUILT_IN_EXP):
6322 CASE_FLT_FN (BUILT_IN_EXP10):
6323 CASE_FLT_FN (BUILT_IN_POW10):
6324 CASE_FLT_FN (BUILT_IN_EXP2):
6325 CASE_FLT_FN (BUILT_IN_EXPM1):
6326 CASE_FLT_FN (BUILT_IN_LOGB):
6327 CASE_FLT_FN (BUILT_IN_LOG):
6328 CASE_FLT_FN (BUILT_IN_LOG10):
6329 CASE_FLT_FN (BUILT_IN_LOG2):
6330 CASE_FLT_FN (BUILT_IN_LOG1P):
6331 CASE_FLT_FN (BUILT_IN_TAN):
6332 CASE_FLT_FN (BUILT_IN_ASIN):
6333 CASE_FLT_FN (BUILT_IN_ACOS):
6334 CASE_FLT_FN (BUILT_IN_ATAN):
6335 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6336 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6337 because of possible accuracy problems. */
6338 if (! flag_unsafe_math_optimizations)
6339 break;
6340 CASE_FLT_FN (BUILT_IN_SQRT):
6341 CASE_FLT_FN (BUILT_IN_FLOOR):
6342 CASE_FLT_FN (BUILT_IN_CEIL):
6343 CASE_FLT_FN (BUILT_IN_TRUNC):
6344 CASE_FLT_FN (BUILT_IN_ROUND):
6345 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6346 CASE_FLT_FN (BUILT_IN_RINT):
6347 target = expand_builtin_mathfn (exp, target, subtarget);
6348 if (target)
6349 return target;
6350 break;
6352 CASE_FLT_FN (BUILT_IN_ILOGB):
6353 if (! flag_unsafe_math_optimizations)
6354 break;
6355 CASE_FLT_FN (BUILT_IN_ISINF):
6356 CASE_FLT_FN (BUILT_IN_FINITE):
6357 case BUILT_IN_ISFINITE:
6358 case BUILT_IN_ISNORMAL:
6359 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6360 if (target)
6361 return target;
6362 break;
6364 CASE_FLT_FN (BUILT_IN_LCEIL):
6365 CASE_FLT_FN (BUILT_IN_LLCEIL):
6366 CASE_FLT_FN (BUILT_IN_LFLOOR):
6367 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6368 target = expand_builtin_int_roundingfn (exp, target);
6369 if (target)
6370 return target;
6371 break;
6373 CASE_FLT_FN (BUILT_IN_LRINT):
6374 CASE_FLT_FN (BUILT_IN_LLRINT):
6375 CASE_FLT_FN (BUILT_IN_LROUND):
6376 CASE_FLT_FN (BUILT_IN_LLROUND):
6377 target = expand_builtin_int_roundingfn_2 (exp, target);
6378 if (target)
6379 return target;
6380 break;
6382 CASE_FLT_FN (BUILT_IN_POW):
6383 target = expand_builtin_pow (exp, target, subtarget);
6384 if (target)
6385 return target;
6386 break;
6388 CASE_FLT_FN (BUILT_IN_POWI):
6389 target = expand_builtin_powi (exp, target, subtarget);
6390 if (target)
6391 return target;
6392 break;
6394 CASE_FLT_FN (BUILT_IN_ATAN2):
6395 CASE_FLT_FN (BUILT_IN_LDEXP):
6396 CASE_FLT_FN (BUILT_IN_SCALB):
6397 CASE_FLT_FN (BUILT_IN_SCALBN):
6398 CASE_FLT_FN (BUILT_IN_SCALBLN):
6399 if (! flag_unsafe_math_optimizations)
6400 break;
6402 CASE_FLT_FN (BUILT_IN_FMOD):
6403 CASE_FLT_FN (BUILT_IN_REMAINDER):
6404 CASE_FLT_FN (BUILT_IN_DREM):
6405 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6406 if (target)
6407 return target;
6408 break;
6410 CASE_FLT_FN (BUILT_IN_CEXPI):
6411 target = expand_builtin_cexpi (exp, target, subtarget);
6412 gcc_assert (target);
6413 return target;
6415 CASE_FLT_FN (BUILT_IN_SIN):
6416 CASE_FLT_FN (BUILT_IN_COS):
6417 if (! flag_unsafe_math_optimizations)
6418 break;
6419 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6420 if (target)
6421 return target;
6422 break;
6424 CASE_FLT_FN (BUILT_IN_SINCOS):
6425 if (! flag_unsafe_math_optimizations)
6426 break;
6427 target = expand_builtin_sincos (exp);
6428 if (target)
6429 return target;
6430 break;
6432 case BUILT_IN_APPLY_ARGS:
6433 return expand_builtin_apply_args ();
6435 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6436 FUNCTION with a copy of the parameters described by
6437 ARGUMENTS, and ARGSIZE. It returns a block of memory
6438 allocated on the stack into which is stored all the registers
6439 that might possibly be used for returning the result of a
6440 function. ARGUMENTS is the value returned by
6441 __builtin_apply_args. ARGSIZE is the number of bytes of
6442 arguments that must be copied. ??? How should this value be
6443 computed? We'll also need a safe worst case value for varargs
6444 functions. */
6445 case BUILT_IN_APPLY:
6446 if (!validate_arglist (exp, POINTER_TYPE,
6447 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6448 && !validate_arglist (exp, REFERENCE_TYPE,
6449 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6450 return const0_rtx;
6451 else
6453 rtx ops[3];
6455 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6456 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6457 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6459 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6462 /* __builtin_return (RESULT) causes the function to return the
6463 value described by RESULT. RESULT is address of the block of
6464 memory returned by __builtin_apply. */
6465 case BUILT_IN_RETURN:
6466 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6467 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6468 return const0_rtx;
6470 case BUILT_IN_SAVEREGS:
6471 return expand_builtin_saveregs ();
6473 case BUILT_IN_ARGS_INFO:
6474 return expand_builtin_args_info (exp);
6476 case BUILT_IN_VA_ARG_PACK:
6477 /* All valid uses of __builtin_va_arg_pack () are removed during
6478 inlining. */
6479 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6480 return const0_rtx;
6482 case BUILT_IN_VA_ARG_PACK_LEN:
6483 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6484 inlining. */
6485 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6486 return const0_rtx;
6488 /* Return the address of the first anonymous stack arg. */
6489 case BUILT_IN_NEXT_ARG:
6490 if (fold_builtin_next_arg (exp, false))
6491 return const0_rtx;
6492 return expand_builtin_next_arg ();
6494 case BUILT_IN_CLEAR_CACHE:
6495 target = expand_builtin___clear_cache (exp);
6496 if (target)
6497 return target;
6498 break;
6500 case BUILT_IN_CLASSIFY_TYPE:
6501 return expand_builtin_classify_type (exp);
6503 case BUILT_IN_CONSTANT_P:
6504 return const0_rtx;
6506 case BUILT_IN_FRAME_ADDRESS:
6507 case BUILT_IN_RETURN_ADDRESS:
6508 return expand_builtin_frame_address (fndecl, exp);
6510 /* Returns the address of the area where the structure is returned.
6511 0 otherwise. */
6512 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6513 if (call_expr_nargs (exp) != 0
6514 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6515 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6516 return const0_rtx;
6517 else
6518 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6520 case BUILT_IN_ALLOCA:
6521 target = expand_builtin_alloca (exp, target);
6522 if (target)
6523 return target;
6524 break;
6526 case BUILT_IN_STACK_SAVE:
6527 return expand_stack_save ();
6529 case BUILT_IN_STACK_RESTORE:
6530 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6531 return const0_rtx;
6533 case BUILT_IN_BSWAP32:
6534 case BUILT_IN_BSWAP64:
6535 target = expand_builtin_bswap (exp, target, subtarget);
6537 if (target)
6538 return target;
6539 break;
6541 CASE_INT_FN (BUILT_IN_FFS):
6542 case BUILT_IN_FFSIMAX:
6543 target = expand_builtin_unop (target_mode, exp, target,
6544 subtarget, ffs_optab);
6545 if (target)
6546 return target;
6547 break;
6549 CASE_INT_FN (BUILT_IN_CLZ):
6550 case BUILT_IN_CLZIMAX:
6551 target = expand_builtin_unop (target_mode, exp, target,
6552 subtarget, clz_optab);
6553 if (target)
6554 return target;
6555 break;
6557 CASE_INT_FN (BUILT_IN_CTZ):
6558 case BUILT_IN_CTZIMAX:
6559 target = expand_builtin_unop (target_mode, exp, target,
6560 subtarget, ctz_optab);
6561 if (target)
6562 return target;
6563 break;
6565 CASE_INT_FN (BUILT_IN_POPCOUNT):
6566 case BUILT_IN_POPCOUNTIMAX:
6567 target = expand_builtin_unop (target_mode, exp, target,
6568 subtarget, popcount_optab);
6569 if (target)
6570 return target;
6571 break;
6573 CASE_INT_FN (BUILT_IN_PARITY):
6574 case BUILT_IN_PARITYIMAX:
6575 target = expand_builtin_unop (target_mode, exp, target,
6576 subtarget, parity_optab);
6577 if (target)
6578 return target;
6579 break;
6581 case BUILT_IN_STRLEN:
6582 target = expand_builtin_strlen (exp, target, target_mode);
6583 if (target)
6584 return target;
6585 break;
6587 case BUILT_IN_STRCPY:
6588 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6589 if (target)
6590 return target;
6591 break;
6593 case BUILT_IN_STRNCPY:
6594 target = expand_builtin_strncpy (exp, target, mode);
6595 if (target)
6596 return target;
6597 break;
6599 case BUILT_IN_STPCPY:
6600 target = expand_builtin_stpcpy (exp, target, mode);
6601 if (target)
6602 return target;
6603 break;
6605 case BUILT_IN_STRCAT:
6606 target = expand_builtin_strcat (fndecl, exp, target, mode);
6607 if (target)
6608 return target;
6609 break;
6611 case BUILT_IN_STRNCAT:
6612 target = expand_builtin_strncat (exp, target, mode);
6613 if (target)
6614 return target;
6615 break;
6617 case BUILT_IN_STRSPN:
6618 target = expand_builtin_strspn (exp, target, mode);
6619 if (target)
6620 return target;
6621 break;
6623 case BUILT_IN_STRCSPN:
6624 target = expand_builtin_strcspn (exp, target, mode);
6625 if (target)
6626 return target;
6627 break;
6629 case BUILT_IN_STRSTR:
6630 target = expand_builtin_strstr (exp, target, mode);
6631 if (target)
6632 return target;
6633 break;
6635 case BUILT_IN_STRPBRK:
6636 target = expand_builtin_strpbrk (exp, target, mode);
6637 if (target)
6638 return target;
6639 break;
6641 case BUILT_IN_INDEX:
6642 case BUILT_IN_STRCHR:
6643 target = expand_builtin_strchr (exp, target, mode);
6644 if (target)
6645 return target;
6646 break;
6648 case BUILT_IN_RINDEX:
6649 case BUILT_IN_STRRCHR:
6650 target = expand_builtin_strrchr (exp, target, mode);
6651 if (target)
6652 return target;
6653 break;
6655 case BUILT_IN_MEMCPY:
6656 target = expand_builtin_memcpy (exp, target, mode);
6657 if (target)
6658 return target;
6659 break;
6661 case BUILT_IN_MEMPCPY:
6662 target = expand_builtin_mempcpy (exp, target, mode);
6663 if (target)
6664 return target;
6665 break;
6667 case BUILT_IN_MEMMOVE:
6668 target = expand_builtin_memmove (exp, target, mode, ignore);
6669 if (target)
6670 return target;
6671 break;
6673 case BUILT_IN_BCOPY:
6674 target = expand_builtin_bcopy (exp, ignore);
6675 if (target)
6676 return target;
6677 break;
6679 case BUILT_IN_MEMSET:
6680 target = expand_builtin_memset (exp, target, mode);
6681 if (target)
6682 return target;
6683 break;
6685 case BUILT_IN_BZERO:
6686 target = expand_builtin_bzero (exp);
6687 if (target)
6688 return target;
6689 break;
6691 case BUILT_IN_STRCMP:
6692 target = expand_builtin_strcmp (exp, target, mode);
6693 if (target)
6694 return target;
6695 break;
6697 case BUILT_IN_STRNCMP:
6698 target = expand_builtin_strncmp (exp, target, mode);
6699 if (target)
6700 return target;
6701 break;
6703 case BUILT_IN_MEMCHR:
6704 target = expand_builtin_memchr (exp, target, mode);
6705 if (target)
6706 return target;
6707 break;
6709 case BUILT_IN_BCMP:
6710 case BUILT_IN_MEMCMP:
6711 target = expand_builtin_memcmp (exp, target, mode);
6712 if (target)
6713 return target;
6714 break;
6716 case BUILT_IN_SETJMP:
6717 /* This should have been lowered to the builtins below. */
6718 gcc_unreachable ();
6720 case BUILT_IN_SETJMP_SETUP:
6721 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6722 and the receiver label. */
6723 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6725 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6726 VOIDmode, EXPAND_NORMAL);
6727 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6728 rtx label_r = label_rtx (label);
6730 /* This is copied from the handling of non-local gotos. */
6731 expand_builtin_setjmp_setup (buf_addr, label_r);
6732 nonlocal_goto_handler_labels
6733 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6734 nonlocal_goto_handler_labels);
6735 /* ??? Do not let expand_label treat us as such since we would
6736 not want to be both on the list of non-local labels and on
6737 the list of forced labels. */
6738 FORCED_LABEL (label) = 0;
6739 return const0_rtx;
6741 break;
6743 case BUILT_IN_SETJMP_DISPATCHER:
6744 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6745 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6747 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6748 rtx label_r = label_rtx (label);
6750 /* Remove the dispatcher label from the list of non-local labels
6751 since the receiver labels have been added to it above. */
6752 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6753 return const0_rtx;
6755 break;
6757 case BUILT_IN_SETJMP_RECEIVER:
6758 /* __builtin_setjmp_receiver is passed the receiver label. */
6759 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6761 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6762 rtx label_r = label_rtx (label);
6764 expand_builtin_setjmp_receiver (label_r);
6765 return const0_rtx;
6767 break;
6769 /* __builtin_longjmp is passed a pointer to an array of five words.
6770 It's similar to the C library longjmp function but works with
6771 __builtin_setjmp above. */
6772 case BUILT_IN_LONGJMP:
6773 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6775 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6776 VOIDmode, EXPAND_NORMAL);
6777 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6779 if (value != const1_rtx)
6781 error ("%<__builtin_longjmp%> second argument must be 1");
6782 return const0_rtx;
6785 expand_builtin_longjmp (buf_addr, value);
6786 return const0_rtx;
6788 break;
6790 case BUILT_IN_NONLOCAL_GOTO:
6791 target = expand_builtin_nonlocal_goto (exp);
6792 if (target)
6793 return target;
6794 break;
6796 /* This updates the setjmp buffer that is its argument with the value
6797 of the current stack pointer. */
6798 case BUILT_IN_UPDATE_SETJMP_BUF:
6799 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6801 rtx buf_addr
6802 = expand_normal (CALL_EXPR_ARG (exp, 0));
6804 expand_builtin_update_setjmp_buf (buf_addr);
6805 return const0_rtx;
6807 break;
6809 case BUILT_IN_TRAP:
6810 expand_builtin_trap ();
6811 return const0_rtx;
6813 case BUILT_IN_UNREACHABLE:
6814 expand_builtin_unreachable ();
6815 return const0_rtx;
6817 case BUILT_IN_PRINTF:
6818 target = expand_builtin_printf (exp, target, mode, false);
6819 if (target)
6820 return target;
6821 break;
6823 case BUILT_IN_PRINTF_UNLOCKED:
6824 target = expand_builtin_printf (exp, target, mode, true);
6825 if (target)
6826 return target;
6827 break;
6829 case BUILT_IN_FPUTS:
6830 target = expand_builtin_fputs (exp, target, false);
6831 if (target)
6832 return target;
6833 break;
6834 case BUILT_IN_FPUTS_UNLOCKED:
6835 target = expand_builtin_fputs (exp, target, true);
6836 if (target)
6837 return target;
6838 break;
6840 case BUILT_IN_FPRINTF:
6841 target = expand_builtin_fprintf (exp, target, mode, false);
6842 if (target)
6843 return target;
6844 break;
6846 case BUILT_IN_FPRINTF_UNLOCKED:
6847 target = expand_builtin_fprintf (exp, target, mode, true);
6848 if (target)
6849 return target;
6850 break;
6852 case BUILT_IN_SPRINTF:
6853 target = expand_builtin_sprintf (exp, target, mode);
6854 if (target)
6855 return target;
6856 break;
6858 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6859 case BUILT_IN_SIGNBITD32:
6860 case BUILT_IN_SIGNBITD64:
6861 case BUILT_IN_SIGNBITD128:
6862 target = expand_builtin_signbit (exp, target);
6863 if (target)
6864 return target;
6865 break;
6867 /* Various hooks for the DWARF 2 __throw routine. */
6868 case BUILT_IN_UNWIND_INIT:
6869 expand_builtin_unwind_init ();
6870 return const0_rtx;
6871 case BUILT_IN_DWARF_CFA:
6872 return virtual_cfa_rtx;
6873 #ifdef DWARF2_UNWIND_INFO
6874 case BUILT_IN_DWARF_SP_COLUMN:
6875 return expand_builtin_dwarf_sp_column ();
6876 case BUILT_IN_INIT_DWARF_REG_SIZES:
6877 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6878 return const0_rtx;
6879 #endif
6880 case BUILT_IN_FROB_RETURN_ADDR:
6881 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6882 case BUILT_IN_EXTRACT_RETURN_ADDR:
6883 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6884 case BUILT_IN_EH_RETURN:
6885 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6886 CALL_EXPR_ARG (exp, 1));
6887 return const0_rtx;
6888 #ifdef EH_RETURN_DATA_REGNO
6889 case BUILT_IN_EH_RETURN_DATA_REGNO:
6890 return expand_builtin_eh_return_data_regno (exp);
6891 #endif
6892 case BUILT_IN_EXTEND_POINTER:
6893 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6895 case BUILT_IN_VA_START:
6896 return expand_builtin_va_start (exp);
6897 case BUILT_IN_VA_END:
6898 return expand_builtin_va_end (exp);
6899 case BUILT_IN_VA_COPY:
6900 return expand_builtin_va_copy (exp);
6901 case BUILT_IN_EXPECT:
6902 return expand_builtin_expect (exp, target);
6903 case BUILT_IN_PREFETCH:
6904 expand_builtin_prefetch (exp);
6905 return const0_rtx;
6907 case BUILT_IN_PROFILE_FUNC_ENTER:
6908 return expand_builtin_profile_func (false);
6909 case BUILT_IN_PROFILE_FUNC_EXIT:
6910 return expand_builtin_profile_func (true);
6912 case BUILT_IN_INIT_TRAMPOLINE:
6913 return expand_builtin_init_trampoline (exp);
6914 case BUILT_IN_ADJUST_TRAMPOLINE:
6915 return expand_builtin_adjust_trampoline (exp);
6917 case BUILT_IN_FORK:
6918 case BUILT_IN_EXECL:
6919 case BUILT_IN_EXECV:
6920 case BUILT_IN_EXECLP:
6921 case BUILT_IN_EXECLE:
6922 case BUILT_IN_EXECVP:
6923 case BUILT_IN_EXECVE:
6924 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6925 if (target)
6926 return target;
6927 break;
6929 case BUILT_IN_FETCH_AND_ADD_1:
6930 case BUILT_IN_FETCH_AND_ADD_2:
6931 case BUILT_IN_FETCH_AND_ADD_4:
6932 case BUILT_IN_FETCH_AND_ADD_8:
6933 case BUILT_IN_FETCH_AND_ADD_16:
6934 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6935 target = expand_builtin_sync_operation (mode, exp, PLUS,
6936 false, target, ignore);
6937 if (target)
6938 return target;
6939 break;
6941 case BUILT_IN_FETCH_AND_SUB_1:
6942 case BUILT_IN_FETCH_AND_SUB_2:
6943 case BUILT_IN_FETCH_AND_SUB_4:
6944 case BUILT_IN_FETCH_AND_SUB_8:
6945 case BUILT_IN_FETCH_AND_SUB_16:
6946 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6947 target = expand_builtin_sync_operation (mode, exp, MINUS,
6948 false, target, ignore);
6949 if (target)
6950 return target;
6951 break;
6953 case BUILT_IN_FETCH_AND_OR_1:
6954 case BUILT_IN_FETCH_AND_OR_2:
6955 case BUILT_IN_FETCH_AND_OR_4:
6956 case BUILT_IN_FETCH_AND_OR_8:
6957 case BUILT_IN_FETCH_AND_OR_16:
6958 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6959 target = expand_builtin_sync_operation (mode, exp, IOR,
6960 false, target, ignore);
6961 if (target)
6962 return target;
6963 break;
6965 case BUILT_IN_FETCH_AND_AND_1:
6966 case BUILT_IN_FETCH_AND_AND_2:
6967 case BUILT_IN_FETCH_AND_AND_4:
6968 case BUILT_IN_FETCH_AND_AND_8:
6969 case BUILT_IN_FETCH_AND_AND_16:
6970 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6971 target = expand_builtin_sync_operation (mode, exp, AND,
6972 false, target, ignore);
6973 if (target)
6974 return target;
6975 break;
6977 case BUILT_IN_FETCH_AND_XOR_1:
6978 case BUILT_IN_FETCH_AND_XOR_2:
6979 case BUILT_IN_FETCH_AND_XOR_4:
6980 case BUILT_IN_FETCH_AND_XOR_8:
6981 case BUILT_IN_FETCH_AND_XOR_16:
6982 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6983 target = expand_builtin_sync_operation (mode, exp, XOR,
6984 false, target, ignore);
6985 if (target)
6986 return target;
6987 break;
6989 case BUILT_IN_FETCH_AND_NAND_1:
6990 case BUILT_IN_FETCH_AND_NAND_2:
6991 case BUILT_IN_FETCH_AND_NAND_4:
6992 case BUILT_IN_FETCH_AND_NAND_8:
6993 case BUILT_IN_FETCH_AND_NAND_16:
6994 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6995 target = expand_builtin_sync_operation (mode, exp, NOT,
6996 false, target, ignore);
6997 if (target)
6998 return target;
6999 break;
7001 case BUILT_IN_ADD_AND_FETCH_1:
7002 case BUILT_IN_ADD_AND_FETCH_2:
7003 case BUILT_IN_ADD_AND_FETCH_4:
7004 case BUILT_IN_ADD_AND_FETCH_8:
7005 case BUILT_IN_ADD_AND_FETCH_16:
7006 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
7007 target = expand_builtin_sync_operation (mode, exp, PLUS,
7008 true, target, ignore);
7009 if (target)
7010 return target;
7011 break;
7013 case BUILT_IN_SUB_AND_FETCH_1:
7014 case BUILT_IN_SUB_AND_FETCH_2:
7015 case BUILT_IN_SUB_AND_FETCH_4:
7016 case BUILT_IN_SUB_AND_FETCH_8:
7017 case BUILT_IN_SUB_AND_FETCH_16:
7018 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
7019 target = expand_builtin_sync_operation (mode, exp, MINUS,
7020 true, target, ignore);
7021 if (target)
7022 return target;
7023 break;
7025 case BUILT_IN_OR_AND_FETCH_1:
7026 case BUILT_IN_OR_AND_FETCH_2:
7027 case BUILT_IN_OR_AND_FETCH_4:
7028 case BUILT_IN_OR_AND_FETCH_8:
7029 case BUILT_IN_OR_AND_FETCH_16:
7030 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
7031 target = expand_builtin_sync_operation (mode, exp, IOR,
7032 true, target, ignore);
7033 if (target)
7034 return target;
7035 break;
7037 case BUILT_IN_AND_AND_FETCH_1:
7038 case BUILT_IN_AND_AND_FETCH_2:
7039 case BUILT_IN_AND_AND_FETCH_4:
7040 case BUILT_IN_AND_AND_FETCH_8:
7041 case BUILT_IN_AND_AND_FETCH_16:
7042 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
7043 target = expand_builtin_sync_operation (mode, exp, AND,
7044 true, target, ignore);
7045 if (target)
7046 return target;
7047 break;
7049 case BUILT_IN_XOR_AND_FETCH_1:
7050 case BUILT_IN_XOR_AND_FETCH_2:
7051 case BUILT_IN_XOR_AND_FETCH_4:
7052 case BUILT_IN_XOR_AND_FETCH_8:
7053 case BUILT_IN_XOR_AND_FETCH_16:
7054 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
7055 target = expand_builtin_sync_operation (mode, exp, XOR,
7056 true, target, ignore);
7057 if (target)
7058 return target;
7059 break;
7061 case BUILT_IN_NAND_AND_FETCH_1:
7062 case BUILT_IN_NAND_AND_FETCH_2:
7063 case BUILT_IN_NAND_AND_FETCH_4:
7064 case BUILT_IN_NAND_AND_FETCH_8:
7065 case BUILT_IN_NAND_AND_FETCH_16:
7066 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
7067 target = expand_builtin_sync_operation (mode, exp, NOT,
7068 true, target, ignore);
7069 if (target)
7070 return target;
7071 break;
7073 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
7074 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
7075 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
7076 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
7077 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
7078 if (mode == VOIDmode)
7079 mode = TYPE_MODE (boolean_type_node);
7080 if (!target || !register_operand (target, mode))
7081 target = gen_reg_rtx (mode);
7083 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
7084 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7085 if (target)
7086 return target;
7087 break;
7089 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
7090 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
7091 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
7092 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
7093 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
7094 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
7095 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7096 if (target)
7097 return target;
7098 break;
7100 case BUILT_IN_LOCK_TEST_AND_SET_1:
7101 case BUILT_IN_LOCK_TEST_AND_SET_2:
7102 case BUILT_IN_LOCK_TEST_AND_SET_4:
7103 case BUILT_IN_LOCK_TEST_AND_SET_8:
7104 case BUILT_IN_LOCK_TEST_AND_SET_16:
7105 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
7106 target = expand_builtin_lock_test_and_set (mode, exp, target);
7107 if (target)
7108 return target;
7109 break;
7111 case BUILT_IN_LOCK_RELEASE_1:
7112 case BUILT_IN_LOCK_RELEASE_2:
7113 case BUILT_IN_LOCK_RELEASE_4:
7114 case BUILT_IN_LOCK_RELEASE_8:
7115 case BUILT_IN_LOCK_RELEASE_16:
7116 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
7117 expand_builtin_lock_release (mode, exp);
7118 return const0_rtx;
7120 case BUILT_IN_SYNCHRONIZE:
7121 expand_builtin_synchronize ();
7122 return const0_rtx;
7124 case BUILT_IN_OBJECT_SIZE:
7125 return expand_builtin_object_size (exp);
7127 case BUILT_IN_MEMCPY_CHK:
7128 case BUILT_IN_MEMPCPY_CHK:
7129 case BUILT_IN_MEMMOVE_CHK:
7130 case BUILT_IN_MEMSET_CHK:
7131 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7132 if (target)
7133 return target;
7134 break;
7136 case BUILT_IN_STRCPY_CHK:
7137 case BUILT_IN_STPCPY_CHK:
7138 case BUILT_IN_STRNCPY_CHK:
7139 case BUILT_IN_STRCAT_CHK:
7140 case BUILT_IN_STRNCAT_CHK:
7141 case BUILT_IN_SNPRINTF_CHK:
7142 case BUILT_IN_VSNPRINTF_CHK:
7143 maybe_emit_chk_warning (exp, fcode);
7144 break;
7146 case BUILT_IN_SPRINTF_CHK:
7147 case BUILT_IN_VSPRINTF_CHK:
7148 maybe_emit_sprintf_chk_warning (exp, fcode);
7149 break;
7151 case BUILT_IN_FREE:
7152 maybe_emit_free_warning (exp);
7153 break;
7155 default: /* Just do library call, if unknown builtin. */
7156 break;
7159 /* The switch statement above can drop through to cause the function
7160 to be called normally. */
7161 return expand_call (exp, target, ignore);
7164 /* Determine whether a tree node represents a call to a built-in
7165 function. If the tree T is a call to a built-in function with
7166 the right number of arguments of the appropriate types, return
7167 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7168 Otherwise the return value is END_BUILTINS. */
7170 enum built_in_function
7171 builtin_mathfn_code (const_tree t)
7173 const_tree fndecl, arg, parmlist;
7174 const_tree argtype, parmtype;
7175 const_call_expr_arg_iterator iter;
7177 if (TREE_CODE (t) != CALL_EXPR
7178 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7179 return END_BUILTINS;
7181 fndecl = get_callee_fndecl (t);
7182 if (fndecl == NULL_TREE
7183 || TREE_CODE (fndecl) != FUNCTION_DECL
7184 || ! DECL_BUILT_IN (fndecl)
7185 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7186 return END_BUILTINS;
7188 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7189 init_const_call_expr_arg_iterator (t, &iter);
7190 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7192 /* If a function doesn't take a variable number of arguments,
7193 the last element in the list will have type `void'. */
7194 parmtype = TREE_VALUE (parmlist);
7195 if (VOID_TYPE_P (parmtype))
7197 if (more_const_call_expr_args_p (&iter))
7198 return END_BUILTINS;
7199 return DECL_FUNCTION_CODE (fndecl);
7202 if (! more_const_call_expr_args_p (&iter))
7203 return END_BUILTINS;
7205 arg = next_const_call_expr_arg (&iter);
7206 argtype = TREE_TYPE (arg);
7208 if (SCALAR_FLOAT_TYPE_P (parmtype))
7210 if (! SCALAR_FLOAT_TYPE_P (argtype))
7211 return END_BUILTINS;
7213 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7215 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7216 return END_BUILTINS;
7218 else if (POINTER_TYPE_P (parmtype))
7220 if (! POINTER_TYPE_P (argtype))
7221 return END_BUILTINS;
7223 else if (INTEGRAL_TYPE_P (parmtype))
7225 if (! INTEGRAL_TYPE_P (argtype))
7226 return END_BUILTINS;
7228 else
7229 return END_BUILTINS;
7232 /* Variable-length argument list. */
7233 return DECL_FUNCTION_CODE (fndecl);
7236 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7237 evaluate to a constant. */
7239 static tree
7240 fold_builtin_constant_p (tree arg)
7242 /* We return 1 for a numeric type that's known to be a constant
7243 value at compile-time or for an aggregate type that's a
7244 literal constant. */
7245 STRIP_NOPS (arg);
7247 /* If we know this is a constant, return the constant one. */
7248 if (CONSTANT_CLASS_P (arg)
7249 || (TREE_CODE (arg) == CONSTRUCTOR
7250 && TREE_CONSTANT (arg)))
7251 return integer_one_node;
7252 if (TREE_CODE (arg) == ADDR_EXPR)
7254 tree op = TREE_OPERAND (arg, 0);
7255 if (TREE_CODE (op) == STRING_CST
7256 || (TREE_CODE (op) == ARRAY_REF
7257 && integer_zerop (TREE_OPERAND (op, 1))
7258 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7259 return integer_one_node;
7262 /* If this expression has side effects, show we don't know it to be a
7263 constant. Likewise if it's a pointer or aggregate type, since in
7264 those cases we only want literals, as those are only optimized
7265 when generating RTL, not later.
7266 And finally, if we are compiling an initializer, not code, we
7267 need to return a definite result now; there's not going to be any
7268 more optimization done. */
7269 if (TREE_SIDE_EFFECTS (arg)
7270 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7271 || POINTER_TYPE_P (TREE_TYPE (arg))
7272 || cfun == 0
7273 || folding_initializer)
7274 return integer_zero_node;
7276 return NULL_TREE;
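/* For illustration: with the folding above, hypothetical user code such as
       __builtin_constant_p (42)    or    __builtin_constant_p ("abc")
   folds to 1, while an argument that has side effects or is of pointer
   or aggregate type folds to 0; anything else is left for later passes.  */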
7279 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7280 return it as a truthvalue. */
7282 static tree
7283 build_builtin_expect_predicate (tree pred, tree expected)
7285 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7287 fn = built_in_decls[BUILT_IN_EXPECT];
7288 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7289 ret_type = TREE_TYPE (TREE_TYPE (fn));
7290 pred_type = TREE_VALUE (arg_types);
7291 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7293 pred = fold_convert (pred_type, pred);
7294 expected = fold_convert (expected_type, expected);
7295 call_expr = build_call_expr (fn, 2, pred, expected);
7297 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7298 build_int_cst (ret_type, 0));
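/* For illustration: for a hypothetical predicate P and expected value E,
   the helper above builds the tree equivalent of
       __builtin_expect (P, E) != 0
   with P and E converted to the parameter types of __builtin_expect.  */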
7301 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7302 NULL_TREE if no simplification is possible. */
7304 static tree
7305 fold_builtin_expect (tree arg0, tree arg1)
7307 tree inner, fndecl;
7308 enum tree_code code;
7310 /* If this is a builtin_expect within a builtin_expect, keep the
7311 inner one. See through a comparison against a constant. It
7312 might have been added to create a truthvalue. */
7313 inner = arg0;
7314 if (COMPARISON_CLASS_P (inner)
7315 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7316 inner = TREE_OPERAND (inner, 0);
7318 if (TREE_CODE (inner) == CALL_EXPR
7319 && (fndecl = get_callee_fndecl (inner))
7320 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7321 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7322 return arg0;
7324 /* Distribute the expected value over short-circuiting operators.
7325 See through the cast from truthvalue_type_node to long. */
7326 inner = arg0;
7327 while (TREE_CODE (inner) == NOP_EXPR
7328 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7329 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7330 inner = TREE_OPERAND (inner, 0);
7332 code = TREE_CODE (inner);
7333 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7335 tree op0 = TREE_OPERAND (inner, 0);
7336 tree op1 = TREE_OPERAND (inner, 1);
7338 op0 = build_builtin_expect_predicate (op0, arg1);
7339 op1 = build_builtin_expect_predicate (op1, arg1);
7340 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7342 return fold_convert (TREE_TYPE (arg0), inner);
7345 /* If the argument isn't invariant then there's nothing else we can do. */
7346 if (!TREE_CONSTANT (arg0))
7347 return NULL_TREE;
7349 /* If we expect that a comparison against the argument will fold to
7350 a constant return the constant. In practice, this means a true
7351 constant or the address of a non-weak symbol. */
7352 inner = arg0;
7353 STRIP_NOPS (inner);
7354 if (TREE_CODE (inner) == ADDR_EXPR)
7358 inner = TREE_OPERAND (inner, 0);
7360 while (TREE_CODE (inner) == COMPONENT_REF
7361 || TREE_CODE (inner) == ARRAY_REF);
7362 if ((TREE_CODE (inner) == VAR_DECL
7363 || TREE_CODE (inner) == FUNCTION_DECL)
7364 && DECL_WEAK (inner))
7365 return NULL_TREE;
7368 /* Otherwise, ARG0 already has the proper type for the return value. */
7369 return arg0;
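/* For illustration: under the distribution above, a hypothetical call
       __builtin_expect (a && b, 1)
   is folded to roughly
       (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)
   converted back to the type of the original argument.  */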
7372 /* Fold a call to __builtin_classify_type with argument ARG. */
7374 static tree
7375 fold_builtin_classify_type (tree arg)
7377 if (arg == 0)
7378 return build_int_cst (NULL_TREE, no_type_class);
7380 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7383 /* Fold a call to __builtin_strlen with argument ARG. */
7385 static tree
7386 fold_builtin_strlen (tree arg)
7388 if (!validate_arg (arg, POINTER_TYPE))
7389 return NULL_TREE;
7390 else
7392 tree len = c_strlen (arg, 0);
7394 if (len)
7396 /* Convert from the internal "sizetype" type to "size_t". */
7397 if (size_type_node)
7398 len = fold_convert (size_type_node, len);
7399 return len;
7402 return NULL_TREE;
7406 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7408 static tree
7409 fold_builtin_inf (tree type, int warn)
7411 REAL_VALUE_TYPE real;
7413 /* __builtin_inff is intended to be usable to define INFINITY on all
7414 targets. If an infinity is not available, INFINITY expands "to a
7415 positive constant of type float that overflows at translation
7416 time", footnote "In this case, using INFINITY will violate the
7417 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7418 Thus we pedwarn to ensure this constraint violation is
7419 diagnosed. */
7420 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7421 pedwarn (input_location, 0, "target format does not support infinity");
7423 real_inf (&real);
7424 return build_real (type, real);
7427 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7429 static tree
7430 fold_builtin_nan (tree arg, tree type, int quiet)
7432 REAL_VALUE_TYPE real;
7433 const char *str;
7435 if (!validate_arg (arg, POINTER_TYPE))
7436 return NULL_TREE;
7437 str = c_getstr (arg);
7438 if (!str)
7439 return NULL_TREE;
7441 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7442 return NULL_TREE;
7444 return build_real (type, real);
7447 /* Return true if the floating point expression T has an integer value.
7448 We also allow +Inf, -Inf and NaN to be considered integer values. */
7450 static bool
7451 integer_valued_real_p (tree t)
7453 switch (TREE_CODE (t))
7455 case FLOAT_EXPR:
7456 return true;
7458 case ABS_EXPR:
7459 case SAVE_EXPR:
7460 return integer_valued_real_p (TREE_OPERAND (t, 0));
7462 case COMPOUND_EXPR:
7463 case MODIFY_EXPR:
7464 case BIND_EXPR:
7465 return integer_valued_real_p (TREE_OPERAND (t, 1));
7467 case PLUS_EXPR:
7468 case MINUS_EXPR:
7469 case MULT_EXPR:
7470 case MIN_EXPR:
7471 case MAX_EXPR:
7472 return integer_valued_real_p (TREE_OPERAND (t, 0))
7473 && integer_valued_real_p (TREE_OPERAND (t, 1));
7475 case COND_EXPR:
7476 return integer_valued_real_p (TREE_OPERAND (t, 1))
7477 && integer_valued_real_p (TREE_OPERAND (t, 2));
7479 case REAL_CST:
7480 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7482 case NOP_EXPR:
7484 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7485 if (TREE_CODE (type) == INTEGER_TYPE)
7486 return true;
7487 if (TREE_CODE (type) == REAL_TYPE)
7488 return integer_valued_real_p (TREE_OPERAND (t, 0));
7489 break;
7492 case CALL_EXPR:
7493 switch (builtin_mathfn_code (t))
7495 CASE_FLT_FN (BUILT_IN_CEIL):
7496 CASE_FLT_FN (BUILT_IN_FLOOR):
7497 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7498 CASE_FLT_FN (BUILT_IN_RINT):
7499 CASE_FLT_FN (BUILT_IN_ROUND):
7500 CASE_FLT_FN (BUILT_IN_TRUNC):
7501 return true;
7503 CASE_FLT_FN (BUILT_IN_FMIN):
7504 CASE_FLT_FN (BUILT_IN_FMAX):
7505 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7506 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7508 default:
7509 break;
7511 break;
7513 default:
7514 break;
7516 return false;
7519 /* FNDECL is assumed to be a builtin where truncation can be propagated
7520 across (for instance floor((double)f) == (double)floorf (f)).
7521 Do the transformation for a call with argument ARG. */
7523 static tree
7524 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7526 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7528 if (!validate_arg (arg, REAL_TYPE))
7529 return NULL_TREE;
7531 /* Integer rounding functions are idempotent. */
7532 if (fcode == builtin_mathfn_code (arg))
7533 return arg;
7535 /* If the argument is already integer valued, and we don't need to worry
7536 about setting errno, there's no need to perform rounding. */
7537 if (! flag_errno_math && integer_valued_real_p (arg))
7538 return arg;
7540 if (optimize)
7542 tree arg0 = strip_float_extensions (arg);
7543 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7544 tree newtype = TREE_TYPE (arg0);
7545 tree decl;
7547 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7548 && (decl = mathfn_built_in (newtype, fcode)))
7549 return fold_convert (ftype,
7550 build_call_expr (decl, 1,
7551 fold_convert (newtype, arg0)));
7553 return NULL_TREE;
7556 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7557 the argument, for instance lround((double)f) -> lroundf (f).
7558 Do the transformation for a call with argument ARG. */
7560 static tree
7561 fold_fixed_mathfn (tree fndecl, tree arg)
7563 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7565 if (!validate_arg (arg, REAL_TYPE))
7566 return NULL_TREE;
7568 /* If the argument is already integer valued, and we don't need to worry
7569 about setting errno, there's no need to perform rounding. */
7570 if (! flag_errno_math && integer_valued_real_p (arg))
7571 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7573 if (optimize)
7575 tree ftype = TREE_TYPE (arg);
7576 tree arg0 = strip_float_extensions (arg);
7577 tree newtype = TREE_TYPE (arg0);
7578 tree decl;
7580 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7581 && (decl = mathfn_built_in (newtype, fcode)))
7582 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7585 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7586 sizeof (long long) == sizeof (long). */
7587 if (TYPE_PRECISION (long_long_integer_type_node)
7588 == TYPE_PRECISION (long_integer_type_node))
7590 tree newfn = NULL_TREE;
7591 switch (fcode)
7593 CASE_FLT_FN (BUILT_IN_LLCEIL):
7594 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7595 break;
7597 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7598 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7599 break;
7601 CASE_FLT_FN (BUILT_IN_LLROUND):
7602 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7603 break;
7605 CASE_FLT_FN (BUILT_IN_LLRINT):
7606 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7607 break;
7609 default:
7610 break;
7613 if (newfn)
7615 tree newcall = build_call_expr (newfn, 1, arg);
7616 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7620 return NULL_TREE;
7623 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7624 return type. Return NULL_TREE if no simplification can be made. */
7626 static tree
7627 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7629 tree res;
7631 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7632 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7633 return NULL_TREE;
7635 /* Calculate the result when the argument is a constant. */
7636 if (TREE_CODE (arg) == COMPLEX_CST
7637 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7638 type, mpfr_hypot)))
7639 return res;
7641 if (TREE_CODE (arg) == COMPLEX_EXPR)
7643 tree real = TREE_OPERAND (arg, 0);
7644 tree imag = TREE_OPERAND (arg, 1);
7646 /* If either part is zero, cabs is fabs of the other. */
7647 if (real_zerop (real))
7648 return fold_build1 (ABS_EXPR, type, imag);
7649 if (real_zerop (imag))
7650 return fold_build1 (ABS_EXPR, type, real);
7652 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7653 if (flag_unsafe_math_optimizations
7654 && operand_equal_p (real, imag, OEP_PURE_SAME))
7656 const REAL_VALUE_TYPE sqrt2_trunc
7657 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7658 STRIP_NOPS (real);
7659 return fold_build2 (MULT_EXPR, type,
7660 fold_build1 (ABS_EXPR, type, real),
7661 build_real (type, sqrt2_trunc));
7665 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7666 if (TREE_CODE (arg) == NEGATE_EXPR
7667 || TREE_CODE (arg) == CONJ_EXPR)
7668 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7670 /* Don't do this when optimizing for size. */
7671 if (flag_unsafe_math_optimizations
7672 && optimize && optimize_function_for_speed_p (cfun))
7674 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7676 if (sqrtfn != NULL_TREE)
7678 tree rpart, ipart, result;
7680 arg = builtin_save_expr (arg);
7682 rpart = fold_build1 (REALPART_EXPR, type, arg);
7683 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7685 rpart = builtin_save_expr (rpart);
7686 ipart = builtin_save_expr (ipart);
7688 result = fold_build2 (PLUS_EXPR, type,
7689 fold_build2 (MULT_EXPR, type,
7690 rpart, rpart),
7691 fold_build2 (MULT_EXPR, type,
7692 ipart, ipart));
7694 return build_call_expr (sqrtfn, 1, result);
7698 return NULL_TREE;
7701 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7702 Return NULL_TREE if no simplification can be made. */
7704 static tree
7705 fold_builtin_sqrt (tree arg, tree type)
7708 enum built_in_function fcode;
7709 tree res;
7711 if (!validate_arg (arg, REAL_TYPE))
7712 return NULL_TREE;
7714 /* Calculate the result when the argument is a constant. */
7715 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7716 return res;
7718 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7719 fcode = builtin_mathfn_code (arg);
7720 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7722 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7723 arg = fold_build2 (MULT_EXPR, type,
7724 CALL_EXPR_ARG (arg, 0),
7725 build_real (type, dconsthalf));
7726 return build_call_expr (expfn, 1, arg);
7729 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7730 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7732 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7734 if (powfn)
7736 tree arg0 = CALL_EXPR_ARG (arg, 0);
7737 tree tree_root;
7738 /* The inner root was either sqrt or cbrt. */
7739 /* This was a conditional expression but it triggered a bug
7740 in Sun C 5.5. */
7741 REAL_VALUE_TYPE dconstroot;
7742 if (BUILTIN_SQRT_P (fcode))
7743 dconstroot = dconsthalf;
7744 else
7745 dconstroot = dconst_third ();
7747 /* Adjust for the outer root. */
7748 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7749 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7750 tree_root = build_real (type, dconstroot);
7751 return build_call_expr (powfn, 2, arg0, tree_root);
7755 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7756 if (flag_unsafe_math_optimizations
7757 && (fcode == BUILT_IN_POW
7758 || fcode == BUILT_IN_POWF
7759 || fcode == BUILT_IN_POWL))
7761 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7762 tree arg0 = CALL_EXPR_ARG (arg, 0);
7763 tree arg1 = CALL_EXPR_ARG (arg, 1);
7764 tree narg1;
7765 if (!tree_expr_nonnegative_p (arg0))
7766 arg0 = build1 (ABS_EXPR, type, arg0);
7767 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7768 build_real (type, dconsthalf));
7769 return build_call_expr (powfn, 2, arg0, narg1);
7772 return NULL_TREE;
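/* For illustration: with -funsafe-math-optimizations, hypothetical calls
       sqrt (exp (x))     fold to   exp (x * 0.5)
       sqrt (cbrt (x))    fold to   pow (x, 1.0/6.0)
       sqrt (pow (x, y))  fold to   pow (fabs (x), y * 0.5)
   per the transformations above.  */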
7775 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7776 Return NULL_TREE if no simplification can be made. */
7778 static tree
7779 fold_builtin_cbrt (tree arg, tree type)
7781 const enum built_in_function fcode = builtin_mathfn_code (arg);
7782 tree res;
7784 if (!validate_arg (arg, REAL_TYPE))
7785 return NULL_TREE;
7787 /* Calculate the result when the argument is a constant. */
7788 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7789 return res;
7791 if (flag_unsafe_math_optimizations)
7793 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7794 if (BUILTIN_EXPONENT_P (fcode))
7796 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7797 const REAL_VALUE_TYPE third_trunc =
7798 real_value_truncate (TYPE_MODE (type), dconst_third ());
7799 arg = fold_build2 (MULT_EXPR, type,
7800 CALL_EXPR_ARG (arg, 0),
7801 build_real (type, third_trunc));
7802 return build_call_expr (expfn, 1, arg);
7805 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7806 if (BUILTIN_SQRT_P (fcode))
7808 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7810 if (powfn)
7812 tree arg0 = CALL_EXPR_ARG (arg, 0);
7813 tree tree_root;
7814 REAL_VALUE_TYPE dconstroot = dconst_third ();
7816 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7817 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7818 tree_root = build_real (type, dconstroot);
7819 return build_call_expr (powfn, 2, arg0, tree_root);
7823 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7824 if (BUILTIN_CBRT_P (fcode))
7826 tree arg0 = CALL_EXPR_ARG (arg, 0);
7827 if (tree_expr_nonnegative_p (arg0))
7829 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7831 if (powfn)
7833 tree tree_root;
7834 REAL_VALUE_TYPE dconstroot;
7836 real_arithmetic (&dconstroot, MULT_EXPR,
7837 dconst_third_ptr (), dconst_third_ptr ());
7838 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7839 tree_root = build_real (type, dconstroot);
7840 return build_call_expr (powfn, 2, arg0, tree_root);
7845 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7846 if (fcode == BUILT_IN_POW
7847 || fcode == BUILT_IN_POWF
7848 || fcode == BUILT_IN_POWL)
7850 tree arg00 = CALL_EXPR_ARG (arg, 0);
7851 tree arg01 = CALL_EXPR_ARG (arg, 1);
7852 if (tree_expr_nonnegative_p (arg00))
7854 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7855 const REAL_VALUE_TYPE dconstroot
7856 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7857 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7858 build_real (type, dconstroot));
7859 return build_call_expr (powfn, 2, arg00, narg01);
7863 return NULL_TREE;
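/* For illustration: with -funsafe-math-optimizations, hypothetical calls
       cbrt (exp (x))     fold to   exp (x / 3)
       cbrt (sqrt (x))    fold to   pow (x, 1.0/6.0)
       cbrt (pow (x, y))  fold to   pow (x, y / 3)   iff x is nonnegative
   per the transformations above.  */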
7866 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7867 TYPE is the type of the return value. Return NULL_TREE if no
7868 simplification can be made. */
7870 static tree
7871 fold_builtin_cos (tree arg, tree type, tree fndecl)
7873 tree res, narg;
7875 if (!validate_arg (arg, REAL_TYPE))
7876 return NULL_TREE;
7878 /* Calculate the result when the argument is a constant. */
7879 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7880 return res;
7882 /* Optimize cos(-x) into cos (x). */
7883 if ((narg = fold_strip_sign_ops (arg)))
7884 return build_call_expr (fndecl, 1, narg);
7886 return NULL_TREE;
7889 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7890 Return NULL_TREE if no simplification can be made. */
7892 static tree
7893 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7895 if (validate_arg (arg, REAL_TYPE))
7897 tree res, narg;
7899 /* Calculate the result when the argument is a constant. */
7900 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7901 return res;
7903 /* Optimize cosh(-x) into cosh (x). */
7904 if ((narg = fold_strip_sign_ops (arg)))
7905 return build_call_expr (fndecl, 1, narg);
7908 return NULL_TREE;
7911 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7912 argument ARG. TYPE is the type of the return value. Return
7913 NULL_TREE if no simplification can be made. */
7915 static tree
7916 fold_builtin_ccos (tree arg, tree type ATTRIBUTE_UNUSED, tree fndecl,
7917 bool hyper ATTRIBUTE_UNUSED)
7919 if (validate_arg (arg, COMPLEX_TYPE)
7920 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7922 tree tmp;
7924 #ifdef HAVE_mpc
7925 /* Calculate the result when the argument is a constant. */
7926 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7927 return tmp;
7928 #endif
7930 /* Optimize fn(-x) into fn(x). */
7931 if ((tmp = fold_strip_sign_ops (arg)))
7932 return build_call_expr (fndecl, 1, tmp);
7935 return NULL_TREE;
7938 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7939 Return NULL_TREE if no simplification can be made. */
7941 static tree
7942 fold_builtin_tan (tree arg, tree type)
7944 enum built_in_function fcode;
7945 tree res;
7947 if (!validate_arg (arg, REAL_TYPE))
7948 return NULL_TREE;
7950 /* Calculate the result when the argument is a constant. */
7951 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7952 return res;
7954 /* Optimize tan(atan(x)) = x. */
7955 fcode = builtin_mathfn_code (arg);
7956 if (flag_unsafe_math_optimizations
7957 && (fcode == BUILT_IN_ATAN
7958 || fcode == BUILT_IN_ATANF
7959 || fcode == BUILT_IN_ATANL))
7960 return CALL_EXPR_ARG (arg, 0);
7962 return NULL_TREE;
7965 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7966 NULL_TREE if no simplification can be made. */
7968 static tree
7969 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7971 tree type;
7972 tree res, fn, call;
7974 if (!validate_arg (arg0, REAL_TYPE)
7975 || !validate_arg (arg1, POINTER_TYPE)
7976 || !validate_arg (arg2, POINTER_TYPE))
7977 return NULL_TREE;
7979 type = TREE_TYPE (arg0);
7981 /* Calculate the result when the argument is a constant. */
7982 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7983 return res;
7985 /* Canonicalize sincos to cexpi. */
7986 if (!TARGET_C99_FUNCTIONS)
7987 return NULL_TREE;
7988 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7989 if (!fn)
7990 return NULL_TREE;
7992 call = build_call_expr (fn, 1, arg0);
7993 call = builtin_save_expr (call);
7995 return build2 (COMPOUND_EXPR, void_type_node,
7996 build2 (MODIFY_EXPR, void_type_node,
7997 build_fold_indirect_ref (arg1),
7998 build1 (IMAGPART_EXPR, type, call)),
7999 build2 (MODIFY_EXPR, void_type_node,
8000 build_fold_indirect_ref (arg2),
8001 build1 (REALPART_EXPR, type, call)));
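/* For illustration: when cexpi is available, a hypothetical call
       sincos (x, sp, cp)
   is folded to the equivalent of
       tmp = cexpi (x);  *sp = __imag__ tmp;  *cp = __real__ tmp;
   so that a single cexpi call computes both results.  */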
8004 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8005 NULL_TREE if no simplification can be made. */
8007 static tree
8008 fold_builtin_cexp (tree arg0, tree type)
8010 tree rtype;
8011 tree realp, imagp, ifn;
8012 #ifdef HAVE_mpc
8013 tree res;
8014 #endif
8016 if (!validate_arg (arg0, COMPLEX_TYPE)
8017 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
8018 return NULL_TREE;
8020 #ifdef HAVE_mpc
8021 /* Calculate the result when the argument is a constant. */
8022 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8023 return res;
8024 #endif
8026 rtype = TREE_TYPE (TREE_TYPE (arg0));
8028 /* In case we can figure out the real part of arg0 and it is constant zero,
8029 fold to cexpi. */
8030 if (!TARGET_C99_FUNCTIONS)
8031 return NULL_TREE;
8032 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8033 if (!ifn)
8034 return NULL_TREE;
8036 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
8037 && real_zerop (realp))
8039 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
8040 return build_call_expr (ifn, 1, narg);
8043 /* In case we can easily decompose real and imaginary parts, split cexp
8044 to exp (r) * cexpi (i). */
8045 if (flag_unsafe_math_optimizations
8046 && realp)
8048 tree rfn, rcall, icall;
8050 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8051 if (!rfn)
8052 return NULL_TREE;
8054 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
8055 if (!imagp)
8056 return NULL_TREE;
8058 icall = build_call_expr (ifn, 1, imagp);
8059 icall = builtin_save_expr (icall);
8060 rcall = build_call_expr (rfn, 1, realp);
8061 rcall = builtin_save_expr (rcall);
8062 return fold_build2 (COMPLEX_EXPR, type,
8063 fold_build2 (MULT_EXPR, rtype,
8064 rcall,
8065 fold_build1 (REALPART_EXPR, rtype, icall)),
8066 fold_build2 (MULT_EXPR, rtype,
8067 rcall,
8068 fold_build1 (IMAGPART_EXPR, rtype, icall)));
8071 return NULL_TREE;
8074 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8075 Return NULL_TREE if no simplification can be made. */
8077 static tree
8078 fold_builtin_trunc (tree fndecl, tree arg)
8080 if (!validate_arg (arg, REAL_TYPE))
8081 return NULL_TREE;
8083 /* Optimize trunc of constant value. */
8084 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8086 REAL_VALUE_TYPE r, x;
8087 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8089 x = TREE_REAL_CST (arg);
8090 real_trunc (&r, TYPE_MODE (type), &x);
8091 return build_real (type, r);
8094 return fold_trunc_transparent_mathfn (fndecl, arg);
8097 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8098 Return NULL_TREE if no simplification can be made. */
8100 static tree
8101 fold_builtin_floor (tree fndecl, tree arg)
8103 if (!validate_arg (arg, REAL_TYPE))
8104 return NULL_TREE;
8106 /* Optimize floor of constant value. */
8107 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8109 REAL_VALUE_TYPE x;
8111 x = TREE_REAL_CST (arg);
8112 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8114 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8115 REAL_VALUE_TYPE r;
8117 real_floor (&r, TYPE_MODE (type), &x);
8118 return build_real (type, r);
8122 /* Fold floor (x) where x is nonnegative to trunc (x). */
8123 if (tree_expr_nonnegative_p (arg))
8125 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8126 if (truncfn)
8127 return build_call_expr (truncfn, 1, arg);
8130 return fold_trunc_transparent_mathfn (fndecl, arg);
8133 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8134 Return NULL_TREE if no simplification can be made. */
8136 static tree
8137 fold_builtin_ceil (tree fndecl, tree arg)
8139 if (!validate_arg (arg, REAL_TYPE))
8140 return NULL_TREE;
8142 /* Optimize ceil of constant value. */
8143 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8145 REAL_VALUE_TYPE x;
8147 x = TREE_REAL_CST (arg);
8148 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8150 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8151 REAL_VALUE_TYPE r;
8153 real_ceil (&r, TYPE_MODE (type), &x);
8154 return build_real (type, r);
8158 return fold_trunc_transparent_mathfn (fndecl, arg);
8161 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8162 Return NULL_TREE if no simplification can be made. */
8164 static tree
8165 fold_builtin_round (tree fndecl, tree arg)
8167 if (!validate_arg (arg, REAL_TYPE))
8168 return NULL_TREE;
8170 /* Optimize round of constant value. */
8171 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8173 REAL_VALUE_TYPE x;
8175 x = TREE_REAL_CST (arg);
8176 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8178 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8179 REAL_VALUE_TYPE r;
8181 real_round (&r, TYPE_MODE (type), &x);
8182 return build_real (type, r);
8186 return fold_trunc_transparent_mathfn (fndecl, arg);
8189 /* Fold function call to builtin lround, lroundf or lroundl (or the
8190 corresponding long long versions) and other rounding functions. ARG
8191 is the argument to the call. Return NULL_TREE if no simplification
8192 can be made. */
8194 static tree
8195 fold_builtin_int_roundingfn (tree fndecl, tree arg)
8197 if (!validate_arg (arg, REAL_TYPE))
8198 return NULL_TREE;
8200 /* Optimize lround of constant value. */
8201 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8203 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8205 if (real_isfinite (&x))
8207 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8208 tree ftype = TREE_TYPE (arg);
8209 unsigned HOST_WIDE_INT lo2;
8210 HOST_WIDE_INT hi, lo;
8211 REAL_VALUE_TYPE r;
8213 switch (DECL_FUNCTION_CODE (fndecl))
8215 CASE_FLT_FN (BUILT_IN_LFLOOR):
8216 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8217 real_floor (&r, TYPE_MODE (ftype), &x);
8218 break;
8220 CASE_FLT_FN (BUILT_IN_LCEIL):
8221 CASE_FLT_FN (BUILT_IN_LLCEIL):
8222 real_ceil (&r, TYPE_MODE (ftype), &x);
8223 break;
8225 CASE_FLT_FN (BUILT_IN_LROUND):
8226 CASE_FLT_FN (BUILT_IN_LLROUND):
8227 real_round (&r, TYPE_MODE (ftype), &x);
8228 break;
8230 default:
8231 gcc_unreachable ();
8234 REAL_VALUE_TO_INT (&lo, &hi, r);
8235 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8236 return build_int_cst_wide (itype, lo2, hi);
8240 switch (DECL_FUNCTION_CODE (fndecl))
8242 CASE_FLT_FN (BUILT_IN_LFLOOR):
8243 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8244 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8245 if (tree_expr_nonnegative_p (arg))
8246 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8247 arg);
8248 break;
8249 default:;
8252 return fold_fixed_mathfn (fndecl, arg);
8255 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8256 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8257 the argument to the call. Return NULL_TREE if no simplification can
8258 be made. */
8260 static tree
8261 fold_builtin_bitop (tree fndecl, tree arg)
8263 if (!validate_arg (arg, INTEGER_TYPE))
8264 return NULL_TREE;
8266 /* Optimize for constant argument. */
8267 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8269 HOST_WIDE_INT hi, width, result;
8270 unsigned HOST_WIDE_INT lo;
8271 tree type;
8273 type = TREE_TYPE (arg);
8274 width = TYPE_PRECISION (type);
8275 lo = TREE_INT_CST_LOW (arg);
8277 /* Clear all the bits that are beyond the type's precision. */
8278 if (width > HOST_BITS_PER_WIDE_INT)
8280 hi = TREE_INT_CST_HIGH (arg);
8281 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8282 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8284 else
8286 hi = 0;
8287 if (width < HOST_BITS_PER_WIDE_INT)
8288 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8291 switch (DECL_FUNCTION_CODE (fndecl))
8293 CASE_INT_FN (BUILT_IN_FFS):
8294 if (lo != 0)
8295 result = exact_log2 (lo & -lo) + 1;
8296 else if (hi != 0)
8297 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8298 else
8299 result = 0;
8300 break;
8302 CASE_INT_FN (BUILT_IN_CLZ):
8303 if (hi != 0)
8304 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8305 else if (lo != 0)
8306 result = width - floor_log2 (lo) - 1;
8307 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8308 result = width;
8309 break;
8311 CASE_INT_FN (BUILT_IN_CTZ):
8312 if (lo != 0)
8313 result = exact_log2 (lo & -lo);
8314 else if (hi != 0)
8315 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8316 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8317 result = width;
8318 break;
8320 CASE_INT_FN (BUILT_IN_POPCOUNT):
8321 result = 0;
8322 while (lo)
8323 result++, lo &= lo - 1;
8324 while (hi)
8325 result++, hi &= hi - 1;
8326 break;
8328 CASE_INT_FN (BUILT_IN_PARITY):
8329 result = 0;
8330 while (lo)
8331 result++, lo &= lo - 1;
8332 while (hi)
8333 result++, hi &= hi - 1;
8334 result &= 1;
8335 break;
8337 default:
8338 gcc_unreachable ();
8341 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8344 return NULL_TREE;
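/* For illustration: the constant folding above turns hypothetical calls
       __builtin_ffs (0)          into 0
       __builtin_ctz (8)          into 3
       __builtin_clz (1)          into 31   (for a 32-bit int)
       __builtin_popcount (0xff)  into 8
       __builtin_parity (7)       into 1
   at compile time.  */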
8347 /* Fold function call to builtin_bswap and the long and long long
8348 variants. Return NULL_TREE if no simplification can be made. */
8349 static tree
8350 fold_builtin_bswap (tree fndecl, tree arg)
8352 if (! validate_arg (arg, INTEGER_TYPE))
8353 return NULL_TREE;
8355 /* Optimize constant value. */
8356 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8358 HOST_WIDE_INT hi, width, r_hi = 0;
8359 unsigned HOST_WIDE_INT lo, r_lo = 0;
8360 tree type;
8362 type = TREE_TYPE (arg);
8363 width = TYPE_PRECISION (type);
8364 lo = TREE_INT_CST_LOW (arg);
8365 hi = TREE_INT_CST_HIGH (arg);
8367 switch (DECL_FUNCTION_CODE (fndecl))
8369 case BUILT_IN_BSWAP32:
8370 case BUILT_IN_BSWAP64:
8372 int s;
8374 for (s = 0; s < width; s += 8)
8376 int d = width - s - 8;
8377 unsigned HOST_WIDE_INT byte;
8379 if (s < HOST_BITS_PER_WIDE_INT)
8380 byte = (lo >> s) & 0xff;
8381 else
8382 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8384 if (d < HOST_BITS_PER_WIDE_INT)
8385 r_lo |= byte << d;
8386 else
8387 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8391 break;
8393 default:
8394 gcc_unreachable ();
8397 if (width < HOST_BITS_PER_WIDE_INT)
8398 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8399 else
8400 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8403 return NULL_TREE;
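/* For illustration: the constant folding above turns a hypothetical call
       __builtin_bswap32 (0x12345678)
   into the constant 0x78563412 at compile time.  */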
8406 /* A subroutine of fold_builtin to fold the various logarithmic
8407 functions. Return NULL_TREE if no simplification can be made.
8408 FUNC is the corresponding MPFR logarithm function. */
8410 static tree
8411 fold_builtin_logarithm (tree fndecl, tree arg,
8412 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8414 if (validate_arg (arg, REAL_TYPE))
8416 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8417 tree res;
8418 const enum built_in_function fcode = builtin_mathfn_code (arg);
8420 /* Calculate the result when the argument is a constant. */
8421 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8422 return res;
8424 /* Special case, optimize logN(expN(x)) = x. */
8425 if (flag_unsafe_math_optimizations
8426 && ((func == mpfr_log
8427 && (fcode == BUILT_IN_EXP
8428 || fcode == BUILT_IN_EXPF
8429 || fcode == BUILT_IN_EXPL))
8430 || (func == mpfr_log2
8431 && (fcode == BUILT_IN_EXP2
8432 || fcode == BUILT_IN_EXP2F
8433 || fcode == BUILT_IN_EXP2L))
8434 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8435 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8437 /* Optimize logN(func()) for various exponential functions. We
8438 want to determine the value "x" and the power "exponent" in
8439 order to transform logN(x**exponent) into exponent*logN(x). */
8440 if (flag_unsafe_math_optimizations)
8442 tree exponent = 0, x = 0;
8444 switch (fcode)
8446 CASE_FLT_FN (BUILT_IN_EXP):
8447 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8448 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8449 dconst_e ()));
8450 exponent = CALL_EXPR_ARG (arg, 0);
8451 break;
8452 CASE_FLT_FN (BUILT_IN_EXP2):
8453 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8454 x = build_real (type, dconst2);
8455 exponent = CALL_EXPR_ARG (arg, 0);
8456 break;
8457 CASE_FLT_FN (BUILT_IN_EXP10):
8458 CASE_FLT_FN (BUILT_IN_POW10):
8459 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8461 REAL_VALUE_TYPE dconst10;
8462 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8463 x = build_real (type, dconst10);
8465 exponent = CALL_EXPR_ARG (arg, 0);
8466 break;
8467 CASE_FLT_FN (BUILT_IN_SQRT):
8468 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8469 x = CALL_EXPR_ARG (arg, 0);
8470 exponent = build_real (type, dconsthalf);
8471 break;
8472 CASE_FLT_FN (BUILT_IN_CBRT):
8473 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8474 x = CALL_EXPR_ARG (arg, 0);
8475 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8476 dconst_third ()));
8477 break;
8478 CASE_FLT_FN (BUILT_IN_POW):
8479 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8480 x = CALL_EXPR_ARG (arg, 0);
8481 exponent = CALL_EXPR_ARG (arg, 1);
8482 break;
8483 default:
8484 break;
8487 /* Now perform the optimization. */
8488 if (x && exponent)
8490 tree logfn = build_call_expr (fndecl, 1, x);
8491 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8496 return NULL_TREE;
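/* For illustration: with -funsafe-math-optimizations, hypothetical calls
       log (exp (x))     fold to   x
       log (sqrt (x))    fold to   0.5 * log (x)
       log (pow (x, y))  fold to   y * log (x)
   per the transformations above.  */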
8499 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8500 NULL_TREE if no simplification can be made. */
8502 static tree
8503 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8505 tree res, narg0, narg1;
8507 if (!validate_arg (arg0, REAL_TYPE)
8508 || !validate_arg (arg1, REAL_TYPE))
8509 return NULL_TREE;
8511 /* Calculate the result when the argument is a constant. */
8512 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8513 return res;
8515 /* If either argument to hypot has a negate or abs, strip that off.
8516 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8517 narg0 = fold_strip_sign_ops (arg0);
8518 narg1 = fold_strip_sign_ops (arg1);
8519 if (narg0 || narg1)
8521 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8522 narg1 ? narg1 : arg1);
8525 /* If either argument is zero, hypot is fabs of the other. */
8526 if (real_zerop (arg0))
8527 return fold_build1 (ABS_EXPR, type, arg1);
8528 else if (real_zerop (arg1))
8529 return fold_build1 (ABS_EXPR, type, arg0);
8531 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8532 if (flag_unsafe_math_optimizations
8533 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8535 const REAL_VALUE_TYPE sqrt2_trunc
8536 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8537 return fold_build2 (MULT_EXPR, type,
8538 fold_build1 (ABS_EXPR, type, arg0),
8539 build_real (type, sqrt2_trunc));
8542 return NULL_TREE;
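/* For illustration: the folding above turns hypothetical calls
       hypot (-x, fabs (y))  into  hypot (x, y)
       hypot (x, 0.0)        into  fabs (x)
   and, with -funsafe-math-optimizations,
       hypot (x, x)          into  fabs (x) * sqrt (2).  */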
8546 /* Fold a builtin function call to pow, powf, or powl. Return
8547 NULL_TREE if no simplification can be made. */
8548 static tree
8549 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8551 tree res;
8553 if (!validate_arg (arg0, REAL_TYPE)
8554 || !validate_arg (arg1, REAL_TYPE))
8555 return NULL_TREE;
8557 /* Calculate the result when the argument is a constant. */
8558 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8559 return res;
8561 /* Optimize pow(1.0,y) = 1.0. */
8562 if (real_onep (arg0))
8563 return omit_one_operand (type, build_real (type, dconst1), arg1);
8565 if (TREE_CODE (arg1) == REAL_CST
8566 && !TREE_OVERFLOW (arg1))
8568 REAL_VALUE_TYPE cint;
8569 REAL_VALUE_TYPE c;
8570 HOST_WIDE_INT n;
8572 c = TREE_REAL_CST (arg1);
8574 /* Optimize pow(x,0.0) = 1.0. */
8575 if (REAL_VALUES_EQUAL (c, dconst0))
8576 return omit_one_operand (type, build_real (type, dconst1),
8577 arg0);
8579 /* Optimize pow(x,1.0) = x. */
8580 if (REAL_VALUES_EQUAL (c, dconst1))
8581 return arg0;
8583 /* Optimize pow(x,-1.0) = 1.0/x. */
8584 if (REAL_VALUES_EQUAL (c, dconstm1))
8585 return fold_build2 (RDIV_EXPR, type,
8586 build_real (type, dconst1), arg0);
8588 /* Optimize pow(x,0.5) = sqrt(x). */
8589 if (flag_unsafe_math_optimizations
8590 && REAL_VALUES_EQUAL (c, dconsthalf))
8592 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8594 if (sqrtfn != NULL_TREE)
8595 return build_call_expr (sqrtfn, 1, arg0);
8598 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8599 if (flag_unsafe_math_optimizations)
8601 const REAL_VALUE_TYPE dconstroot
8602 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8604 if (REAL_VALUES_EQUAL (c, dconstroot))
8606 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8607 if (cbrtfn != NULL_TREE)
8608 return build_call_expr (cbrtfn, 1, arg0);
8612 /* Check for an integer exponent. */
8613 n = real_to_integer (&c);
8614 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8615 if (real_identical (&c, &cint))
8617 /* Attempt to evaluate pow at compile-time, unless this should
8618 raise an exception. */
8619 if (TREE_CODE (arg0) == REAL_CST
8620 && !TREE_OVERFLOW (arg0)
8621 && (n > 0
8622 || (!flag_trapping_math && !flag_errno_math)
8623 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8625 REAL_VALUE_TYPE x;
8626 bool inexact;
8628 x = TREE_REAL_CST (arg0);
8629 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8630 if (flag_unsafe_math_optimizations || !inexact)
8631 return build_real (type, x);
8634 /* Strip sign ops from even integer powers. */
8635 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8637 tree narg0 = fold_strip_sign_ops (arg0);
8638 if (narg0)
8639 return build_call_expr (fndecl, 2, narg0, arg1);
8644 if (flag_unsafe_math_optimizations)
8646 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8648 /* Optimize pow(expN(x),y) = expN(x*y). */
8649 if (BUILTIN_EXPONENT_P (fcode))
8651 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8652 tree arg = CALL_EXPR_ARG (arg0, 0);
8653 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8654 return build_call_expr (expfn, 1, arg);
8657 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8658 if (BUILTIN_SQRT_P (fcode))
8660 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8661 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8662 build_real (type, dconsthalf));
8663 return build_call_expr (fndecl, 2, narg0, narg1);
8666 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8667 if (BUILTIN_CBRT_P (fcode))
8669 tree arg = CALL_EXPR_ARG (arg0, 0);
8670 if (tree_expr_nonnegative_p (arg))
8672 const REAL_VALUE_TYPE dconstroot
8673 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8674 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8675 build_real (type, dconstroot));
8676 return build_call_expr (fndecl, 2, arg, narg1);
8680 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8681 if (fcode == BUILT_IN_POW
8682 || fcode == BUILT_IN_POWF
8683 || fcode == BUILT_IN_POWL)
8685 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8686 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8687 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8688 return build_call_expr (fndecl, 2, arg00, narg1);
8692 return NULL_TREE;
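/* For illustration: the folding above turns hypothetical calls
       pow (x, 1.0)   into  x
       pow (x, -1.0)  into  1.0 / x
   and, with -funsafe-math-optimizations,
       pow (x, 0.5)         into  sqrt (x)
       pow (pow (x, y), z)  into  pow (x, y * z).  */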
8695 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8696 Return NULL_TREE if no simplification can be made. */
8697 static tree
8698 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8699 tree arg0, tree arg1, tree type)
8701 if (!validate_arg (arg0, REAL_TYPE)
8702 || !validate_arg (arg1, INTEGER_TYPE))
8703 return NULL_TREE;
8705 /* Optimize pow(1.0,y) = 1.0. */
8706 if (real_onep (arg0))
8707 return omit_one_operand (type, build_real (type, dconst1), arg1);
8709 if (host_integerp (arg1, 0))
8711 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8713 /* Evaluate powi at compile-time. */
8714 if (TREE_CODE (arg0) == REAL_CST
8715 && !TREE_OVERFLOW (arg0))
8717 REAL_VALUE_TYPE x;
8718 x = TREE_REAL_CST (arg0);
8719 real_powi (&x, TYPE_MODE (type), &x, c);
8720 return build_real (type, x);
8723 /* Optimize pow(x,0) = 1.0. */
8724 if (c == 0)
8725 return omit_one_operand (type, build_real (type, dconst1),
8726 arg0);
8728 /* Optimize pow(x,1) = x. */
8729 if (c == 1)
8730 return arg0;
8732 /* Optimize pow(x,-1) = 1.0/x. */
8733 if (c == -1)
8734 return fold_build2 (RDIV_EXPR, type,
8735 build_real (type, dconst1), arg0);
8738 return NULL_TREE;
8741 /* A subroutine of fold_builtin to fold the various exponent
8742 functions. Return NULL_TREE if no simplification can be made.
8743 FUNC is the corresponding MPFR exponent function. */
8745 static tree
8746 fold_builtin_exponent (tree fndecl, tree arg,
8747 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8749 if (validate_arg (arg, REAL_TYPE))
8751 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8752 tree res;
8754 /* Calculate the result when the argument is a constant. */
8755 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8756 return res;
8758 /* Optimize expN(logN(x)) = x. */
8759 if (flag_unsafe_math_optimizations)
8761 const enum built_in_function fcode = builtin_mathfn_code (arg);
8763 if ((func == mpfr_exp
8764 && (fcode == BUILT_IN_LOG
8765 || fcode == BUILT_IN_LOGF
8766 || fcode == BUILT_IN_LOGL))
8767 || (func == mpfr_exp2
8768 && (fcode == BUILT_IN_LOG2
8769 || fcode == BUILT_IN_LOG2F
8770 || fcode == BUILT_IN_LOG2L))
8771 || (func == mpfr_exp10
8772 && (fcode == BUILT_IN_LOG10
8773 || fcode == BUILT_IN_LOG10F
8774 || fcode == BUILT_IN_LOG10L)))
8775 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8779 return NULL_TREE;
8782 /* Return true if VAR is a VAR_DECL or a component thereof. */
8784 static bool
8785 var_decl_component_p (tree var)
8787 tree inner = var;
8788 while (handled_component_p (inner))
8789 inner = TREE_OPERAND (inner, 0);
8790 return SSA_VAR_P (inner);
8793 /* Fold function call to builtin memset. Return
8794 NULL_TREE if no simplification can be made. */
8796 static tree
8797 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8799 tree var, ret, etype;
8800 unsigned HOST_WIDE_INT length, cval;
8802 if (! validate_arg (dest, POINTER_TYPE)
8803 || ! validate_arg (c, INTEGER_TYPE)
8804 || ! validate_arg (len, INTEGER_TYPE))
8805 return NULL_TREE;
8807 if (! host_integerp (len, 1))
8808 return NULL_TREE;
8810 /* If the LEN parameter is zero, return DEST. */
8811 if (integer_zerop (len))
8812 return omit_one_operand (type, dest, c);
8814 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8815 return NULL_TREE;
8817 var = dest;
8818 STRIP_NOPS (var);
8819 if (TREE_CODE (var) != ADDR_EXPR)
8820 return NULL_TREE;
8822 var = TREE_OPERAND (var, 0);
8823 if (TREE_THIS_VOLATILE (var))
8824 return NULL_TREE;
8826 etype = TREE_TYPE (var);
8827 if (TREE_CODE (etype) == ARRAY_TYPE)
8828 etype = TREE_TYPE (etype);
8830 if (!INTEGRAL_TYPE_P (etype)
8831 && !POINTER_TYPE_P (etype))
8832 return NULL_TREE;
8834 if (! var_decl_component_p (var))
8835 return NULL_TREE;
8837 length = tree_low_cst (len, 1);
8838 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8839 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8840 < (int) length)
8841 return NULL_TREE;
8843 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8844 return NULL_TREE;
8846 if (integer_zerop (c))
8847 cval = 0;
8848 else
8850 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8851 return NULL_TREE;
8853 cval = tree_low_cst (c, 1);
8854 cval &= 0xff;
8855 cval |= cval << 8;
8856 cval |= cval << 16;
8857 cval |= (cval << 31) << 1;
8860 ret = build_int_cst_type (etype, cval);
8861 var = build_fold_indirect_ref (fold_convert (build_pointer_type (etype),
8862 dest));
8863 ret = build2 (MODIFY_EXPR, etype, var, ret);
8864 if (ignore)
8865 return ret;
8867 return omit_one_operand (type, dest, ret);
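/* For illustration: assuming a 32-bit int and 8-bit bytes, the folding
   above can turn a hypothetical call
       memset (&i, 0xab, sizeof (int))
   into the single store  i = 0xabababab,  provided the destination is a
   suitably aligned, non-volatile variable.  */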
8870 /* Fold function call to builtin bzero. Return
8871 NULL_TREE if no simplification can be made. */
8873 static tree
8874 fold_builtin_bzero (tree dest, tree size, bool ignore)
8876 if (! validate_arg (dest, POINTER_TYPE)
8877 || ! validate_arg (size, INTEGER_TYPE))
8878 return NULL_TREE;
8880 if (!ignore)
8881 return NULL_TREE;
8883 /* New argument list transforming bzero(ptr x, int y) to
8884 memset(ptr x, int 0, size_t y). This is done this way
8885 so that if it isn't expanded inline, we fall back to
8886 calling bzero instead of memset. */
8888 return fold_builtin_memset (dest, integer_zero_node,
8889 fold_convert (sizetype, size),
8890 void_type_node, ignore);
8893 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8894 NULL_TREE if no simplification can be made.
8895 If ENDP is 0, return DEST (like memcpy).
8896 If ENDP is 1, return DEST+LEN (like mempcpy).
8897 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8898 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8899 (memmove). */
8901 static tree
8902 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8904 tree destvar, srcvar, expr;
8906 if (! validate_arg (dest, POINTER_TYPE)
8907 || ! validate_arg (src, POINTER_TYPE)
8908 || ! validate_arg (len, INTEGER_TYPE))
8909 return NULL_TREE;
8911 /* If the LEN parameter is zero, return DEST. */
8912 if (integer_zerop (len))
8913 return omit_one_operand (type, dest, src);
8915 /* If SRC and DEST are the same (and not volatile), return
8916 DEST{,+LEN,+LEN-1}. */
8917 if (operand_equal_p (src, dest, 0))
8918 expr = len;
8919 else
8921 tree srctype, desttype;
8922 int src_align, dest_align;
8924 if (endp == 3)
8926 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8927 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8929 /* Both DEST and SRC must be pointer types.
8930 ??? This is what old code did. Is the testing for pointer types
8931 really mandatory?
8933 If either SRC is readonly or length is 1, we can use memcpy. */
8934 if (!dest_align || !src_align)
8935 return NULL_TREE;
8936 if (readonly_data_expr (src)
8937 || (host_integerp (len, 1)
8938 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8939 >= tree_low_cst (len, 1))))
8941 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8942 if (!fn)
8943 return NULL_TREE;
8944 return build_call_expr (fn, 3, dest, src, len);
8947 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8948 srcvar = build_fold_indirect_ref (src);
8949 destvar = build_fold_indirect_ref (dest);
8950 if (srcvar
8951 && !TREE_THIS_VOLATILE (srcvar)
8952 && destvar
8953 && !TREE_THIS_VOLATILE (destvar))
8955 tree src_base, dest_base, fn;
8956 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8957 HOST_WIDE_INT size = -1;
8958 HOST_WIDE_INT maxsize = -1;
8960 src_base = srcvar;
8961 if (handled_component_p (src_base))
8962 src_base = get_ref_base_and_extent (src_base, &src_offset,
8963 &size, &maxsize);
8964 dest_base = destvar;
8965 if (handled_component_p (dest_base))
8966 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
8967 &size, &maxsize);
8968 if (host_integerp (len, 1))
8970 maxsize = tree_low_cst (len, 1);
8971 if (maxsize
8972 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
8973 maxsize = -1;
8974 else
8975 maxsize *= BITS_PER_UNIT;
8977 else
8978 maxsize = -1;
8979 if (SSA_VAR_P (src_base)
8980 && SSA_VAR_P (dest_base))
8982 if (operand_equal_p (src_base, dest_base, 0)
8983 && ranges_overlap_p (src_offset, maxsize,
8984 dest_offset, maxsize))
8985 return NULL_TREE;
8987 else if (TREE_CODE (src_base) == INDIRECT_REF
8988 && TREE_CODE (dest_base) == INDIRECT_REF)
8990 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8991 TREE_OPERAND (dest_base, 0), 0)
8992 || ranges_overlap_p (src_offset, maxsize,
8993 dest_offset, maxsize))
8994 return NULL_TREE;
8996 else
8997 return NULL_TREE;
8999 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9000 if (!fn)
9001 return NULL_TREE;
9002 return build_call_expr (fn, 3, dest, src, len);
9004 return NULL_TREE;
9007 if (!host_integerp (len, 0))
9008 return NULL_TREE;
9009 /* FIXME:
9010 This logic loses for arguments like (type *)malloc (sizeof (type)),
9011 since we strip the casts up to the VOID return value from malloc.
9012 Perhaps we ought to inherit the type from the non-VOID argument here? */
9013 STRIP_NOPS (src);
9014 STRIP_NOPS (dest);
9015 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
9016 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
9018 tree tem = TREE_OPERAND (src, 0);
9019 STRIP_NOPS (tem);
9020 if (tem != TREE_OPERAND (src, 0))
9021 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
9023 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
9025 tree tem = TREE_OPERAND (dest, 0);
9026 STRIP_NOPS (tem);
9027 if (tem != TREE_OPERAND (dest, 0))
9028 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
9030 srctype = TREE_TYPE (TREE_TYPE (src));
9031 if (srctype
9032 && TREE_CODE (srctype) == ARRAY_TYPE
9033 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
9035 srctype = TREE_TYPE (srctype);
9036 STRIP_NOPS (src);
9037 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
9039 desttype = TREE_TYPE (TREE_TYPE (dest));
9040 if (desttype
9041 && TREE_CODE (desttype) == ARRAY_TYPE
9042 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
9044 desttype = TREE_TYPE (desttype);
9045 STRIP_NOPS (dest);
9046 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
9048 if (!srctype || !desttype
9049 || !TYPE_SIZE_UNIT (srctype)
9050 || !TYPE_SIZE_UNIT (desttype)
9051 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
9052 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
9053 || TYPE_VOLATILE (srctype)
9054 || TYPE_VOLATILE (desttype))
9055 return NULL_TREE;
9057 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
9058 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
9059 if (dest_align < (int) TYPE_ALIGN (desttype)
9060 || src_align < (int) TYPE_ALIGN (srctype))
9061 return NULL_TREE;
9063 if (!ignore)
9064 dest = builtin_save_expr (dest);
9066 srcvar = NULL_TREE;
9067 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
9069 srcvar = build_fold_indirect_ref (src);
9070 if (TREE_THIS_VOLATILE (srcvar))
9071 return NULL_TREE;
9072 else if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
9073 srcvar = NULL_TREE;
9074 /* With memcpy, it is possible to bypass aliasing rules, so without
9075 this check, e.g. execute/20060930-2.c would be misoptimized,
9076 because it uses a conflicting alias set to hold the argument for the
9077 memcpy call. This check is probably unnecessary with
9078 -fno-strict-aliasing. Similarly for destvar. See also
9079 PR29286. */
9080 else if (!var_decl_component_p (srcvar))
9081 srcvar = NULL_TREE;
9084 destvar = NULL_TREE;
9085 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
9087 destvar = build_fold_indirect_ref (dest);
9088 if (TREE_THIS_VOLATILE (destvar))
9089 return NULL_TREE;
9090 else if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
9091 destvar = NULL_TREE;
9092 else if (!var_decl_component_p (destvar))
9093 destvar = NULL_TREE;
9096 if (srcvar == NULL_TREE && destvar == NULL_TREE)
9097 return NULL_TREE;
9099 if (srcvar == NULL_TREE)
9101 tree srcptype;
9102 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
9103 return NULL_TREE;
9105 srctype = build_qualified_type (desttype, 0);
9106 if (src_align < (int) TYPE_ALIGN (srctype))
9108 if (AGGREGATE_TYPE_P (srctype)
9109 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
9110 return NULL_TREE;
9112 srctype = build_variant_type_copy (srctype);
9113 TYPE_ALIGN (srctype) = src_align;
9114 TYPE_USER_ALIGN (srctype) = 1;
9115 TYPE_PACKED (srctype) = 1;
9117 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
9118 src = fold_convert (srcptype, src);
9119 srcvar = build_fold_indirect_ref (src);
9121 else if (destvar == NULL_TREE)
9123 tree destptype;
9124 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
9125 return NULL_TREE;
9127 desttype = build_qualified_type (srctype, 0);
9128 if (dest_align < (int) TYPE_ALIGN (desttype))
9130 if (AGGREGATE_TYPE_P (desttype)
9131 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
9132 return NULL_TREE;
9134 desttype = build_variant_type_copy (desttype);
9135 TYPE_ALIGN (desttype) = dest_align;
9136 TYPE_USER_ALIGN (desttype) = 1;
9137 TYPE_PACKED (desttype) = 1;
9139 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
9140 dest = fold_convert (destptype, dest);
9141 destvar = build_fold_indirect_ref (dest);
9144 if (srctype == desttype
9145 || (gimple_in_ssa_p (cfun)
9146 && useless_type_conversion_p (desttype, srctype)))
9147 expr = srcvar;
9148 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
9149 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
9150 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
9151 || POINTER_TYPE_P (TREE_TYPE (destvar))))
9152 expr = fold_convert (TREE_TYPE (destvar), srcvar);
9153 else
9154 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
9155 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
9158 if (ignore)
9159 return expr;
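/* ENDP selects the value to return: 0 and 3 return DEST itself, 1
   returns DEST + LEN (mempcpy) and 2 returns DEST + LEN - 1 (stpcpy).  */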
9161 if (endp == 0 || endp == 3)
9162 return omit_one_operand (type, dest, expr);
9164 if (expr == len)
9165 expr = NULL_TREE;
9167 if (endp == 2)
9168 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
9169 ssize_int (1));
9171 len = fold_convert (sizetype, len);
9172 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
9173 dest = fold_convert (type, dest);
9174 if (expr)
9175 dest = omit_one_operand (type, dest, expr);
9176 return dest;
9179 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9180 If LEN is not NULL, it represents the length of the string to be
9181 copied. Return NULL_TREE if no simplification can be made. */
9183 tree
9184 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
9186 tree fn;
9188 if (!validate_arg (dest, POINTER_TYPE)
9189 || !validate_arg (src, POINTER_TYPE))
9190 return NULL_TREE;
9192 /* If SRC and DEST are the same (and not volatile), return DEST. */
9193 if (operand_equal_p (src, dest, 0))
9194 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
9196 if (optimize_function_for_size_p (cfun))
9197 return NULL_TREE;
9199 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9200 if (!fn)
9201 return NULL_TREE;
9203 if (!len)
9205 len = c_strlen (src, 1);
9206 if (! len || TREE_SIDE_EFFECTS (len))
9207 return NULL_TREE;
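/* Copy the terminating NUL as well and emit
   memcpy (dest, src, strlen (src) + 1) in place of the strcpy call.  */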
9210 len = size_binop (PLUS_EXPR, len, ssize_int (1));
9211 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9212 build_call_expr (fn, 3, dest, src, len));
9215 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9216 If SLEN is not NULL, it represents the length of the source string.
9217 Return NULL_TREE if no simplification can be made. */
9219 tree
9220 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
9222 tree fn;
9224 if (!validate_arg (dest, POINTER_TYPE)
9225 || !validate_arg (src, POINTER_TYPE)
9226 || !validate_arg (len, INTEGER_TYPE))
9227 return NULL_TREE;
9229 /* If the LEN parameter is zero, return DEST. */
9230 if (integer_zerop (len))
9231 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9233 /* We can't compare slen with len as constants below if len is not a
9234 constant. */
9235 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9236 return NULL_TREE;
9238 if (!slen)
9239 slen = c_strlen (src, 1);
9241 /* Now, we must be passed a constant src ptr parameter. */
9242 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9243 return NULL_TREE;
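/* Account for the terminating NUL of the source string.  */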
9245 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
9247 /* We do not support simplification of this case, though we do
9248 support it when expanding trees into RTL. */
9249 /* FIXME: generate a call to __builtin_memset. */
9250 if (tree_int_cst_lt (slen, len))
9251 return NULL_TREE;
9253 /* OK transform into builtin memcpy. */
9254 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9255 if (!fn)
9256 return NULL_TREE;
9257 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9258 build_call_expr (fn, 3, dest, src, len));
9261 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9262 arguments to the call, and TYPE is its return type.
9263 Return NULL_TREE if no simplification can be made. */
9265 static tree
9266 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
9268 if (!validate_arg (arg1, POINTER_TYPE)
9269 || !validate_arg (arg2, INTEGER_TYPE)
9270 || !validate_arg (len, INTEGER_TYPE))
9271 return NULL_TREE;
9272 else
9274 const char *p1;
9276 if (TREE_CODE (arg2) != INTEGER_CST
9277 || !host_integerp (len, 1))
9278 return NULL_TREE;
9280 p1 = c_getstr (arg1);
9281 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9283 char c;
9284 const char *r;
9285 tree tem;
9287 if (target_char_cast (arg2, &c))
9288 return NULL_TREE;
9290 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
9292 if (r == NULL)
9293 return build_int_cst (TREE_TYPE (arg1), 0);
9295 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
9296 size_int (r - p1));
9297 return fold_convert (type, tem);
9299 return NULL_TREE;
9303 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9304 Return NULL_TREE if no simplification can be made. */
9306 static tree
9307 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
9309 const char *p1, *p2;
9311 if (!validate_arg (arg1, POINTER_TYPE)
9312 || !validate_arg (arg2, POINTER_TYPE)
9313 || !validate_arg (len, INTEGER_TYPE))
9314 return NULL_TREE;
9316 /* If the LEN parameter is zero, return zero. */
9317 if (integer_zerop (len))
9318 return omit_two_operands (integer_type_node, integer_zero_node,
9319 arg1, arg2);
9321 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9322 if (operand_equal_p (arg1, arg2, 0))
9323 return omit_one_operand (integer_type_node, integer_zero_node, len);
9325 p1 = c_getstr (arg1);
9326 p2 = c_getstr (arg2);
9328 /* If all arguments are constant, and the value of len is not greater
9329 than the lengths of arg1 and arg2, evaluate at compile-time. */
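/* E.g. memcmp ("abc", "abd", 3) folds to -1 here; the host memcmp
   result is normalized to -1, 0 or 1.  */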
9330 if (host_integerp (len, 1) && p1 && p2
9331 && compare_tree_int (len, strlen (p1) + 1) <= 0
9332 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9334 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9336 if (r > 0)
9337 return integer_one_node;
9338 else if (r < 0)
9339 return integer_minus_one_node;
9340 else
9341 return integer_zero_node;
9344 /* If the LEN parameter is one, return an expression corresponding to
9345 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9346 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9348 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9349 tree cst_uchar_ptr_node
9350 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9352 tree ind1 = fold_convert (integer_type_node,
9353 build1 (INDIRECT_REF, cst_uchar_node,
9354 fold_convert (cst_uchar_ptr_node,
9355 arg1)));
9356 tree ind2 = fold_convert (integer_type_node,
9357 build1 (INDIRECT_REF, cst_uchar_node,
9358 fold_convert (cst_uchar_ptr_node,
9359 arg2)));
9360 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9363 return NULL_TREE;
9366 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9367 Return NULL_TREE if no simplification can be made. */
9369 static tree
9370 fold_builtin_strcmp (tree arg1, tree arg2)
9372 const char *p1, *p2;
9374 if (!validate_arg (arg1, POINTER_TYPE)
9375 || !validate_arg (arg2, POINTER_TYPE))
9376 return NULL_TREE;
9378 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9379 if (operand_equal_p (arg1, arg2, 0))
9380 return integer_zero_node;
9382 p1 = c_getstr (arg1);
9383 p2 = c_getstr (arg2);
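/* If both strings are constant, fold to -1, 0 or 1 at compile time,
   e.g. strcmp ("ab", "ac") -> -1.  */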
9385 if (p1 && p2)
9387 const int i = strcmp (p1, p2);
9388 if (i < 0)
9389 return integer_minus_one_node;
9390 else if (i > 0)
9391 return integer_one_node;
9392 else
9393 return integer_zero_node;
9396 /* If the second arg is "", return *(const unsigned char*)arg1. */
9397 if (p2 && *p2 == '\0')
9399 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9400 tree cst_uchar_ptr_node
9401 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9403 return fold_convert (integer_type_node,
9404 build1 (INDIRECT_REF, cst_uchar_node,
9405 fold_convert (cst_uchar_ptr_node,
9406 arg1)));
9409 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9410 if (p1 && *p1 == '\0')
9412 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9413 tree cst_uchar_ptr_node
9414 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9416 tree temp = fold_convert (integer_type_node,
9417 build1 (INDIRECT_REF, cst_uchar_node,
9418 fold_convert (cst_uchar_ptr_node,
9419 arg2)));
9420 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9423 return NULL_TREE;
9426 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9427 Return NULL_TREE if no simplification can be made. */
9429 static tree
9430 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9432 const char *p1, *p2;
9434 if (!validate_arg (arg1, POINTER_TYPE)
9435 || !validate_arg (arg2, POINTER_TYPE)
9436 || !validate_arg (len, INTEGER_TYPE))
9437 return NULL_TREE;
9439 /* If the LEN parameter is zero, return zero. */
9440 if (integer_zerop (len))
9441 return omit_two_operands (integer_type_node, integer_zero_node,
9442 arg1, arg2);
9444 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9445 if (operand_equal_p (arg1, arg2, 0))
9446 return omit_one_operand (integer_type_node, integer_zero_node, len);
9448 p1 = c_getstr (arg1);
9449 p2 = c_getstr (arg2);
9451 if (host_integerp (len, 1) && p1 && p2)
9453 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9454 if (i > 0)
9455 return integer_one_node;
9456 else if (i < 0)
9457 return integer_minus_one_node;
9458 else
9459 return integer_zero_node;
9462 /* If the second arg is "", and the length is greater than zero,
9463 return *(const unsigned char*)arg1. */
9464 if (p2 && *p2 == '\0'
9465 && TREE_CODE (len) == INTEGER_CST
9466 && tree_int_cst_sgn (len) == 1)
9468 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9469 tree cst_uchar_ptr_node
9470 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9472 return fold_convert (integer_type_node,
9473 build1 (INDIRECT_REF, cst_uchar_node,
9474 fold_convert (cst_uchar_ptr_node,
9475 arg1)));
9478 /* If the first arg is "", and the length is greater than zero,
9479 return -*(const unsigned char*)arg2. */
9480 if (p1 && *p1 == '\0'
9481 && TREE_CODE (len) == INTEGER_CST
9482 && tree_int_cst_sgn (len) == 1)
9484 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9485 tree cst_uchar_ptr_node
9486 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9488 tree temp = fold_convert (integer_type_node,
9489 build1 (INDIRECT_REF, cst_uchar_node,
9490 fold_convert (cst_uchar_ptr_node,
9491 arg2)));
9492 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9495 /* If the LEN parameter is one, return an expression corresponding to
9496 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9497 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9499 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9500 tree cst_uchar_ptr_node
9501 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9503 tree ind1 = fold_convert (integer_type_node,
9504 build1 (INDIRECT_REF, cst_uchar_node,
9505 fold_convert (cst_uchar_ptr_node,
9506 arg1)));
9507 tree ind2 = fold_convert (integer_type_node,
9508 build1 (INDIRECT_REF, cst_uchar_node,
9509 fold_convert (cst_uchar_ptr_node,
9510 arg2)));
9511 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9514 return NULL_TREE;
9517 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9518 ARG. Return NULL_TREE if no simplification can be made. */
9520 static tree
9521 fold_builtin_signbit (tree arg, tree type)
9523 tree temp;
9525 if (!validate_arg (arg, REAL_TYPE))
9526 return NULL_TREE;
9528 /* If ARG is a compile-time constant, determine the result. */
9529 if (TREE_CODE (arg) == REAL_CST
9530 && !TREE_OVERFLOW (arg))
9532 REAL_VALUE_TYPE c;
9534 c = TREE_REAL_CST (arg);
9535 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9536 return fold_convert (type, temp);
9539 /* If ARG is non-negative, the result is always zero. */
9540 if (tree_expr_nonnegative_p (arg))
9541 return omit_one_operand (type, integer_zero_node, arg);
9543 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
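/* With signed zeros this would be wrong: signbit (-0.0) is 1, but
   -0.0 < 0.0 is false.  */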
9544 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9545 return fold_build2 (LT_EXPR, type, arg,
9546 build_real (TREE_TYPE (arg), dconst0));
9548 return NULL_TREE;
9551 /* Fold function call to builtin copysign, copysignf or copysignl with
9552 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9553 be made. */
9555 static tree
9556 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9558 tree tem;
9560 if (!validate_arg (arg1, REAL_TYPE)
9561 || !validate_arg (arg2, REAL_TYPE))
9562 return NULL_TREE;
9564 /* copysign(X,X) is X. */
9565 if (operand_equal_p (arg1, arg2, 0))
9566 return fold_convert (type, arg1);
9568 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9569 if (TREE_CODE (arg1) == REAL_CST
9570 && TREE_CODE (arg2) == REAL_CST
9571 && !TREE_OVERFLOW (arg1)
9572 && !TREE_OVERFLOW (arg2))
9574 REAL_VALUE_TYPE c1, c2;
9576 c1 = TREE_REAL_CST (arg1);
9577 c2 = TREE_REAL_CST (arg2);
9578 /* c1.sign := c2.sign. */
9579 real_copysign (&c1, &c2);
9580 return build_real (type, c1);
9583 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9584 Remember to evaluate Y for side-effects. */
9585 if (tree_expr_nonnegative_p (arg2))
9586 return omit_one_operand (type,
9587 fold_build1 (ABS_EXPR, type, arg1),
9588 arg2);
9590 /* Strip sign changing operations for the first argument. */
9591 tem = fold_strip_sign_ops (arg1);
9592 if (tem)
9593 return build_call_expr (fndecl, 2, tem, arg2);
9595 return NULL_TREE;
9598 /* Fold a call to builtin isascii with argument ARG. */
9600 static tree
9601 fold_builtin_isascii (tree arg)
9603 if (!validate_arg (arg, INTEGER_TYPE))
9604 return NULL_TREE;
9605 else
9607 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9608 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9609 build_int_cst (NULL_TREE,
9610 ~ (unsigned HOST_WIDE_INT) 0x7f));
9611 return fold_build2 (EQ_EXPR, integer_type_node,
9612 arg, integer_zero_node);
9616 /* Fold a call to builtin toascii with argument ARG. */
9618 static tree
9619 fold_builtin_toascii (tree arg)
9621 if (!validate_arg (arg, INTEGER_TYPE))
9622 return NULL_TREE;
9624 /* Transform toascii(c) -> (c & 0x7f). */
9625 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9626 build_int_cst (NULL_TREE, 0x7f));
9629 /* Fold a call to builtin isdigit with argument ARG. */
9631 static tree
9632 fold_builtin_isdigit (tree arg)
9634 if (!validate_arg (arg, INTEGER_TYPE))
9635 return NULL_TREE;
9636 else
9638 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9639 /* According to the C standard, isdigit is unaffected by locale.
9640 However, it definitely is affected by the target character set. */
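/* The unsigned subtraction wraps around for characters below '0', so
   the single comparison against 9 covers both bounds.  */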
9641 unsigned HOST_WIDE_INT target_digit0
9642 = lang_hooks.to_target_charset ('0');
9644 if (target_digit0 == 0)
9645 return NULL_TREE;
9647 arg = fold_convert (unsigned_type_node, arg);
9648 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9649 build_int_cst (unsigned_type_node, target_digit0));
9650 return fold_build2 (LE_EXPR, integer_type_node, arg,
9651 build_int_cst (unsigned_type_node, 9));
9655 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9657 static tree
9658 fold_builtin_fabs (tree arg, tree type)
9660 if (!validate_arg (arg, REAL_TYPE))
9661 return NULL_TREE;
9663 arg = fold_convert (type, arg);
9664 if (TREE_CODE (arg) == REAL_CST)
9665 return fold_abs_const (arg, type);
9666 return fold_build1 (ABS_EXPR, type, arg);
9669 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9671 static tree
9672 fold_builtin_abs (tree arg, tree type)
9674 if (!validate_arg (arg, INTEGER_TYPE))
9675 return NULL_TREE;
9677 arg = fold_convert (type, arg);
9678 if (TREE_CODE (arg) == INTEGER_CST)
9679 return fold_abs_const (arg, type);
9680 return fold_build1 (ABS_EXPR, type, arg);
9683 /* Fold a call to builtin fmin or fmax. */
9685 static tree
9686 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9688 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9690 /* Calculate the result when the argument is a constant. */
9691 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9693 if (res)
9694 return res;
9696 /* If either argument is NaN, return the other one. Avoid the
9697 transformation if we get (and honor) a signalling NaN. Using
9698 omit_one_operand() ensures we create a non-lvalue. */
9699 if (TREE_CODE (arg0) == REAL_CST
9700 && real_isnan (&TREE_REAL_CST (arg0))
9701 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9702 || ! TREE_REAL_CST (arg0).signalling))
9703 return omit_one_operand (type, arg1, arg0);
9704 if (TREE_CODE (arg1) == REAL_CST
9705 && real_isnan (&TREE_REAL_CST (arg1))
9706 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9707 || ! TREE_REAL_CST (arg1).signalling))
9708 return omit_one_operand (type, arg0, arg1);
9710 /* Transform fmin/fmax(x,x) -> x. */
9711 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9712 return omit_one_operand (type, arg0, arg1);
9714 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9715 functions to return the numeric arg if the other one is NaN.
9716 These tree codes don't honor that, so only transform if
9717 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9718 handled, so we don't have to worry about it either. */
9719 if (flag_finite_math_only)
9720 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9721 fold_convert (type, arg0),
9722 fold_convert (type, arg1));
9724 return NULL_TREE;
9727 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9729 static tree
9730 fold_builtin_carg (tree arg, tree type)
9732 if (validate_arg (arg, COMPLEX_TYPE)
9733 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9735 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9737 if (atan2_fn)
9739 tree new_arg = builtin_save_expr (arg);
9740 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9741 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9742 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9746 return NULL_TREE;
9749 /* Fold a call to builtin logb/ilogb. */
9751 static tree
9752 fold_builtin_logb (tree arg, tree rettype)
9754 if (! validate_arg (arg, REAL_TYPE))
9755 return NULL_TREE;
9757 STRIP_NOPS (arg);
9759 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9761 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9763 switch (value->cl)
9765 case rvc_nan:
9766 case rvc_inf:
9767 /* If arg is Inf or NaN and we're logb, return it. */
9768 if (TREE_CODE (rettype) == REAL_TYPE)
9769 return fold_convert (rettype, arg);
9770 /* Fall through... */
9771 case rvc_zero:
9772 /* Zero may set errno and/or raise an exception for logb; for
9773 ilogb we don't know FP_ILOGB0. */
9774 return NULL_TREE;
9775 case rvc_normal:
9776 /* For normal numbers, proceed iff radix == 2. In GCC,
9777 normalized significands are in the range [0.5, 1.0). We
9778 want the exponent as if they were [1.0, 2.0) so get the
9779 exponent and subtract 1. */
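/* E.g. 8.0 is stored as 0.5 * 2**4, so logb (8.0) folds to 4 - 1 = 3.  */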
9780 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9781 return fold_convert (rettype, build_int_cst (NULL_TREE,
9782 REAL_EXP (value)-1));
9783 break;
9787 return NULL_TREE;
9790 /* Fold a call to builtin significand, if radix == 2. */
9792 static tree
9793 fold_builtin_significand (tree arg, tree rettype)
9795 if (! validate_arg (arg, REAL_TYPE))
9796 return NULL_TREE;
9798 STRIP_NOPS (arg);
9800 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9802 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9804 switch (value->cl)
9806 case rvc_zero:
9807 case rvc_nan:
9808 case rvc_inf:
9809 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9810 return fold_convert (rettype, arg);
9811 case rvc_normal:
9812 /* For normal numbers, proceed iff radix == 2. */
9813 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9815 REAL_VALUE_TYPE result = *value;
9816 /* In GCC, normalized significands are in the range [0.5,
9817 1.0). We want them to be [1.0, 2.0) so set the
9818 exponent to 1. */
9819 SET_REAL_EXP (&result, 1);
9820 return build_real (rettype, result);
9822 break;
9826 return NULL_TREE;
9829 /* Fold a call to builtin frexp, we can assume the base is 2. */
9831 static tree
9832 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9834 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9835 return NULL_TREE;
9837 STRIP_NOPS (arg0);
9839 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9840 return NULL_TREE;
9842 arg1 = build_fold_indirect_ref (arg1);
9844 /* Proceed if a valid pointer type was passed in. */
9845 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9847 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9848 tree frac, exp;
9850 switch (value->cl)
9852 case rvc_zero:
9853 /* For +-0, return (*exp = 0, +-0). */
9854 exp = integer_zero_node;
9855 frac = arg0;
9856 break;
9857 case rvc_nan:
9858 case rvc_inf:
9859 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9860 return omit_one_operand (rettype, arg0, arg1);
9861 case rvc_normal:
9863 /* Since the frexp function always expects base 2, and in
9864 GCC normalized significands are already in the range
9865 [0.5, 1.0), we have exactly what frexp wants. */
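/* E.g. frexp (8.0, &e) folds to 0.5 with *e set to 4.  */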
9866 REAL_VALUE_TYPE frac_rvt = *value;
9867 SET_REAL_EXP (&frac_rvt, 0);
9868 frac = build_real (rettype, frac_rvt);
9869 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9871 break;
9872 default:
9873 gcc_unreachable ();
9876 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9877 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9878 TREE_SIDE_EFFECTS (arg1) = 1;
9879 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9882 return NULL_TREE;
9885 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9886 then we can assume the base is two. If it's false, then we have to
9887 check the mode of the TYPE parameter in certain cases. */
9889 static tree
9890 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9892 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9894 STRIP_NOPS (arg0);
9895 STRIP_NOPS (arg1);
9897 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9898 if (real_zerop (arg0) || integer_zerop (arg1)
9899 || (TREE_CODE (arg0) == REAL_CST
9900 && !real_isfinite (&TREE_REAL_CST (arg0))))
9901 return omit_one_operand (type, arg0, arg1);
9903 /* If both arguments are constant, then try to evaluate it. */
9904 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9905 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9906 && host_integerp (arg1, 0))
9908 /* Bound the maximum adjustment to twice the range of the
9909 mode's valid exponents. Use abs to ensure the range is
9910 positive as a sanity check. */
9911 const long max_exp_adj = 2 *
9912 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9913 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9915 /* Get the user-requested adjustment. */
9916 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9918 /* The requested adjustment must be inside this range. This
9919 is a preliminary cap to avoid things like overflow; we
9920 may still fail to compute the result for other reasons. */
9921 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9923 REAL_VALUE_TYPE initial_result;
9925 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9927 /* Ensure we didn't overflow. */
9928 if (! real_isinf (&initial_result))
9930 const REAL_VALUE_TYPE trunc_result
9931 = real_value_truncate (TYPE_MODE (type), initial_result);
9933 /* Only proceed if the target mode can hold the
9934 resulting value. */
9935 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9936 return build_real (type, trunc_result);
9942 return NULL_TREE;
9945 /* Fold a call to builtin modf. */
9947 static tree
9948 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9950 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9951 return NULL_TREE;
9953 STRIP_NOPS (arg0);
9955 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9956 return NULL_TREE;
9958 arg1 = build_fold_indirect_ref (arg1);
9960 /* Proceed if a valid pointer type was passed in. */
9961 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9963 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9964 REAL_VALUE_TYPE trunc, frac;
9966 switch (value->cl)
9968 case rvc_nan:
9969 case rvc_zero:
9970 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9971 trunc = frac = *value;
9972 break;
9973 case rvc_inf:
9974 /* For +-Inf, return (*arg1 = arg0, +-0). */
9975 frac = dconst0;
9976 frac.sign = value->sign;
9977 trunc = *value;
9978 break;
9979 case rvc_normal:
9980 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9981 real_trunc (&trunc, VOIDmode, value);
9982 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9983 /* If the original number was negative and already
9984 integral, then the fractional part is -0.0. */
9985 if (value->sign && frac.cl == rvc_zero)
9986 frac.sign = value->sign;
9987 break;
9990 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9991 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9992 build_real (rettype, trunc));
9993 TREE_SIDE_EFFECTS (arg1) = 1;
9994 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9995 build_real (rettype, frac));
9998 return NULL_TREE;
10001 /* Fold a call to __builtin_isnan(), __builtin_isinf(), __builtin_isinf_sign()
10002 or __builtin_isfinite(). ARG is the argument for the call; BUILTIN_INDEX selects the classification. */
10004 static tree
10005 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
10007 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10008 REAL_VALUE_TYPE r;
10010 if (!validate_arg (arg, REAL_TYPE))
10011 return NULL_TREE;
10013 switch (builtin_index)
10015 case BUILT_IN_ISINF:
10016 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10017 return omit_one_operand (type, integer_zero_node, arg);
10019 if (TREE_CODE (arg) == REAL_CST)
10021 r = TREE_REAL_CST (arg);
10022 if (real_isinf (&r))
10023 return real_compare (GT_EXPR, &r, &dconst0)
10024 ? integer_one_node : integer_minus_one_node;
10025 else
10026 return integer_zero_node;
10029 return NULL_TREE;
10031 case BUILT_IN_ISINF_SIGN:
10033 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10034 /* In a boolean context, GCC will fold the inner COND_EXPR to
10035 1. So e.g. "if (isinf_sign(x))" would be folded to just
10036 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10037 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10038 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
10039 tree tmp = NULL_TREE;
10041 arg = builtin_save_expr (arg);
10043 if (signbit_fn && isinf_fn)
10045 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
10046 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
10048 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
10049 signbit_call, integer_zero_node);
10050 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
10051 isinf_call, integer_zero_node);
10053 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
10054 integer_minus_one_node, integer_one_node);
10055 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
10056 integer_zero_node);
10059 return tmp;
10062 case BUILT_IN_ISFINITE:
10063 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10064 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10065 return omit_one_operand (type, integer_one_node, arg);
10067 if (TREE_CODE (arg) == REAL_CST)
10069 r = TREE_REAL_CST (arg);
10070 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10073 return NULL_TREE;
10075 case BUILT_IN_ISNAN:
10076 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10077 return omit_one_operand (type, integer_zero_node, arg);
10079 if (TREE_CODE (arg) == REAL_CST)
10081 r = TREE_REAL_CST (arg);
10082 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10085 arg = builtin_save_expr (arg);
10086 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
10088 default:
10089 gcc_unreachable ();
10093 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10094 This builtin will generate code to return the appropriate floating
10095 point classification depending on the value of the floating point
10096 number passed in. The possible return values must be supplied as
10097 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10098 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
10099 one floating point argument which is "type generic". */
10101 static tree
10102 fold_builtin_fpclassify (tree exp)
10104 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10105 arg, type, res, tmp;
10106 enum machine_mode mode;
10107 REAL_VALUE_TYPE r;
10108 char buf[128];
10110 /* Verify the required arguments in the original call. */
10111 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10112 INTEGER_TYPE, INTEGER_TYPE,
10113 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10114 return NULL_TREE;
10116 fp_nan = CALL_EXPR_ARG (exp, 0);
10117 fp_infinite = CALL_EXPR_ARG (exp, 1);
10118 fp_normal = CALL_EXPR_ARG (exp, 2);
10119 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10120 fp_zero = CALL_EXPR_ARG (exp, 4);
10121 arg = CALL_EXPR_ARG (exp, 5);
10122 type = TREE_TYPE (arg);
10123 mode = TYPE_MODE (type);
10124 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
10126 /* fpclassify(x) ->
10127 isnan(x) ? FP_NAN :
10128 (fabs(x) == Inf ? FP_INFINITE :
10129 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10130 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
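/* Build the nest bottom-up: start with the innermost FP_ZERO vs.
   FP_SUBNORMAL test and wrap each outer condition around the result
   built so far.  */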
10132 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
10133 build_real (type, dconst0));
10134 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
10136 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10137 real_from_string (&r, buf);
10138 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
10139 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
10141 if (HONOR_INFINITIES (mode))
10143 real_inf (&r);
10144 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
10145 build_real (type, r));
10146 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
10149 if (HONOR_NANS (mode))
10151 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
10152 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
10155 return res;
10158 /* Fold a call to an unordered comparison function such as
10159 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10160 being called and ARG0 and ARG1 are the arguments for the call.
10161 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10162 the opposite of the desired result. UNORDERED_CODE is used
10163 for modes that can hold NaNs and ORDERED_CODE is used for
10164 the rest. */
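/* E.g. isgreater (x, y) becomes !(x UNLE y) when NaNs are honored and
   !(x <= y) otherwise.  */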
10166 static tree
10167 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
10168 enum tree_code unordered_code,
10169 enum tree_code ordered_code)
10171 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10172 enum tree_code code;
10173 tree type0, type1;
10174 enum tree_code code0, code1;
10175 tree cmp_type = NULL_TREE;
10177 type0 = TREE_TYPE (arg0);
10178 type1 = TREE_TYPE (arg1);
10180 code0 = TREE_CODE (type0);
10181 code1 = TREE_CODE (type1);
10183 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10184 /* Choose the wider of two real types. */
10185 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10186 ? type0 : type1;
10187 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10188 cmp_type = type0;
10189 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10190 cmp_type = type1;
10192 arg0 = fold_convert (cmp_type, arg0);
10193 arg1 = fold_convert (cmp_type, arg1);
10195 if (unordered_code == UNORDERED_EXPR)
10197 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10198 return omit_two_operands (type, integer_zero_node, arg0, arg1);
10199 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
10202 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10203 : ordered_code;
10204 return fold_build1 (TRUTH_NOT_EXPR, type,
10205 fold_build2 (code, type, arg0, arg1));
10208 /* Fold a call to built-in function FNDECL with 0 arguments.
10209 IGNORE is true if the result of the function call is ignored. This
10210 function returns NULL_TREE if no simplification was possible. */
10212 static tree
10213 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10215 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10216 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10217 switch (fcode)
10219 CASE_FLT_FN (BUILT_IN_INF):
10220 case BUILT_IN_INFD32:
10221 case BUILT_IN_INFD64:
10222 case BUILT_IN_INFD128:
10223 return fold_builtin_inf (type, true);
10225 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10226 return fold_builtin_inf (type, false);
10228 case BUILT_IN_CLASSIFY_TYPE:
10229 return fold_builtin_classify_type (NULL_TREE);
10231 default:
10232 break;
10234 return NULL_TREE;
10237 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10238 IGNORE is true if the result of the function call is ignored. This
10239 function returns NULL_TREE if no simplification was possible. */
10241 static tree
10242 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
10244 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10245 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10246 switch (fcode)
10249 case BUILT_IN_CONSTANT_P:
10251 tree val = fold_builtin_constant_p (arg0);
10253 /* Gimplification will pull the CALL_EXPR for the builtin out of
10254 an if condition. When not optimizing, we'll not CSE it back.
10255 To avoid regressions such as link errors, return false now. */
10256 if (!val && !optimize)
10257 val = integer_zero_node;
10259 return val;
10262 case BUILT_IN_CLASSIFY_TYPE:
10263 return fold_builtin_classify_type (arg0);
10265 case BUILT_IN_STRLEN:
10266 return fold_builtin_strlen (arg0);
10268 CASE_FLT_FN (BUILT_IN_FABS):
10269 return fold_builtin_fabs (arg0, type);
10271 case BUILT_IN_ABS:
10272 case BUILT_IN_LABS:
10273 case BUILT_IN_LLABS:
10274 case BUILT_IN_IMAXABS:
10275 return fold_builtin_abs (arg0, type);
10277 CASE_FLT_FN (BUILT_IN_CONJ):
10278 if (validate_arg (arg0, COMPLEX_TYPE)
10279 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10280 return fold_build1 (CONJ_EXPR, type, arg0);
10281 break;
10283 CASE_FLT_FN (BUILT_IN_CREAL):
10284 if (validate_arg (arg0, COMPLEX_TYPE)
10285 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10286 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));
10287 break;
10289 CASE_FLT_FN (BUILT_IN_CIMAG):
10290 if (validate_arg (arg0, COMPLEX_TYPE))
10291 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
10292 break;
10294 CASE_FLT_FN (BUILT_IN_CCOS):
10295 return fold_builtin_ccos (arg0, type, fndecl, /*hyper=*/ false);
10297 CASE_FLT_FN (BUILT_IN_CCOSH):
10298 return fold_builtin_ccos (arg0, type, fndecl, /*hyper=*/ true);
10300 #ifdef HAVE_mpc
10301 CASE_FLT_FN (BUILT_IN_CSIN):
10302 if (validate_arg (arg0, COMPLEX_TYPE)
10303 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10304 return do_mpc_arg1 (arg0, type, mpc_sin);
10305 break;
10307 CASE_FLT_FN (BUILT_IN_CSINH):
10308 if (validate_arg (arg0, COMPLEX_TYPE)
10309 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10310 return do_mpc_arg1 (arg0, type, mpc_sinh);
10311 break;
10313 CASE_FLT_FN (BUILT_IN_CTAN):
10314 if (validate_arg (arg0, COMPLEX_TYPE)
10315 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10316 return do_mpc_arg1 (arg0, type, mpc_tan);
10317 break;
10319 CASE_FLT_FN (BUILT_IN_CTANH):
10320 if (validate_arg (arg0, COMPLEX_TYPE)
10321 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10322 return do_mpc_arg1 (arg0, type, mpc_tanh);
10323 break;
10325 CASE_FLT_FN (BUILT_IN_CLOG):
10326 if (validate_arg (arg0, COMPLEX_TYPE)
10327 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10328 return do_mpc_arg1 (arg0, type, mpc_log);
10329 break;
10331 CASE_FLT_FN (BUILT_IN_CSQRT):
10332 if (validate_arg (arg0, COMPLEX_TYPE)
10333 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10334 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10335 break;
10336 #endif
10338 CASE_FLT_FN (BUILT_IN_CABS):
10339 return fold_builtin_cabs (arg0, type, fndecl);
10341 CASE_FLT_FN (BUILT_IN_CARG):
10342 return fold_builtin_carg (arg0, type);
10344 CASE_FLT_FN (BUILT_IN_SQRT):
10345 return fold_builtin_sqrt (arg0, type);
10347 CASE_FLT_FN (BUILT_IN_CBRT):
10348 return fold_builtin_cbrt (arg0, type);
10350 CASE_FLT_FN (BUILT_IN_ASIN):
10351 if (validate_arg (arg0, REAL_TYPE))
10352 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10353 &dconstm1, &dconst1, true);
10354 break;
10356 CASE_FLT_FN (BUILT_IN_ACOS):
10357 if (validate_arg (arg0, REAL_TYPE))
10358 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10359 &dconstm1, &dconst1, true);
10360 break;
10362 CASE_FLT_FN (BUILT_IN_ATAN):
10363 if (validate_arg (arg0, REAL_TYPE))
10364 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10365 break;
10367 CASE_FLT_FN (BUILT_IN_ASINH):
10368 if (validate_arg (arg0, REAL_TYPE))
10369 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10370 break;
10372 CASE_FLT_FN (BUILT_IN_ACOSH):
10373 if (validate_arg (arg0, REAL_TYPE))
10374 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10375 &dconst1, NULL, true);
10376 break;
10378 CASE_FLT_FN (BUILT_IN_ATANH):
10379 if (validate_arg (arg0, REAL_TYPE))
10380 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10381 &dconstm1, &dconst1, false);
10382 break;
10384 CASE_FLT_FN (BUILT_IN_SIN):
10385 if (validate_arg (arg0, REAL_TYPE))
10386 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10387 break;
10389 CASE_FLT_FN (BUILT_IN_COS):
10390 return fold_builtin_cos (arg0, type, fndecl);
10392 CASE_FLT_FN (BUILT_IN_TAN):
10393 return fold_builtin_tan (arg0, type);
10395 CASE_FLT_FN (BUILT_IN_CEXP):
10396 return fold_builtin_cexp (arg0, type);
10398 CASE_FLT_FN (BUILT_IN_CEXPI):
10399 if (validate_arg (arg0, REAL_TYPE))
10400 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10401 break;
10403 CASE_FLT_FN (BUILT_IN_SINH):
10404 if (validate_arg (arg0, REAL_TYPE))
10405 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10406 break;
10408 CASE_FLT_FN (BUILT_IN_COSH):
10409 return fold_builtin_cosh (arg0, type, fndecl);
10411 CASE_FLT_FN (BUILT_IN_TANH):
10412 if (validate_arg (arg0, REAL_TYPE))
10413 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10414 break;
10416 CASE_FLT_FN (BUILT_IN_ERF):
10417 if (validate_arg (arg0, REAL_TYPE))
10418 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10419 break;
10421 CASE_FLT_FN (BUILT_IN_ERFC):
10422 if (validate_arg (arg0, REAL_TYPE))
10423 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10424 break;
10426 CASE_FLT_FN (BUILT_IN_TGAMMA):
10427 if (validate_arg (arg0, REAL_TYPE))
10428 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10429 break;
10431 CASE_FLT_FN (BUILT_IN_EXP):
10432 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10434 CASE_FLT_FN (BUILT_IN_EXP2):
10435 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10437 CASE_FLT_FN (BUILT_IN_EXP10):
10438 CASE_FLT_FN (BUILT_IN_POW10):
10439 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10441 CASE_FLT_FN (BUILT_IN_EXPM1):
10442 if (validate_arg (arg0, REAL_TYPE))
10443 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10444 break;
10446 CASE_FLT_FN (BUILT_IN_LOG):
10447 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10449 CASE_FLT_FN (BUILT_IN_LOG2):
10450 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10452 CASE_FLT_FN (BUILT_IN_LOG10):
10453 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10455 CASE_FLT_FN (BUILT_IN_LOG1P):
10456 if (validate_arg (arg0, REAL_TYPE))
10457 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10458 &dconstm1, NULL, false);
10459 break;
10461 CASE_FLT_FN (BUILT_IN_J0):
10462 if (validate_arg (arg0, REAL_TYPE))
10463 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10464 NULL, NULL, 0);
10465 break;
10467 CASE_FLT_FN (BUILT_IN_J1):
10468 if (validate_arg (arg0, REAL_TYPE))
10469 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10470 NULL, NULL, 0);
10471 break;
10473 CASE_FLT_FN (BUILT_IN_Y0):
10474 if (validate_arg (arg0, REAL_TYPE))
10475 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10476 &dconst0, NULL, false);
10477 break;
10479 CASE_FLT_FN (BUILT_IN_Y1):
10480 if (validate_arg (arg0, REAL_TYPE))
10481 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10482 &dconst0, NULL, false);
10483 break;
10485 CASE_FLT_FN (BUILT_IN_NAN):
10486 case BUILT_IN_NAND32:
10487 case BUILT_IN_NAND64:
10488 case BUILT_IN_NAND128:
10489 return fold_builtin_nan (arg0, type, true);
10491 CASE_FLT_FN (BUILT_IN_NANS):
10492 return fold_builtin_nan (arg0, type, false);
10494 CASE_FLT_FN (BUILT_IN_FLOOR):
10495 return fold_builtin_floor (fndecl, arg0);
10497 CASE_FLT_FN (BUILT_IN_CEIL):
10498 return fold_builtin_ceil (fndecl, arg0);
10500 CASE_FLT_FN (BUILT_IN_TRUNC):
10501 return fold_builtin_trunc (fndecl, arg0);
10503 CASE_FLT_FN (BUILT_IN_ROUND):
10504 return fold_builtin_round (fndecl, arg0);
10506 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10507 CASE_FLT_FN (BUILT_IN_RINT):
10508 return fold_trunc_transparent_mathfn (fndecl, arg0);
10510 CASE_FLT_FN (BUILT_IN_LCEIL):
10511 CASE_FLT_FN (BUILT_IN_LLCEIL):
10512 CASE_FLT_FN (BUILT_IN_LFLOOR):
10513 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10514 CASE_FLT_FN (BUILT_IN_LROUND):
10515 CASE_FLT_FN (BUILT_IN_LLROUND):
10516 return fold_builtin_int_roundingfn (fndecl, arg0);
10518 CASE_FLT_FN (BUILT_IN_LRINT):
10519 CASE_FLT_FN (BUILT_IN_LLRINT):
10520 return fold_fixed_mathfn (fndecl, arg0);
10522 case BUILT_IN_BSWAP32:
10523 case BUILT_IN_BSWAP64:
10524 return fold_builtin_bswap (fndecl, arg0);
10526 CASE_INT_FN (BUILT_IN_FFS):
10527 CASE_INT_FN (BUILT_IN_CLZ):
10528 CASE_INT_FN (BUILT_IN_CTZ):
10529 CASE_INT_FN (BUILT_IN_POPCOUNT):
10530 CASE_INT_FN (BUILT_IN_PARITY):
10531 return fold_builtin_bitop (fndecl, arg0);
10533 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10534 return fold_builtin_signbit (arg0, type);
10536 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10537 return fold_builtin_significand (arg0, type);
10539 CASE_FLT_FN (BUILT_IN_ILOGB):
10540 CASE_FLT_FN (BUILT_IN_LOGB):
10541 return fold_builtin_logb (arg0, type);
10543 case BUILT_IN_ISASCII:
10544 return fold_builtin_isascii (arg0);
10546 case BUILT_IN_TOASCII:
10547 return fold_builtin_toascii (arg0);
10549 case BUILT_IN_ISDIGIT:
10550 return fold_builtin_isdigit (arg0);
10552 CASE_FLT_FN (BUILT_IN_FINITE):
10553 case BUILT_IN_FINITED32:
10554 case BUILT_IN_FINITED64:
10555 case BUILT_IN_FINITED128:
10556 case BUILT_IN_ISFINITE:
10557 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10559 CASE_FLT_FN (BUILT_IN_ISINF):
10560 case BUILT_IN_ISINFD32:
10561 case BUILT_IN_ISINFD64:
10562 case BUILT_IN_ISINFD128:
10563 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10565 case BUILT_IN_ISINF_SIGN:
10566 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10568 CASE_FLT_FN (BUILT_IN_ISNAN):
10569 case BUILT_IN_ISNAND32:
10570 case BUILT_IN_ISNAND64:
10571 case BUILT_IN_ISNAND128:
10572 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10574 case BUILT_IN_PRINTF:
10575 case BUILT_IN_PRINTF_UNLOCKED:
10576 case BUILT_IN_VPRINTF:
10577 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10579 default:
10580 break;
10583 return NULL_TREE;
10587 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10588 IGNORE is true if the result of the function call is ignored. This
10589 function returns NULL_TREE if no simplification was possible. */
10591 static tree
10592 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10594 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10595 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10597 switch (fcode)
10599 CASE_FLT_FN (BUILT_IN_JN):
10600 if (validate_arg (arg0, INTEGER_TYPE)
10601 && validate_arg (arg1, REAL_TYPE))
10602 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10603 break;
10605 CASE_FLT_FN (BUILT_IN_YN):
10606 if (validate_arg (arg0, INTEGER_TYPE)
10607 && validate_arg (arg1, REAL_TYPE))
10608 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10609 &dconst0, false);
10610 break;
10612 CASE_FLT_FN (BUILT_IN_DREM):
10613 CASE_FLT_FN (BUILT_IN_REMAINDER):
10614 if (validate_arg (arg0, REAL_TYPE)
10615 && validate_arg(arg1, REAL_TYPE))
10616 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10617 break;
10619 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10620 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10621 if (validate_arg (arg0, REAL_TYPE)
10622 && validate_arg(arg1, POINTER_TYPE))
10623 return do_mpfr_lgamma_r (arg0, arg1, type);
10624 break;
10626 CASE_FLT_FN (BUILT_IN_ATAN2):
10627 if (validate_arg (arg0, REAL_TYPE)
10628 && validate_arg(arg1, REAL_TYPE))
10629 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10630 break;
10632 CASE_FLT_FN (BUILT_IN_FDIM):
10633 if (validate_arg (arg0, REAL_TYPE)
10634 && validate_arg(arg1, REAL_TYPE))
10635 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10636 break;
10638 CASE_FLT_FN (BUILT_IN_HYPOT):
10639 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10641 CASE_FLT_FN (BUILT_IN_LDEXP):
10642 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10643 CASE_FLT_FN (BUILT_IN_SCALBN):
10644 CASE_FLT_FN (BUILT_IN_SCALBLN):
10645 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10647 CASE_FLT_FN (BUILT_IN_FREXP):
10648 return fold_builtin_frexp (arg0, arg1, type);
10650 CASE_FLT_FN (BUILT_IN_MODF):
10651 return fold_builtin_modf (arg0, arg1, type);
10653 case BUILT_IN_BZERO:
10654 return fold_builtin_bzero (arg0, arg1, ignore);
10656 case BUILT_IN_FPUTS:
10657 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10659 case BUILT_IN_FPUTS_UNLOCKED:
10660 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10662 case BUILT_IN_STRSTR:
10663 return fold_builtin_strstr (arg0, arg1, type);
10665 case BUILT_IN_STRCAT:
10666 return fold_builtin_strcat (arg0, arg1);
10668 case BUILT_IN_STRSPN:
10669 return fold_builtin_strspn (arg0, arg1);
10671 case BUILT_IN_STRCSPN:
10672 return fold_builtin_strcspn (arg0, arg1);
10674 case BUILT_IN_STRCHR:
10675 case BUILT_IN_INDEX:
10676 return fold_builtin_strchr (arg0, arg1, type);
10678 case BUILT_IN_STRRCHR:
10679 case BUILT_IN_RINDEX:
10680 return fold_builtin_strrchr (arg0, arg1, type);
10682 case BUILT_IN_STRCPY:
10683 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10685 case BUILT_IN_STPCPY:
10686 if (ignore)
10688 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10689 if (!fn)
10690 break;
10692 return build_call_expr (fn, 2, arg0, arg1);
10694 break;
10696 case BUILT_IN_STRCMP:
10697 return fold_builtin_strcmp (arg0, arg1);
10699 case BUILT_IN_STRPBRK:
10700 return fold_builtin_strpbrk (arg0, arg1, type);
10702 case BUILT_IN_EXPECT:
10703 return fold_builtin_expect (arg0, arg1);
10705 CASE_FLT_FN (BUILT_IN_POW):
10706 return fold_builtin_pow (fndecl, arg0, arg1, type);
10708 CASE_FLT_FN (BUILT_IN_POWI):
10709 return fold_builtin_powi (fndecl, arg0, arg1, type);
10711 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10712 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10714 CASE_FLT_FN (BUILT_IN_FMIN):
10715 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10717 CASE_FLT_FN (BUILT_IN_FMAX):
10718 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10720 case BUILT_IN_ISGREATER:
10721 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10722 case BUILT_IN_ISGREATEREQUAL:
10723 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10724 case BUILT_IN_ISLESS:
10725 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10726 case BUILT_IN_ISLESSEQUAL:
10727 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10728 case BUILT_IN_ISLESSGREATER:
10729 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10730 case BUILT_IN_ISUNORDERED:
10731 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10732 NOP_EXPR);
10734 /* We do the folding for va_start in the expander. */
10735 case BUILT_IN_VA_START:
10736 break;
10738 case BUILT_IN_SPRINTF:
10739 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10741 case BUILT_IN_OBJECT_SIZE:
10742 return fold_builtin_object_size (arg0, arg1);
10744 case BUILT_IN_PRINTF:
10745 case BUILT_IN_PRINTF_UNLOCKED:
10746 case BUILT_IN_VPRINTF:
10747 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10749 case BUILT_IN_PRINTF_CHK:
10750 case BUILT_IN_VPRINTF_CHK:
10751 if (!validate_arg (arg0, INTEGER_TYPE)
10752 || TREE_SIDE_EFFECTS (arg0))
10753 return NULL_TREE;
10754 else
10755 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10756 break;
10758 case BUILT_IN_FPRINTF:
10759 case BUILT_IN_FPRINTF_UNLOCKED:
10760 case BUILT_IN_VFPRINTF:
10761 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10762 ignore, fcode);
10764 default:
10765 break;
10767 return NULL_TREE;
10770 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10771 and ARG2. IGNORE is true if the result of the function call is ignored.
10772 This function returns NULL_TREE if no simplification was possible. */
10774 static tree
10775 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10777 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10778 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10779 switch (fcode)
10782 CASE_FLT_FN (BUILT_IN_SINCOS):
10783 return fold_builtin_sincos (arg0, arg1, arg2);
10785 CASE_FLT_FN (BUILT_IN_FMA):
10786 if (validate_arg (arg0, REAL_TYPE)
10787 && validate_arg(arg1, REAL_TYPE)
10788 && validate_arg(arg2, REAL_TYPE))
10789 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10790 break;
10792 CASE_FLT_FN (BUILT_IN_REMQUO):
10793 if (validate_arg (arg0, REAL_TYPE)
10794 && validate_arg(arg1, REAL_TYPE)
10795 && validate_arg(arg2, POINTER_TYPE))
10796 return do_mpfr_remquo (arg0, arg1, arg2);
10797 break;
10799 case BUILT_IN_MEMSET:
10800 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10802 case BUILT_IN_BCOPY:
10803 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10805 case BUILT_IN_MEMCPY:
10806 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10808 case BUILT_IN_MEMPCPY:
10809 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10811 case BUILT_IN_MEMMOVE:
10812 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10814 case BUILT_IN_STRNCAT:
10815 return fold_builtin_strncat (arg0, arg1, arg2);
10817 case BUILT_IN_STRNCPY:
10818 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10820 case BUILT_IN_STRNCMP:
10821 return fold_builtin_strncmp (arg0, arg1, arg2);
10823 case BUILT_IN_MEMCHR:
10824 return fold_builtin_memchr (arg0, arg1, arg2, type);
10826 case BUILT_IN_BCMP:
10827 case BUILT_IN_MEMCMP:
10828 return fold_builtin_memcmp (arg0, arg1, arg2);
10830 case BUILT_IN_SPRINTF:
10831 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10833 case BUILT_IN_STRCPY_CHK:
10834 case BUILT_IN_STPCPY_CHK:
10835 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10836 ignore, fcode);
10838 case BUILT_IN_STRCAT_CHK:
10839 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10841 case BUILT_IN_PRINTF_CHK:
10842 case BUILT_IN_VPRINTF_CHK:
10843 if (!validate_arg (arg0, INTEGER_TYPE)
10844 || TREE_SIDE_EFFECTS (arg0))
10845 return NULL_TREE;
10846 else
10847 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10848 break;
10850 case BUILT_IN_FPRINTF:
10851 case BUILT_IN_FPRINTF_UNLOCKED:
10852 case BUILT_IN_VFPRINTF:
10853 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10855 case BUILT_IN_FPRINTF_CHK:
10856 case BUILT_IN_VFPRINTF_CHK:
10857 if (!validate_arg (arg1, INTEGER_TYPE)
10858 || TREE_SIDE_EFFECTS (arg1))
10859 return NULL_TREE;
10860 else
10861 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10862 ignore, fcode);
10864 default:
10865 break;
10867 return NULL_TREE;
10870 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10871 ARG2, and ARG3. IGNORE is true if the result of the function call is
10872 ignored. This function returns NULL_TREE if no simplification was
10873 possible. */
10875 static tree
10876 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10877 bool ignore)
10879 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10881 switch (fcode)
10883 case BUILT_IN_MEMCPY_CHK:
10884 case BUILT_IN_MEMPCPY_CHK:
10885 case BUILT_IN_MEMMOVE_CHK:
10886 case BUILT_IN_MEMSET_CHK:
10887 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10888 NULL_TREE, ignore,
10889 DECL_FUNCTION_CODE (fndecl));
10891 case BUILT_IN_STRNCPY_CHK:
10892 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10894 case BUILT_IN_STRNCAT_CHK:
10895 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10897 case BUILT_IN_FPRINTF_CHK:
10898 case BUILT_IN_VFPRINTF_CHK:
10899 if (!validate_arg (arg1, INTEGER_TYPE)
10900 || TREE_SIDE_EFFECTS (arg1))
10901 return NULL_TREE;
10902 else
10903 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10904 ignore, fcode);
10905 break;
10907 default:
10908 break;
10910 return NULL_TREE;
10913 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10914 arguments, where NARGS <= 4. IGNORE is true if the result of the
10915 function call is ignored. This function returns NULL_TREE if no
10916 simplification was possible. Note that this only folds builtins with
10917 fixed argument patterns. Foldings that do varargs-to-varargs
10918 transformations, or that match calls with more than 4 arguments,
10919 need to be handled with fold_builtin_varargs instead. */
10921 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10923 static tree
10924 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10926 tree ret = NULL_TREE;
10928 switch (nargs)
10930 case 0:
10931 ret = fold_builtin_0 (fndecl, ignore);
10932 break;
10933 case 1:
10934 ret = fold_builtin_1 (fndecl, args[0], ignore);
10935 break;
10936 case 2:
10937 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10938 break;
10939 case 3:
10940 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10941 break;
10942 case 4:
10943 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10944 ignore);
10945 break;
10946 default:
10947 break;
10949 if (ret)
10951 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10952 TREE_NO_WARNING (ret) = 1;
10953 return ret;
10955 return NULL_TREE;
10958 /* Builtins with folding operations that operate on "..." arguments
10959 need special handling; we need to store the arguments in a convenient
10960 data structure before attempting any folding. Fortunately there are
10961 only a few builtins that fall into this category. FNDECL is the
10962 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10963 result of the function call is ignored. */
10965 static tree
10966 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10968 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10969 tree ret = NULL_TREE;
10971 switch (fcode)
10973 case BUILT_IN_SPRINTF_CHK:
10974 case BUILT_IN_VSPRINTF_CHK:
10975 ret = fold_builtin_sprintf_chk (exp, fcode);
10976 break;
10978 case BUILT_IN_SNPRINTF_CHK:
10979 case BUILT_IN_VSNPRINTF_CHK:
10980 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10981 break;
10983 case BUILT_IN_FPCLASSIFY:
10984 ret = fold_builtin_fpclassify (exp);
10985 break;
10987 default:
10988 break;
10990 if (ret)
10992 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10993 TREE_NO_WARNING (ret) = 1;
10994 return ret;
10996 return NULL_TREE;
10999 /* Return true if FNDECL shouldn't be folded right now.
11000 If a built-in function has an inline attribute always_inline
11001 wrapper, defer folding it after always_inline functions have
11002 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11003 might not be performed. */
11005 static bool
11006 avoid_folding_inline_builtin (tree fndecl)
11008 return (DECL_DECLARED_INLINE_P (fndecl)
11009 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11010 && cfun
11011 && !cfun->always_inline_functions_inlined
11012 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11015 /* A wrapper function for builtin folding that prevents warnings for
11016 "statement without effect" and the like, caused by removing the
11017 call node earlier than the warning is generated. */
11019 tree
11020 fold_call_expr (tree exp, bool ignore)
11022 tree ret = NULL_TREE;
11023 tree fndecl = get_callee_fndecl (exp);
11024 if (fndecl
11025 && TREE_CODE (fndecl) == FUNCTION_DECL
11026 && DECL_BUILT_IN (fndecl)
11027 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11028 yet. Defer folding until we see all the arguments
11029 (after inlining). */
11030 && !CALL_EXPR_VA_ARG_PACK (exp))
11032 int nargs = call_expr_nargs (exp);
11034 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11035 instead last argument is __builtin_va_arg_pack (). Defer folding
11036 even in that case, until arguments are finalized. */
11037 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11039 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11040 if (fndecl2
11041 && TREE_CODE (fndecl2) == FUNCTION_DECL
11042 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11043 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11044 return NULL_TREE;
11047 if (avoid_folding_inline_builtin (fndecl))
11048 return NULL_TREE;
11050 /* FIXME: Don't use a list in this interface. */
11051 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11052 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
11053 else
11055 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11057 tree *args = CALL_EXPR_ARGP (exp);
11058 ret = fold_builtin_n (fndecl, args, nargs, ignore);
11060 if (!ret)
11061 ret = fold_builtin_varargs (fndecl, exp, ignore);
11062 if (ret)
11064 /* Propagate location information from original call to
11065 expansion of builtin. Otherwise things like
11066 maybe_emit_chk_warning, that operate on the expansion
11067 of a builtin, will use the wrong location information. */
11068 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
11070 tree realret = ret;
11071 if (TREE_CODE (ret) == NOP_EXPR)
11072 realret = TREE_OPERAND (ret, 0);
11073 if (CAN_HAVE_LOCATION_P (realret)
11074 && !EXPR_HAS_LOCATION (realret))
11075 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
11077 return ret;
11081 return NULL_TREE;
11084 /* Conveniently construct a function call expression. FNDECL names the
11085 function to be called and ARGLIST is a TREE_LIST of arguments. */
11087 tree
11088 build_function_call_expr (tree fndecl, tree arglist)
11090 tree fntype = TREE_TYPE (fndecl);
11091 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11092 int n = list_length (arglist);
11093 tree *argarray = (tree *) alloca (n * sizeof (tree));
11094 int i;
11096 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
11097 argarray[i] = TREE_VALUE (arglist);
11098 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
11101 /* Conveniently construct a function call expression. FNDECL names the
11102 function to be called, N is the number of arguments, and the "..."
11103 parameters are the argument expressions. */
11105 tree
11106 build_call_expr (tree fndecl, int n, ...)
11108 va_list ap;
11109 tree fntype = TREE_TYPE (fndecl);
11110 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11111 tree *argarray = (tree *) alloca (n * sizeof (tree));
11112 int i;
11114 va_start (ap, n);
11115 for (i = 0; i < n; i++)
11116 argarray[i] = va_arg (ap, tree);
11117 va_end (ap);
11118 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
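/* A sketch of how the folders in this file use build_call_expr
   (assuming the implicit strchr decl is available): a replacement call
   such as strchr (s, 'x') can be synthesized with

       tree fn = implicit_built_in_decls[BUILT_IN_STRCHR];
       tree call = build_call_expr (fn, 2, s, build_int_cst (NULL_TREE, 'x'));

   Because the result goes through fold_builtin_call_array, the new
   call may itself be simplified further.  */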
11121 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11122 N arguments are passed in the array ARGARRAY. */
11124 tree
11125 fold_builtin_call_array (tree type,
11126 tree fn,
11127 int n,
11128 tree *argarray)
11130 tree ret = NULL_TREE;
11131 int i;
11132 tree exp;
11134 if (TREE_CODE (fn) == ADDR_EXPR)
11136 tree fndecl = TREE_OPERAND (fn, 0);
11137 if (TREE_CODE (fndecl) == FUNCTION_DECL
11138 && DECL_BUILT_IN (fndecl))
11140 /* If last argument is __builtin_va_arg_pack (), arguments to this
11141 function are not finalized yet. Defer folding until they are. */
11142 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11144 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11145 if (fndecl2
11146 && TREE_CODE (fndecl2) == FUNCTION_DECL
11147 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11148 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11149 return build_call_array (type, fn, n, argarray);
11151 if (avoid_folding_inline_builtin (fndecl))
11152 return build_call_array (type, fn, n, argarray);
11153 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11155 tree arglist = NULL_TREE;
11156 for (i = n - 1; i >= 0; i--)
11157 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
11158 ret = targetm.fold_builtin (fndecl, arglist, false);
11159 if (ret)
11160 return ret;
11161 return build_call_array (type, fn, n, argarray);
11163 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11165 /* First try the transformations that don't require consing up
11166 an exp. */
11167 ret = fold_builtin_n (fndecl, argarray, n, false);
11168 if (ret)
11169 return ret;
11172 /* If we got this far, we need to build an exp. */
11173 exp = build_call_array (type, fn, n, argarray);
11174 ret = fold_builtin_varargs (fndecl, exp, false);
11175 return ret ? ret : exp;
11179 return build_call_array (type, fn, n, argarray);
11182 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11183 along with N new arguments specified as the "..." parameters. SKIP
11184 is the number of arguments in EXP to be omitted. This function is used
11185 to do varargs-to-varargs transformations. */
11187 static tree
11188 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
11190 int oldnargs = call_expr_nargs (exp);
11191 int nargs = oldnargs - skip + n;
11192 tree fntype = TREE_TYPE (fndecl);
11193 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11194 tree *buffer;
11196 if (n > 0)
11198 int i, j;
11199 va_list ap;
11201 buffer = XALLOCAVEC (tree, nargs);
11202 va_start (ap, n);
11203 for (i = 0; i < n; i++)
11204 buffer[i] = va_arg (ap, tree);
11205 va_end (ap);
11206 for (j = skip; j < oldnargs; j++, i++)
11207 buffer[i] = CALL_EXPR_ARG (exp, j);
11209 else
11210 buffer = CALL_EXPR_ARGP (exp) + skip;
11212 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
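/* For illustration, fold_builtin_sprintf_chk below calls

       rewrite_call_expr (exp, 4, fn, 2, dest, fmt);

   which drops the first 4 arguments of the __sprintf_chk call
   (dest, flag, size, fmt), prepends the 2 new arguments DEST and FMT,
   and keeps any remaining "..." arguments, yielding a plain
   sprintf (dest, fmt, ...) call.  */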
11215 /* Validate a single argument ARG against a tree code CODE representing
11216 a type. */
11218 static bool
11219 validate_arg (const_tree arg, enum tree_code code)
11221 if (!arg)
11222 return false;
11223 else if (code == POINTER_TYPE)
11224 return POINTER_TYPE_P (TREE_TYPE (arg));
11225 else if (code == INTEGER_TYPE)
11226 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11227 return code == TREE_CODE (TREE_TYPE (arg));
11230 /* This function validates the types of a function call argument list
11231 against a specified list of tree_codes. If the last specifier is a 0,
11232    that represents an ellipsis; otherwise the last specifier must be a
11233 VOID_TYPE.
11235 This is the GIMPLE version of validate_arglist. Eventually we want to
11236 completely convert builtins.c to work from GIMPLEs and the tree based
11237 validate_arglist will then be removed. */
11239 bool
11240 validate_gimple_arglist (const_gimple call, ...)
11242 enum tree_code code;
11243 bool res = 0;
11244 va_list ap;
11245 const_tree arg;
11246 size_t i;
11248 va_start (ap, call);
11249 i = 0;
11253 code = (enum tree_code) va_arg (ap, int);
11254 switch (code)
11256 case 0:
11257 	case 0:
	  /* This signifies an ellipsis; any further arguments are all ok.  */
11258 res = true;
11259 goto end;
11260 case VOID_TYPE:
11261 /* This signifies an endlink, if no arguments remain, return
11262 true, otherwise return false. */
11263 res = (i == gimple_call_num_args (call));
11264 goto end;
11265 default:
11266 /* If no parameters remain or the parameter's code does not
11267 match the specified code, return false. Otherwise continue
11268 checking any remaining arguments. */
11269 arg = gimple_call_arg (call, i++);
11270 if (!validate_arg (arg, code))
11271 goto end;
11272 break;
11275 while (1);
11277 /* We need gotos here since we can only have one VA_CLOSE in a
11278 function. */
11279 end: ;
11280 va_end (ap);
11282 return res;
11285 /* This function validates the types of a function call argument list
11286 against a specified list of tree_codes. If the last specifier is a 0,
11287    that represents an ellipsis; otherwise the last specifier must be a
11288 VOID_TYPE. */
11290 bool
11291 validate_arglist (const_tree callexpr, ...)
11293 enum tree_code code;
11294 bool res = 0;
11295 va_list ap;
11296 const_call_expr_arg_iterator iter;
11297 const_tree arg;
11299 va_start (ap, callexpr);
11300 init_const_call_expr_arg_iterator (callexpr, &iter);
11304 code = (enum tree_code) va_arg (ap, int);
11305 switch (code)
11307 case 0:
11308 	case 0:
	  /* This signifies an ellipsis; any further arguments are all ok.  */
11309 res = true;
11310 goto end;
11311 case VOID_TYPE:
11312 /* This signifies an endlink, if no arguments remain, return
11313 true, otherwise return false. */
11314 res = !more_const_call_expr_args_p (&iter);
11315 goto end;
11316 default:
11317 /* If no parameters remain or the parameter's code does not
11318 match the specified code, return false. Otherwise continue
11319 checking any remaining arguments. */
11320 arg = next_const_call_expr_arg (&iter);
11321 if (!validate_arg (arg, code))
11322 goto end;
11323 break;
11326 while (1);
11328 /* We need gotos here since we can only have one VA_CLOSE in a
11329 function. */
11330 end: ;
11331 va_end (ap);
11333 return res;
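/* For illustration, expand_builtin_object_size below uses

       validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)

   to require exactly one pointer argument followed by one integer
   argument.  Ending the specifier list with 0 instead of VOID_TYPE
   would accept any number of additional trailing arguments.  */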
11336 /* Default target-specific builtin expander that does nothing. */
11338 rtx
11339 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11340 rtx target ATTRIBUTE_UNUSED,
11341 rtx subtarget ATTRIBUTE_UNUSED,
11342 enum machine_mode mode ATTRIBUTE_UNUSED,
11343 int ignore ATTRIBUTE_UNUSED)
11345 return NULL_RTX;
11348 /* Returns true if EXP represents data that would potentially reside
11349 in a readonly section. */
11351 static bool
11352 readonly_data_expr (tree exp)
11354 STRIP_NOPS (exp);
11356 if (TREE_CODE (exp) != ADDR_EXPR)
11357 return false;
11359 exp = get_base_address (TREE_OPERAND (exp, 0));
11360 if (!exp)
11361 return false;
11363 /* Make sure we call decl_readonly_section only for trees it
11364 can handle (since it returns true for everything it doesn't
11365 understand). */
11366 if (TREE_CODE (exp) == STRING_CST
11367 || TREE_CODE (exp) == CONSTRUCTOR
11368 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11369 return decl_readonly_section (exp, 0);
11370 else
11371 return false;
11374 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11375 to the call, and TYPE is its return type.
11377 Return NULL_TREE if no simplification was possible, otherwise return the
11378 simplified form of the call as a tree.
11380 The simplified form may be a constant or other expression which
11381 computes the same value, but in a more efficient manner (including
11382 calls to other builtin functions).
11384 The call may contain arguments which need to be evaluated, but
11385 which are not useful to determine the result of the call. In
11386 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11387 COMPOUND_EXPR will be an argument which must be evaluated.
11388 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11389 COMPOUND_EXPR in the chain will contain the tree for the simplified
11390 form of the builtin function call. */
11392 static tree
11393 fold_builtin_strstr (tree s1, tree s2, tree type)
11395 if (!validate_arg (s1, POINTER_TYPE)
11396 || !validate_arg (s2, POINTER_TYPE))
11397 return NULL_TREE;
11398 else
11400 tree fn;
11401 const char *p1, *p2;
11403 p2 = c_getstr (s2);
11404 if (p2 == NULL)
11405 return NULL_TREE;
11407 p1 = c_getstr (s1);
11408 if (p1 != NULL)
11410 const char *r = strstr (p1, p2);
11411 tree tem;
11413 if (r == NULL)
11414 return build_int_cst (TREE_TYPE (s1), 0);
11416 /* Return an offset into the constant string argument. */
11417 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11418 s1, size_int (r - p1));
11419 return fold_convert (type, tem);
11422 /* The argument is const char *, and the result is char *, so we need
11423 a type conversion here to avoid a warning. */
11424 if (p2[0] == '\0')
11425 return fold_convert (type, s1);
11427 if (p2[1] != '\0')
11428 return NULL_TREE;
11430 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11431 if (!fn)
11432 return NULL_TREE;
11434 /* New argument list transforming strstr(s1, s2) to
11435 strchr(s1, s2[0]). */
11436 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
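/* A sketch of the strstr folding above, assuming the implicit strchr
   decl is available: with a constant second argument it performs

       p = strstr (s, "x");    ->    p = (char *) strchr (s, 'x');
       p = strstr (s, "");     ->    p = (char *) s;

   and when both arguments are string constants the call folds directly
   to an offset into S1 (or to a null pointer if there is no match).  */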
11440 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11441 the call, and TYPE is its return type.
11443 Return NULL_TREE if no simplification was possible, otherwise return the
11444 simplified form of the call as a tree.
11446 The simplified form may be a constant or other expression which
11447 computes the same value, but in a more efficient manner (including
11448 calls to other builtin functions).
11450 The call may contain arguments which need to be evaluated, but
11451 which are not useful to determine the result of the call. In
11452 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11453 COMPOUND_EXPR will be an argument which must be evaluated.
11454 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11455 COMPOUND_EXPR in the chain will contain the tree for the simplified
11456 form of the builtin function call. */
11458 static tree
11459 fold_builtin_strchr (tree s1, tree s2, tree type)
11461 if (!validate_arg (s1, POINTER_TYPE)
11462 || !validate_arg (s2, INTEGER_TYPE))
11463 return NULL_TREE;
11464 else
11466 const char *p1;
11468 if (TREE_CODE (s2) != INTEGER_CST)
11469 return NULL_TREE;
11471 p1 = c_getstr (s1);
11472 if (p1 != NULL)
11474 char c;
11475 const char *r;
11476 tree tem;
11478 if (target_char_cast (s2, &c))
11479 return NULL_TREE;
11481 r = strchr (p1, c);
11483 if (r == NULL)
11484 return build_int_cst (TREE_TYPE (s1), 0);
11486 /* Return an offset into the constant string argument. */
11487 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11488 s1, size_int (r - p1));
11489 return fold_convert (type, tem);
11491 return NULL_TREE;
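/* For illustration, when the string is constant and the character fits
   the target character type, the folder above evaluates the search at
   compile time, e.g.

       strchr ("hello", 'l')    folds to    "hello" + 2
       strchr ("hello", 'z')    folds to    a null pointer

   Nothing is done when the first argument is not a string constant.  */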
11495 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11496 the call, and TYPE is its return type.
11498 Return NULL_TREE if no simplification was possible, otherwise return the
11499 simplified form of the call as a tree.
11501 The simplified form may be a constant or other expression which
11502 computes the same value, but in a more efficient manner (including
11503 calls to other builtin functions).
11505 The call may contain arguments which need to be evaluated, but
11506 which are not useful to determine the result of the call. In
11507 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11508 COMPOUND_EXPR will be an argument which must be evaluated.
11509 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11510 COMPOUND_EXPR in the chain will contain the tree for the simplified
11511 form of the builtin function call. */
11513 static tree
11514 fold_builtin_strrchr (tree s1, tree s2, tree type)
11516 if (!validate_arg (s1, POINTER_TYPE)
11517 || !validate_arg (s2, INTEGER_TYPE))
11518 return NULL_TREE;
11519 else
11521 tree fn;
11522 const char *p1;
11524 if (TREE_CODE (s2) != INTEGER_CST)
11525 return NULL_TREE;
11527 p1 = c_getstr (s1);
11528 if (p1 != NULL)
11530 char c;
11531 const char *r;
11532 tree tem;
11534 if (target_char_cast (s2, &c))
11535 return NULL_TREE;
11537 r = strrchr (p1, c);
11539 if (r == NULL)
11540 return build_int_cst (TREE_TYPE (s1), 0);
11542 /* Return an offset into the constant string argument. */
11543 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11544 s1, size_int (r - p1));
11545 return fold_convert (type, tem);
11548 if (! integer_zerop (s2))
11549 return NULL_TREE;
11551 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11552 if (!fn)
11553 return NULL_TREE;
11555 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11556 return build_call_expr (fn, 2, s1, s2);
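/* A sketch of the strrchr folding above, assuming the implicit strchr
   decl is available: with a constant string the result is computed
   directly, e.g. strrchr ("hello", 'l') folds to "hello" + 3, and the
   special case

       strrchr (s, '\0')    becomes    strchr (s, '\0')

   since searching for the terminating NUL from either end is the same.  */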
11560 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11561 to the call, and TYPE is its return type.
11563 Return NULL_TREE if no simplification was possible, otherwise return the
11564 simplified form of the call as a tree.
11566 The simplified form may be a constant or other expression which
11567 computes the same value, but in a more efficient manner (including
11568 calls to other builtin functions).
11570 The call may contain arguments which need to be evaluated, but
11571 which are not useful to determine the result of the call. In
11572 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11573 COMPOUND_EXPR will be an argument which must be evaluated.
11574 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11575 COMPOUND_EXPR in the chain will contain the tree for the simplified
11576 form of the builtin function call. */
11578 static tree
11579 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11581 if (!validate_arg (s1, POINTER_TYPE)
11582 || !validate_arg (s2, POINTER_TYPE))
11583 return NULL_TREE;
11584 else
11586 tree fn;
11587 const char *p1, *p2;
11589 p2 = c_getstr (s2);
11590 if (p2 == NULL)
11591 return NULL_TREE;
11593 p1 = c_getstr (s1);
11594 if (p1 != NULL)
11596 const char *r = strpbrk (p1, p2);
11597 tree tem;
11599 if (r == NULL)
11600 return build_int_cst (TREE_TYPE (s1), 0);
11602 /* Return an offset into the constant string argument. */
11603 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11604 s1, size_int (r - p1));
11605 return fold_convert (type, tem);
11608 if (p2[0] == '\0')
11609 /* strpbrk(x, "") == NULL.
11610 Evaluate and ignore s1 in case it had side-effects. */
11611 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11613 if (p2[1] != '\0')
11614 return NULL_TREE; /* Really call strpbrk. */
11616 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11617 if (!fn)
11618 return NULL_TREE;
11620 /* New argument list transforming strpbrk(s1, s2) to
11621 strchr(s1, s2[0]). */
11622 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
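/* For illustration, assuming the implicit strchr decl is available,
   the folder above performs

       strpbrk (s, "")     ->  NULL, after evaluating S for side-effects
       strpbrk (s, "x")    ->  strchr (s, 'x')

   and folds the call to a constant offset (or null pointer) when both
   arguments are string constants.  */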
11626 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11627 to the call.
11629 Return NULL_TREE if no simplification was possible, otherwise return the
11630 simplified form of the call as a tree.
11632 The simplified form may be a constant or other expression which
11633 computes the same value, but in a more efficient manner (including
11634 calls to other builtin functions).
11636 The call may contain arguments which need to be evaluated, but
11637 which are not useful to determine the result of the call. In
11638 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11639 COMPOUND_EXPR will be an argument which must be evaluated.
11640 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11641 COMPOUND_EXPR in the chain will contain the tree for the simplified
11642 form of the builtin function call. */
11644 static tree
11645 fold_builtin_strcat (tree dst, tree src)
11647 if (!validate_arg (dst, POINTER_TYPE)
11648 || !validate_arg (src, POINTER_TYPE))
11649 return NULL_TREE;
11650 else
11652 const char *p = c_getstr (src);
11654 /* If the string length is zero, return the dst parameter. */
11655 if (p && *p == '\0')
11656 return dst;
11658 return NULL_TREE;
11662 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11663 arguments to the call.
11665 Return NULL_TREE if no simplification was possible, otherwise return the
11666 simplified form of the call as a tree.
11668 The simplified form may be a constant or other expression which
11669 computes the same value, but in a more efficient manner (including
11670 calls to other builtin functions).
11672 The call may contain arguments which need to be evaluated, but
11673 which are not useful to determine the result of the call. In
11674 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11675 COMPOUND_EXPR will be an argument which must be evaluated.
11676 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11677 COMPOUND_EXPR in the chain will contain the tree for the simplified
11678 form of the builtin function call. */
11680 static tree
11681 fold_builtin_strncat (tree dst, tree src, tree len)
11683 if (!validate_arg (dst, POINTER_TYPE)
11684 || !validate_arg (src, POINTER_TYPE)
11685 || !validate_arg (len, INTEGER_TYPE))
11686 return NULL_TREE;
11687 else
11689 const char *p = c_getstr (src);
11691 /* If the requested length is zero, or the src parameter string
11692 length is zero, return the dst parameter. */
11693 if (integer_zerop (len) || (p && *p == '\0'))
11694 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11696 /* If the requested len is greater than or equal to the string
11697 length, call strcat. */
11698 if (TREE_CODE (len) == INTEGER_CST && p
11699 && compare_tree_int (len, strlen (p)) >= 0)
11701 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11703 /* If the replacement _DECL isn't initialized, don't do the
11704 transformation. */
11705 if (!fn)
11706 return NULL_TREE;
11708 return build_call_expr (fn, 2, dst, src);
11710 return NULL_TREE;
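/* A sketch of the strncat folding above, assuming the implicit strcat
   decl is available:

       strncat (d, s, 0)       ->  d, evaluating the other operands
       strncat (d, "", n)      ->  d, evaluating the other operands
       strncat (d, "ab", 5)    ->  strcat (d, "ab")

   where the last form is used whenever the constant bound is at least
   strlen (SRC).  */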
11714 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11715 to the call.
11717 Return NULL_TREE if no simplification was possible, otherwise return the
11718 simplified form of the call as a tree.
11720 The simplified form may be a constant or other expression which
11721 computes the same value, but in a more efficient manner (including
11722 calls to other builtin functions).
11724 The call may contain arguments which need to be evaluated, but
11725 which are not useful to determine the result of the call. In
11726 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11727 COMPOUND_EXPR will be an argument which must be evaluated.
11728 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11729 COMPOUND_EXPR in the chain will contain the tree for the simplified
11730 form of the builtin function call. */
11732 static tree
11733 fold_builtin_strspn (tree s1, tree s2)
11735 if (!validate_arg (s1, POINTER_TYPE)
11736 || !validate_arg (s2, POINTER_TYPE))
11737 return NULL_TREE;
11738 else
11740 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11742 /* If both arguments are constants, evaluate at compile-time. */
11743 if (p1 && p2)
11745 const size_t r = strspn (p1, p2);
11746 return size_int (r);
11749 /* If either argument is "", return NULL_TREE. */
11750 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11751 /* Evaluate and ignore both arguments in case either one has
11752 side-effects. */
11753 return omit_two_operands (size_type_node, size_zero_node,
11754 s1, s2);
11755 return NULL_TREE;
11759 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11760 to the call.
11762 Return NULL_TREE if no simplification was possible, otherwise return the
11763 simplified form of the call as a tree.
11765 The simplified form may be a constant or other expression which
11766 computes the same value, but in a more efficient manner (including
11767 calls to other builtin functions).
11769 The call may contain arguments which need to be evaluated, but
11770 which are not useful to determine the result of the call. In
11771 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11772 COMPOUND_EXPR will be an argument which must be evaluated.
11773 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11774 COMPOUND_EXPR in the chain will contain the tree for the simplified
11775 form of the builtin function call. */
11777 static tree
11778 fold_builtin_strcspn (tree s1, tree s2)
11780 if (!validate_arg (s1, POINTER_TYPE)
11781 || !validate_arg (s2, POINTER_TYPE))
11782 return NULL_TREE;
11783 else
11785 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11787 /* If both arguments are constants, evaluate at compile-time. */
11788 if (p1 && p2)
11790 const size_t r = strcspn (p1, p2);
11791 return size_int (r);
11794 /* If the first argument is "", return NULL_TREE. */
11795 if (p1 && *p1 == '\0')
11797 /* Evaluate and ignore argument s2 in case it has
11798 side-effects. */
11799 return omit_one_operand (size_type_node,
11800 size_zero_node, s2);
11803 /* If the second argument is "", return __builtin_strlen(s1). */
11804 if (p2 && *p2 == '\0')
11806 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11808 /* If the replacement _DECL isn't initialized, don't do the
11809 transformation. */
11810 if (!fn)
11811 return NULL_TREE;
11813 return build_call_expr (fn, 1, s1);
11815 return NULL_TREE;
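/* For illustration, assuming the implicit strlen decl is available,
   the folder above performs

       strcspn ("abc", "xyz")  ->  3          (both arguments constant)
       strcspn ("", s)         ->  0, evaluating S for side-effects
       strcspn (s, "")         ->  strlen (s)

   matching the library semantics of strcspn.  */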
11819 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11820 to the call. IGNORE is true if the value returned
11821    by the builtin will be ignored.  UNLOCKED is true if this is
11822    actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
11823 the known length of the string. Return NULL_TREE if no simplification
11824 was possible. */
11826 tree
11827 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11829 /* If we're using an unlocked function, assume the other unlocked
11830 functions exist explicitly. */
11831 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11832 : implicit_built_in_decls[BUILT_IN_FPUTC];
11833 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11834 : implicit_built_in_decls[BUILT_IN_FWRITE];
11836 /* If the return value is used, don't do the transformation. */
11837 if (!ignore)
11838 return NULL_TREE;
11840 /* Verify the arguments in the original call. */
11841 if (!validate_arg (arg0, POINTER_TYPE)
11842 || !validate_arg (arg1, POINTER_TYPE))
11843 return NULL_TREE;
11845 if (! len)
11846 len = c_strlen (arg0, 0);
11848 /* Get the length of the string passed to fputs. If the length
11849 can't be determined, punt. */
11850 if (!len
11851 || TREE_CODE (len) != INTEGER_CST)
11852 return NULL_TREE;
11854 switch (compare_tree_int (len, 1))
11856     case -1: /* length is 0, delete the call entirely.  */
11857       return omit_one_operand (integer_type_node, integer_zero_node, arg1);
11859 case 0: /* length is 1, call fputc. */
11861 const char *p = c_getstr (arg0);
11863 if (p != NULL)
11865 if (fn_fputc)
11866 return build_call_expr (fn_fputc, 2,
11867 build_int_cst (NULL_TREE, p[0]), arg1);
11868 else
11869 return NULL_TREE;
11872 /* FALLTHROUGH */
11873 case 1: /* length is greater than 1, call fwrite. */
11875 /* If optimizing for size keep fputs. */
11876 if (optimize_function_for_size_p (cfun))
11877 return NULL_TREE;
11878 /* New argument list transforming fputs(string, stream) to
11879 fwrite(string, 1, len, stream). */
11880 if (fn_fwrite)
11881 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11882 else
11883 return NULL_TREE;
11885 default:
11886 gcc_unreachable ();
11888 return NULL_TREE;
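/* A sketch of the fputs folding above, for calls whose result is
   unused and whose string length is known:

       fputs ("", f)       ->  removed, evaluating F for side-effects
       fputs ("x", f)      ->  fputc ('x', f)
       fputs ("abc", f)    ->  fwrite ("abc", 1, 3, f)

   The fwrite form is skipped when optimizing for size, and the
   _unlocked variant maps to fputc_unlocked/fwrite_unlocked.  */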
11891 /* Fold the next_arg or va_start call EXP.  Returns true if an error
11892    was produced, false otherwise.  This is done so that we don't output the error
11893 or warning twice or three times. */
11895 bool
11896 fold_builtin_next_arg (tree exp, bool va_start_p)
11898 tree fntype = TREE_TYPE (current_function_decl);
11899 int nargs = call_expr_nargs (exp);
11900 tree arg;
11902 if (TYPE_ARG_TYPES (fntype) == 0
11903 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11904 == void_type_node))
11906 error ("%<va_start%> used in function with fixed args");
11907 return true;
11910 if (va_start_p)
11912 if (va_start_p && (nargs != 2))
11914 error ("wrong number of arguments to function %<va_start%>");
11915 return true;
11917 arg = CALL_EXPR_ARG (exp, 1);
11919 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11920 when we checked the arguments and if needed issued a warning. */
11921 else
11923 if (nargs == 0)
11925 /* Evidently an out of date version of <stdarg.h>; can't validate
11926 va_start's second argument, but can still work as intended. */
11927 warning (0, "%<__builtin_next_arg%> called without an argument");
11928 return true;
11930 else if (nargs > 1)
11932 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11933 return true;
11935 arg = CALL_EXPR_ARG (exp, 0);
11938 if (TREE_CODE (arg) == SSA_NAME)
11939 arg = SSA_NAME_VAR (arg);
11941 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11942 or __builtin_next_arg (0) the first time we see it, after checking
11943 the arguments and if needed issuing a warning. */
11944 if (!integer_zerop (arg))
11946 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11948 /* Strip off all nops for the sake of the comparison. This
11949 is not quite the same as STRIP_NOPS. It does more.
11950 We must also strip off INDIRECT_EXPR for C++ reference
11951 parameters. */
11952 while (CONVERT_EXPR_P (arg)
11953 || TREE_CODE (arg) == INDIRECT_REF)
11954 arg = TREE_OPERAND (arg, 0);
11955 if (arg != last_parm)
11957       /* FIXME: Sometimes with the tree optimizers we can end up with
11958 	 something that is not the last argument even though the user
11959 	 used the last argument.  We just warn and treat the arg as the
11960 	 last argument, with the result that we may generate wrong code
11961 	 because of it.  */
11962 warning (0, "second parameter of %<va_start%> not last named argument");
11965 /* Undefined by C99 7.15.1.4p4 (va_start):
11966 "If the parameter parmN is declared with the register storage
11967 class, with a function or array type, or with a type that is
11968 not compatible with the type that results after application of
11969 the default argument promotions, the behavior is undefined."
11971 else if (DECL_REGISTER (arg))
11972 warning (0, "undefined behaviour when second parameter of "
11973 "%<va_start%> is declared with %<register%> storage");
11975 /* We want to verify the second parameter just once before the tree
11976 optimizers are run and then avoid keeping it in the tree,
11977 as otherwise we could warn even for correct code like:
11978 void foo (int i, ...)
11979 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11980 if (va_start_p)
11981 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11982 else
11983 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11985 return false;
11989 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11990 ORIG may be null if this is a 2-argument call. We don't attempt to
11991 simplify calls with more than 3 arguments.
11993 Return NULL_TREE if no simplification was possible, otherwise return the
11994 simplified form of the call as a tree. If IGNORED is true, it means that
11995 the caller does not use the returned value of the function. */
11997 static tree
11998 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
12000 tree call, retval;
12001 const char *fmt_str = NULL;
12003 /* Verify the required arguments in the original call. We deal with two
12004 types of sprintf() calls: 'sprintf (str, fmt)' and
12005 'sprintf (dest, "%s", orig)'. */
12006 if (!validate_arg (dest, POINTER_TYPE)
12007 || !validate_arg (fmt, POINTER_TYPE))
12008 return NULL_TREE;
12009 if (orig && !validate_arg (orig, POINTER_TYPE))
12010 return NULL_TREE;
12012 /* Check whether the format is a literal string constant. */
12013 fmt_str = c_getstr (fmt);
12014 if (fmt_str == NULL)
12015 return NULL_TREE;
12017 call = NULL_TREE;
12018 retval = NULL_TREE;
12020 if (!init_target_chars ())
12021 return NULL_TREE;
12023 /* If the format doesn't contain % args or %%, use strcpy. */
12024 if (strchr (fmt_str, target_percent) == NULL)
12026 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
12028 if (!fn)
12029 return NULL_TREE;
12031 /* Don't optimize sprintf (buf, "abc", ptr++). */
12032 if (orig)
12033 return NULL_TREE;
12035 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12036 'format' is known to contain no % formats. */
12037 call = build_call_expr (fn, 2, dest, fmt);
12038 if (!ignored)
12039 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
12042 /* If the format is "%s", use strcpy if the result isn't used. */
12043 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12045 tree fn;
12046 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
12048 if (!fn)
12049 return NULL_TREE;
12051 /* Don't crash on sprintf (str1, "%s"). */
12052 if (!orig)
12053 return NULL_TREE;
12055 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12056 if (!ignored)
12058 retval = c_strlen (orig, 1);
12059 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12060 return NULL_TREE;
12062 call = build_call_expr (fn, 2, dest, orig);
12065 if (call && retval)
12067 retval = fold_convert
12068 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
12069 retval);
12070 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12072 else
12073 return call;
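/* For illustration, assuming the implicit strcpy decl is available,
   the folder above performs

       sprintf (buf, "abc")       ->  strcpy (buf, "abc"), value 3
       sprintf (buf, "%s", str)   ->  strcpy (buf, str)

   where the "%s" form is only used when the sprintf return value is
   ignored (or STR is a string constant of known length).  */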
12076 /* Expand a call EXP to __builtin_object_size. */
12078 static rtx
12079 expand_builtin_object_size (tree exp)
12081 tree ost;
12082 int object_size_type;
12083 tree fndecl = get_callee_fndecl (exp);
12085 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12087 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12088 exp, fndecl);
12089 expand_builtin_trap ();
12090 return const0_rtx;
12093 ost = CALL_EXPR_ARG (exp, 1);
12094 STRIP_NOPS (ost);
12096 if (TREE_CODE (ost) != INTEGER_CST
12097 || tree_int_cst_sgn (ost) < 0
12098 || compare_tree_int (ost, 3) > 0)
12100 error ("%Klast argument of %D is not integer constant between 0 and 3",
12101 exp, fndecl);
12102 expand_builtin_trap ();
12103 return const0_rtx;
12106 object_size_type = tree_low_cst (ost, 0);
12108 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12111 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12112 FCODE is the BUILT_IN_* to use.
12113 Return NULL_RTX if we failed; the caller should emit a normal call,
12114 otherwise try to get the result in TARGET, if convenient (and in
12115 mode MODE if that's convenient). */
12117 static rtx
12118 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12119 enum built_in_function fcode)
12121 tree dest, src, len, size;
12123 if (!validate_arglist (exp,
12124 POINTER_TYPE,
12125 fcode == BUILT_IN_MEMSET_CHK
12126 ? INTEGER_TYPE : POINTER_TYPE,
12127 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12128 return NULL_RTX;
12130 dest = CALL_EXPR_ARG (exp, 0);
12131 src = CALL_EXPR_ARG (exp, 1);
12132 len = CALL_EXPR_ARG (exp, 2);
12133 size = CALL_EXPR_ARG (exp, 3);
12135 if (! host_integerp (size, 1))
12136 return NULL_RTX;
12138 if (host_integerp (len, 1) || integer_all_onesp (size))
12140 tree fn;
12142 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12144 warning_at (tree_nonartificial_location (exp),
12145 0, "%Kcall to %D will always overflow destination buffer",
12146 exp, get_callee_fndecl (exp));
12147 return NULL_RTX;
12150 fn = NULL_TREE;
12151 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12152 mem{cpy,pcpy,move,set} is available. */
12153 switch (fcode)
12155 case BUILT_IN_MEMCPY_CHK:
12156 fn = built_in_decls[BUILT_IN_MEMCPY];
12157 break;
12158 case BUILT_IN_MEMPCPY_CHK:
12159 fn = built_in_decls[BUILT_IN_MEMPCPY];
12160 break;
12161 case BUILT_IN_MEMMOVE_CHK:
12162 fn = built_in_decls[BUILT_IN_MEMMOVE];
12163 break;
12164 case BUILT_IN_MEMSET_CHK:
12165 fn = built_in_decls[BUILT_IN_MEMSET];
12166 break;
12167 default:
12168 break;
12171 if (! fn)
12172 return NULL_RTX;
12174 fn = build_call_expr (fn, 3, dest, src, len);
12175 STRIP_TYPE_NOPS (fn);
12176 while (TREE_CODE (fn) == COMPOUND_EXPR)
12178 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12179 EXPAND_NORMAL);
12180 fn = TREE_OPERAND (fn, 1);
12182 if (TREE_CODE (fn) == CALL_EXPR)
12183 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12184 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12186 else if (fcode == BUILT_IN_MEMSET_CHK)
12187 return NULL_RTX;
12188 else
12190 unsigned int dest_align
12191 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
12193 /* If DEST is not a pointer type, call the normal function. */
12194 if (dest_align == 0)
12195 return NULL_RTX;
12197 /* If SRC and DEST are the same (and not volatile), do nothing. */
12198 if (operand_equal_p (src, dest, 0))
12200 tree expr;
12202 if (fcode != BUILT_IN_MEMPCPY_CHK)
12204 /* Evaluate and ignore LEN in case it has side-effects. */
12205 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12206 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12209 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12210 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12213 /* __memmove_chk special case. */
12214 if (fcode == BUILT_IN_MEMMOVE_CHK)
12216 unsigned int src_align
12217 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
12219 if (src_align == 0)
12220 return NULL_RTX;
12222 /* If src is categorized for a readonly section we can use
12223 normal __memcpy_chk. */
12224 if (readonly_data_expr (src))
12226 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12227 if (!fn)
12228 return NULL_RTX;
12229 fn = build_call_expr (fn, 4, dest, src, len, size);
12230 STRIP_TYPE_NOPS (fn);
12231 while (TREE_CODE (fn) == COMPOUND_EXPR)
12233 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12234 EXPAND_NORMAL);
12235 fn = TREE_OPERAND (fn, 1);
12237 if (TREE_CODE (fn) == CALL_EXPR)
12238 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12239 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12242 return NULL_RTX;
12246 /* Emit warning if a buffer overflow is detected at compile time. */
12248 static void
12249 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12251 int is_strlen = 0;
12252 tree len, size;
12253 location_t loc = tree_nonartificial_location (exp);
12255 switch (fcode)
12257 case BUILT_IN_STRCPY_CHK:
12258 case BUILT_IN_STPCPY_CHK:
12259 /* For __strcat_chk the warning will be emitted only if overflowing
12260 by at least strlen (dest) + 1 bytes. */
12261 case BUILT_IN_STRCAT_CHK:
12262 len = CALL_EXPR_ARG (exp, 1);
12263 size = CALL_EXPR_ARG (exp, 2);
12264 is_strlen = 1;
12265 break;
12266 case BUILT_IN_STRNCAT_CHK:
12267 case BUILT_IN_STRNCPY_CHK:
12268 len = CALL_EXPR_ARG (exp, 2);
12269 size = CALL_EXPR_ARG (exp, 3);
12270 break;
12271 case BUILT_IN_SNPRINTF_CHK:
12272 case BUILT_IN_VSNPRINTF_CHK:
12273 len = CALL_EXPR_ARG (exp, 1);
12274 size = CALL_EXPR_ARG (exp, 3);
12275 break;
12276 default:
12277 gcc_unreachable ();
12280 if (!len || !size)
12281 return;
12283 if (! host_integerp (size, 1) || integer_all_onesp (size))
12284 return;
12286 if (is_strlen)
12288 len = c_strlen (len, 1);
12289 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12290 return;
12292 else if (fcode == BUILT_IN_STRNCAT_CHK)
12294 tree src = CALL_EXPR_ARG (exp, 1);
12295 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12296 return;
12297 src = c_strlen (src, 1);
12298 if (! src || ! host_integerp (src, 1))
12300 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12301 exp, get_callee_fndecl (exp));
12302 return;
12304 else if (tree_int_cst_lt (src, size))
12305 return;
12307 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12308 return;
12310 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12311 exp, get_callee_fndecl (exp));
12314 /* Emit warning if a buffer overflow is detected at compile time
12315 in __sprintf_chk/__vsprintf_chk calls. */
12317 static void
12318 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12320 tree dest, size, len, fmt, flag;
12321 const char *fmt_str;
12322 int nargs = call_expr_nargs (exp);
12324 /* Verify the required arguments in the original call. */
12326 if (nargs < 4)
12327 return;
12328 dest = CALL_EXPR_ARG (exp, 0);
12329 flag = CALL_EXPR_ARG (exp, 1);
12330 size = CALL_EXPR_ARG (exp, 2);
12331 fmt = CALL_EXPR_ARG (exp, 3);
12333 if (! host_integerp (size, 1) || integer_all_onesp (size))
12334 return;
12336 /* Check whether the format is a literal string constant. */
12337 fmt_str = c_getstr (fmt);
12338 if (fmt_str == NULL)
12339 return;
12341 if (!init_target_chars ())
12342 return;
12344 /* If the format doesn't contain % args or %%, we know its size. */
12345 if (strchr (fmt_str, target_percent) == 0)
12346 len = build_int_cstu (size_type_node, strlen (fmt_str));
12347 /* If the format is "%s" and first ... argument is a string literal,
12348 we know it too. */
12349 else if (fcode == BUILT_IN_SPRINTF_CHK
12350 && strcmp (fmt_str, target_percent_s) == 0)
12352 tree arg;
12354 if (nargs < 5)
12355 return;
12356 arg = CALL_EXPR_ARG (exp, 4);
12357 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12358 return;
12360 len = c_strlen (arg, 1);
12361 if (!len || ! host_integerp (len, 1))
12362 return;
12364 else
12365 return;
12367 if (! tree_int_cst_lt (len, size))
12368 warning_at (tree_nonartificial_location (exp),
12369 0, "%Kcall to %D will always overflow destination buffer",
12370 exp, get_callee_fndecl (exp));
12373 /* Emit warning if a free is called with address of a variable. */
12375 static void
12376 maybe_emit_free_warning (tree exp)
12378 tree arg = CALL_EXPR_ARG (exp, 0);
12380 STRIP_NOPS (arg);
12381 if (TREE_CODE (arg) != ADDR_EXPR)
12382 return;
12384 arg = get_base_address (TREE_OPERAND (arg, 0));
12385 if (arg == NULL || INDIRECT_REF_P (arg))
12386 return;
12388 if (SSA_VAR_P (arg))
12389 warning_at (tree_nonartificial_location (exp),
12390 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12391 else
12392 warning_at (tree_nonartificial_location (exp),
12393 0, "%Kattempt to free a non-heap object", exp);
12396 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12397 if possible. */
12399 tree
12400 fold_builtin_object_size (tree ptr, tree ost)
12402 tree ret = NULL_TREE;
12403 int object_size_type;
12405 if (!validate_arg (ptr, POINTER_TYPE)
12406 || !validate_arg (ost, INTEGER_TYPE))
12407 return NULL_TREE;
12409 STRIP_NOPS (ost);
12411 if (TREE_CODE (ost) != INTEGER_CST
12412 || tree_int_cst_sgn (ost) < 0
12413 || compare_tree_int (ost, 3) > 0)
12414 return NULL_TREE;
12416 object_size_type = tree_low_cst (ost, 0);
12418 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12419 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12420 and (size_t) 0 for types 2 and 3. */
12421 if (TREE_SIDE_EFFECTS (ptr))
12422 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12424 if (TREE_CODE (ptr) == ADDR_EXPR)
12425 ret = build_int_cstu (size_type_node,
12426 compute_builtin_object_size (ptr, object_size_type));
12428 else if (TREE_CODE (ptr) == SSA_NAME)
12430 unsigned HOST_WIDE_INT bytes;
12432 /* If object size is not known yet, delay folding until
12433 later. Maybe subsequent passes will help determining
12434 it. */
12435 bytes = compute_builtin_object_size (ptr, object_size_type);
12436 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12437 ? -1 : 0))
12438 ret = build_int_cstu (size_type_node, bytes);
12441 if (ret)
12443 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12444 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12445 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12446 ret = NULL_TREE;
12449 return ret;
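/* For illustration: given "char buf[8];", the call
   __builtin_object_size (&buf[2], 0) folds to 6, the number of bytes
   remaining in the object.  If PTR has side-effects the result is
   (size_t) -1 for types 0 and 1 and (size_t) 0 for types 2 and 3, and
   if the object size is not known yet the folding is deferred so that
   later passes may determine it.  */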
12452 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12453 DEST, SRC, LEN, and SIZE are the arguments to the call.
12454 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12455 code of the builtin. If MAXLEN is not NULL, it is maximum length
12456 passed as third argument. */
12458 tree
12459 fold_builtin_memory_chk (tree fndecl,
12460 tree dest, tree src, tree len, tree size,
12461 tree maxlen, bool ignore,
12462 enum built_in_function fcode)
12464 tree fn;
12466 if (!validate_arg (dest, POINTER_TYPE)
12467 || !validate_arg (src,
12468 (fcode == BUILT_IN_MEMSET_CHK
12469 ? INTEGER_TYPE : POINTER_TYPE))
12470 || !validate_arg (len, INTEGER_TYPE)
12471 || !validate_arg (size, INTEGER_TYPE))
12472 return NULL_TREE;
12474 /* If SRC and DEST are the same (and not volatile), return DEST
12475 (resp. DEST+LEN for __mempcpy_chk). */
12476 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12478 if (fcode != BUILT_IN_MEMPCPY_CHK)
12479 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12480 else
12482 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12483 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
12487 if (! host_integerp (size, 1))
12488 return NULL_TREE;
12490 if (! integer_all_onesp (size))
12492 if (! host_integerp (len, 1))
12494 /* If LEN is not constant, try MAXLEN too.
12495 For MAXLEN only allow optimizing into non-_ocs function
12496 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12497 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12499 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12501 /* (void) __mempcpy_chk () can be optimized into
12502 (void) __memcpy_chk (). */
12503 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12504 if (!fn)
12505 return NULL_TREE;
12507 return build_call_expr (fn, 4, dest, src, len, size);
12509 return NULL_TREE;
12512 else
12513 maxlen = len;
12515 if (tree_int_cst_lt (size, maxlen))
12516 return NULL_TREE;
12519 fn = NULL_TREE;
12520 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12521 mem{cpy,pcpy,move,set} is available. */
12522 switch (fcode)
12524 case BUILT_IN_MEMCPY_CHK:
12525 fn = built_in_decls[BUILT_IN_MEMCPY];
12526 break;
12527 case BUILT_IN_MEMPCPY_CHK:
12528 fn = built_in_decls[BUILT_IN_MEMPCPY];
12529 break;
12530 case BUILT_IN_MEMMOVE_CHK:
12531 fn = built_in_decls[BUILT_IN_MEMMOVE];
12532 break;
12533 case BUILT_IN_MEMSET_CHK:
12534 fn = built_in_decls[BUILT_IN_MEMSET];
12535 break;
12536 default:
12537 break;
12540 if (!fn)
12541 return NULL_TREE;
12543 return build_call_expr (fn, 3, dest, src, len);
12546 /* Fold a call to the __st[rp]cpy_chk builtin.
12547 DEST, SRC, and SIZE are the arguments to the call.
12548 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12549 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12550 strings passed as second argument. */
12552 tree
12553 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12554 tree maxlen, bool ignore,
12555 enum built_in_function fcode)
12557 tree len, fn;
12559 if (!validate_arg (dest, POINTER_TYPE)
12560 || !validate_arg (src, POINTER_TYPE)
12561 || !validate_arg (size, INTEGER_TYPE))
12562 return NULL_TREE;
12564 /* If SRC and DEST are the same (and not volatile), return DEST. */
12565 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12566 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12568 if (! host_integerp (size, 1))
12569 return NULL_TREE;
12571 if (! integer_all_onesp (size))
12573 len = c_strlen (src, 1);
12574 if (! len || ! host_integerp (len, 1))
12576 /* If LEN is not constant, try MAXLEN too.
12577 For MAXLEN only allow optimizing into non-_ocs function
12578 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12579 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12581 if (fcode == BUILT_IN_STPCPY_CHK)
12583 if (! ignore)
12584 return NULL_TREE;
12586 /* If return value of __stpcpy_chk is ignored,
12587 optimize into __strcpy_chk. */
12588 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12589 if (!fn)
12590 return NULL_TREE;
12592 return build_call_expr (fn, 3, dest, src, size);
12595 if (! len || TREE_SIDE_EFFECTS (len))
12596 return NULL_TREE;
12598 /* If c_strlen returned something, but not a constant,
12599 transform __strcpy_chk into __memcpy_chk. */
12600 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12601 if (!fn)
12602 return NULL_TREE;
12604 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12605 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12606 build_call_expr (fn, 4,
12607 dest, src, len, size));
12610 else
12611 maxlen = len;
12613 if (! tree_int_cst_lt (maxlen, size))
12614 return NULL_TREE;
12617 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12618 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12619 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12620 if (!fn)
12621 return NULL_TREE;
12623 return build_call_expr (fn, 2, dest, src);
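/* A sketch of the __strcpy_chk folding above, with a known object size:

       __strcpy_chk (d, "abc", 8)   ->  strcpy (d, "abc")
       __strcpy_chk (d, s, 8)       ->  (char *) __memcpy_chk (d, s, len + 1, 8)

   The second form is used when c_strlen gives a non-constant but
   side-effect free length LEN; a (void) __stpcpy_chk call with unknown
   length degrades to __strcpy_chk instead.  */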
12626 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12627 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12628 length passed as third argument. */
12630 tree
12631 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12632 tree maxlen)
12634 tree fn;
12636 if (!validate_arg (dest, POINTER_TYPE)
12637 || !validate_arg (src, POINTER_TYPE)
12638 || !validate_arg (len, INTEGER_TYPE)
12639 || !validate_arg (size, INTEGER_TYPE))
12640 return NULL_TREE;
12642 if (! host_integerp (size, 1))
12643 return NULL_TREE;
12645 if (! integer_all_onesp (size))
12647 if (! host_integerp (len, 1))
12649 /* If LEN is not constant, try MAXLEN too.
12650 For MAXLEN only allow optimizing into non-_ocs function
12651 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12652 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12653 return NULL_TREE;
12655 else
12656 maxlen = len;
12658 if (tree_int_cst_lt (size, maxlen))
12659 return NULL_TREE;
12662 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12663 fn = built_in_decls[BUILT_IN_STRNCPY];
12664 if (!fn)
12665 return NULL_TREE;
12667 return build_call_expr (fn, 3, dest, src, len);
12670 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12671 are the arguments to the call. */
12673 static tree
12674 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12676 tree fn;
12677 const char *p;
12679 if (!validate_arg (dest, POINTER_TYPE)
12680 || !validate_arg (src, POINTER_TYPE)
12681 || !validate_arg (size, INTEGER_TYPE))
12682 return NULL_TREE;
12684 p = c_getstr (src);
12685 /* If the SRC parameter is "", return DEST. */
12686 if (p && *p == '\0')
12687 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12689 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12690 return NULL_TREE;
12692 /* If __builtin_strcat_chk is used, assume strcat is available. */
12693 fn = built_in_decls[BUILT_IN_STRCAT];
12694 if (!fn)
12695 return NULL_TREE;
12697 return build_call_expr (fn, 2, dest, src);
12700 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12701 LEN, and SIZE. */
12703 static tree
12704 fold_builtin_strncat_chk (tree fndecl,
12705 tree dest, tree src, tree len, tree size)
12707 tree fn;
12708 const char *p;
12710 if (!validate_arg (dest, POINTER_TYPE)
12711 || !validate_arg (src, POINTER_TYPE)
12712       || !validate_arg (len, INTEGER_TYPE)
12713 || !validate_arg (size, INTEGER_TYPE))
12714 return NULL_TREE;
12716 p = c_getstr (src);
12717 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12718 if (p && *p == '\0')
12719 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12720 else if (integer_zerop (len))
12721 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12723 if (! host_integerp (size, 1))
12724 return NULL_TREE;
12726 if (! integer_all_onesp (size))
12728 tree src_len = c_strlen (src, 1);
12729 if (src_len
12730 && host_integerp (src_len, 1)
12731 && host_integerp (len, 1)
12732 && ! tree_int_cst_lt (len, src_len))
12734 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12735 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12736 if (!fn)
12737 return NULL_TREE;
12739 return build_call_expr (fn, 3, dest, src, size);
12741 return NULL_TREE;
12744 /* If __builtin_strncat_chk is used, assume strncat is available. */
12745 fn = built_in_decls[BUILT_IN_STRNCAT];
12746 if (!fn)
12747 return NULL_TREE;
12749 return build_call_expr (fn, 3, dest, src, len);
12752 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12753 a normal call should be emitted rather than expanding the function
12754 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12756 static tree
12757 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12759 tree dest, size, len, fn, fmt, flag;
12760 const char *fmt_str;
12761 int nargs = call_expr_nargs (exp);
12763 /* Verify the required arguments in the original call. */
12764 if (nargs < 4)
12765 return NULL_TREE;
12766 dest = CALL_EXPR_ARG (exp, 0);
12767 if (!validate_arg (dest, POINTER_TYPE))
12768 return NULL_TREE;
12769 flag = CALL_EXPR_ARG (exp, 1);
12770 if (!validate_arg (flag, INTEGER_TYPE))
12771 return NULL_TREE;
12772 size = CALL_EXPR_ARG (exp, 2);
12773 if (!validate_arg (size, INTEGER_TYPE))
12774 return NULL_TREE;
12775 fmt = CALL_EXPR_ARG (exp, 3);
12776 if (!validate_arg (fmt, POINTER_TYPE))
12777 return NULL_TREE;
12779 if (! host_integerp (size, 1))
12780 return NULL_TREE;
12782 len = NULL_TREE;
12784 if (!init_target_chars ())
12785 return NULL_TREE;
12787 /* Check whether the format is a literal string constant. */
12788 fmt_str = c_getstr (fmt);
12789 if (fmt_str != NULL)
12791 /* If the format doesn't contain % args or %%, we know the size. */
12792 if (strchr (fmt_str, target_percent) == 0)
12794 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12795 len = build_int_cstu (size_type_node, strlen (fmt_str));
12797 /* If the format is "%s" and first ... argument is a string literal,
12798 we know the size too. */
12799 else if (fcode == BUILT_IN_SPRINTF_CHK
12800 && strcmp (fmt_str, target_percent_s) == 0)
12802 tree arg;
12804 if (nargs == 5)
12806 arg = CALL_EXPR_ARG (exp, 4);
12807 if (validate_arg (arg, POINTER_TYPE))
12809 len = c_strlen (arg, 1);
12810 if (! len || ! host_integerp (len, 1))
12811 len = NULL_TREE;
12817 if (! integer_all_onesp (size))
12819 if (! len || ! tree_int_cst_lt (len, size))
12820 return NULL_TREE;
12823 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12824 or if format doesn't contain % chars or is "%s". */
12825 if (! integer_zerop (flag))
12827 if (fmt_str == NULL)
12828 return NULL_TREE;
12829 if (strchr (fmt_str, target_percent) != NULL
12830 && strcmp (fmt_str, target_percent_s))
12831 return NULL_TREE;
12834 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12835 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12836 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12837 if (!fn)
12838 return NULL_TREE;
12840 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12843 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12844 a normal call should be emitted rather than expanding the function
12845 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12846 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12847 passed as second argument. */
12849 tree
12850 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12851 enum built_in_function fcode)
12853 tree dest, size, len, fn, fmt, flag;
12854 const char *fmt_str;
12856 /* Verify the required arguments in the original call. */
12857 if (call_expr_nargs (exp) < 5)
12858 return NULL_TREE;
12859 dest = CALL_EXPR_ARG (exp, 0);
12860 if (!validate_arg (dest, POINTER_TYPE))
12861 return NULL_TREE;
12862 len = CALL_EXPR_ARG (exp, 1);
12863 if (!validate_arg (len, INTEGER_TYPE))
12864 return NULL_TREE;
12865 flag = CALL_EXPR_ARG (exp, 2);
12866 if (!validate_arg (flag, INTEGER_TYPE))
12867 return NULL_TREE;
12868 size = CALL_EXPR_ARG (exp, 3);
12869 if (!validate_arg (size, INTEGER_TYPE))
12870 return NULL_TREE;
12871 fmt = CALL_EXPR_ARG (exp, 4);
12872 if (!validate_arg (fmt, POINTER_TYPE))
12873 return NULL_TREE;
12875 if (! host_integerp (size, 1))
12876 return NULL_TREE;
12878 if (! integer_all_onesp (size))
12880 if (! host_integerp (len, 1))
12882 /* If LEN is not constant, try MAXLEN too.
12883 For MAXLEN only allow optimizing into non-_ocs function
12884 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12885 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12886 return NULL_TREE;
12888 else
12889 maxlen = len;
12891 if (tree_int_cst_lt (size, maxlen))
12892 return NULL_TREE;
12895 if (!init_target_chars ())
12896 return NULL_TREE;
12898 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12899 or if format doesn't contain % chars or is "%s". */
12900 if (! integer_zerop (flag))
12902 fmt_str = c_getstr (fmt);
12903 if (fmt_str == NULL)
12904 return NULL_TREE;
12905 if (strchr (fmt_str, target_percent) != NULL
12906 && strcmp (fmt_str, target_percent_s))
12907 return NULL_TREE;
12910 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12911 available. */
12912 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12913 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12914 if (!fn)
12915 return NULL_TREE;
12917 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12920 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12921 FMT and ARG are the arguments to the call; we don't fold cases with
12922 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12924 Return NULL_TREE if no simplification was possible, otherwise return the
12925 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12926 code of the function to be simplified. */
12928 static tree
12929 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12930 enum built_in_function fcode)
12932 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12933 const char *fmt_str = NULL;
12935 /* If the return value is used, don't do the transformation. */
12936 if (! ignore)
12937 return NULL_TREE;
12939 /* Verify the required arguments in the original call. */
12940 if (!validate_arg (fmt, POINTER_TYPE))
12941 return NULL_TREE;
12943 /* Check whether the format is a literal string constant. */
12944 fmt_str = c_getstr (fmt);
12945 if (fmt_str == NULL)
12946 return NULL_TREE;
12948 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12950 /* If we're using an unlocked function, assume the other
12951 unlocked functions also exist. */
12952 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12953 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12955 else
12957 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12958 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12961 if (!init_target_chars ())
12962 return NULL_TREE;
12964 if (strcmp (fmt_str, target_percent_s) == 0
12965 || strchr (fmt_str, target_percent) == NULL)
12967 const char *str;
12969 if (strcmp (fmt_str, target_percent_s) == 0)
12971 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12972 return NULL_TREE;
12974 if (!arg || !validate_arg (arg, POINTER_TYPE))
12975 return NULL_TREE;
12977 str = c_getstr (arg);
12978 if (str == NULL)
12979 return NULL_TREE;
12981 else
12983 /* The format specifier doesn't contain any '%' characters. */
12984 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12985 && arg)
12986 return NULL_TREE;
12987 str = fmt_str;
12990 /* If the string was "", printf does nothing. */
12991 if (str[0] == '\0')
12992 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12994 /* If the string has a length of 1, call putchar. */
12995 if (str[1] == '\0')
12997 /* Given printf ("c") (where c is any one character),
12998 convert "c"[0] to an int and pass that to the replacement
12999 function. */
13000 newarg = build_int_cst (NULL_TREE, str[0]);
13001 if (fn_putchar)
13002 call = build_call_expr (fn_putchar, 1, newarg);
13004 else
13006 /* If the string was "string\n", call puts("string"). */
13007 size_t len = strlen (str);
13008 if ((unsigned char)str[len - 1] == target_newline)
13010 /* Create a NUL-terminated string that's one char shorter
13011 than the original, stripping off the trailing '\n'. */
13012 char *newstr = XALLOCAVEC (char, len);
13013 memcpy (newstr, str, len - 1);
13014 newstr[len - 1] = 0;
13016 newarg = build_string_literal (len, newstr);
13017 if (fn_puts)
13018 call = build_call_expr (fn_puts, 1, newarg);
13020 else
13021 /* We'd like to arrange to call fputs(string,stdout) here,
13022 but we need stdout and don't have a way to get it yet. */
13023 return NULL_TREE;
13027 /* The other optimizations can be done only on the non-va_list variants. */
13028 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13029 return NULL_TREE;
13031 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13032 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13034 if (!arg || !validate_arg (arg, POINTER_TYPE))
13035 return NULL_TREE;
13036 if (fn_puts)
13037 call = build_call_expr (fn_puts, 1, arg);
13040 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13041 else if (strcmp (fmt_str, target_percent_c) == 0)
13043 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13044 return NULL_TREE;
13045 if (fn_putchar)
13046 call = build_call_expr (fn_putchar, 1, arg);
13049 if (!call)
13050 return NULL_TREE;
13052 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
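/* Illustrative sketch (not part of the original source, guarded out of the
   build): user-level calls the folding above simplifies when the return
   value is ignored.  */
#if 0
#include <stdio.h>
void
demo_printf_fold (void)
{
  printf ("hello\n");     /* foldable to puts ("hello")   */
  printf ("%s\n", "hi");  /* foldable to puts ("hi")      */
  printf ("%c", 'x');     /* foldable to putchar ('x')    */
  printf ("");            /* folds away entirely          */
}
#endif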
13055 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
13056 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13057 more than 3 arguments, and ARG may be null in the 2-argument case.
13059 Return NULL_TREE if no simplification was possible, otherwise return the
13060 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13061 code of the function to be simplified. */
13063 static tree
13064 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
13065 enum built_in_function fcode)
13067 tree fn_fputc, fn_fputs, call = NULL_TREE;
13068 const char *fmt_str = NULL;
13070 /* If the return value is used, don't do the transformation. */
13071 if (! ignore)
13072 return NULL_TREE;
13074 /* Verify the required arguments in the original call. */
13075 if (!validate_arg (fp, POINTER_TYPE))
13076 return NULL_TREE;
13077 if (!validate_arg (fmt, POINTER_TYPE))
13078 return NULL_TREE;
13080 /* Check whether the format is a literal string constant. */
13081 fmt_str = c_getstr (fmt);
13082 if (fmt_str == NULL)
13083 return NULL_TREE;
13085 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13087 /* If we're using an unlocked function, assume the other
13088 unlocked functions also exist. */
13089 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
13090 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
13092 else
13094 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
13095 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
13098 if (!init_target_chars ())
13099 return NULL_TREE;
13101 /* If the format doesn't contain % args or %%, fold to fputs (the empty format folds away). */
13102 if (strchr (fmt_str, target_percent) == NULL)
13104 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13105 && arg)
13106 return NULL_TREE;
13108 /* If the format specifier was "", fprintf does nothing. */
13109 if (fmt_str[0] == '\0')
13111 /* If FP has side-effects, just wait until gimplification is
13112 done. */
13113 if (TREE_SIDE_EFFECTS (fp))
13114 return NULL_TREE;
13116 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13119 /* When "string" doesn't contain %, replace all cases of
13120 fprintf (fp, string) with fputs (string, fp). The fputs
13121 builtin will take care of special cases like length == 1. */
13122 if (fn_fputs)
13123 call = build_call_expr (fn_fputs, 2, fmt, fp);
13126 /* The other optimizations can be done only on the non-va_list variants. */
13127 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13128 return NULL_TREE;
13130 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13131 else if (strcmp (fmt_str, target_percent_s) == 0)
13133 if (!arg || !validate_arg (arg, POINTER_TYPE))
13134 return NULL_TREE;
13135 if (fn_fputs)
13136 call = build_call_expr (fn_fputs, 2, arg, fp);
13139 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13140 else if (strcmp (fmt_str, target_percent_c) == 0)
13142 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13143 return NULL_TREE;
13144 if (fn_fputc)
13145 call = build_call_expr (fn_fputc, 2, arg, fp);
13148 if (!call)
13149 return NULL_TREE;
13150 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
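/* Illustrative sketch (not part of the original source, guarded out of the
   build): user-level calls the folding above simplifies when the return
   value is ignored.  */
#if 0
#include <stdio.h>
void
demo_fprintf_fold (FILE *fp)
{
  fprintf (fp, "hello");     /* foldable to fputs ("hello", fp) */
  fprintf (fp, "%s", "hi");  /* foldable to fputs ("hi", fp)    */
  fprintf (fp, "%c", 'x');   /* foldable to fputc ('x', fp)     */
}
#endif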
13153 /* Initialize format string characters in the target charset. */
13155 static bool
13156 init_target_chars (void)
13158 static bool init;
13159 if (!init)
13161 target_newline = lang_hooks.to_target_charset ('\n');
13162 target_percent = lang_hooks.to_target_charset ('%');
13163 target_c = lang_hooks.to_target_charset ('c');
13164 target_s = lang_hooks.to_target_charset ('s');
13165 if (target_newline == 0 || target_percent == 0 || target_c == 0
13166 || target_s == 0)
13167 return false;
13169 target_percent_c[0] = target_percent;
13170 target_percent_c[1] = target_c;
13171 target_percent_c[2] = '\0';
13173 target_percent_s[0] = target_percent;
13174 target_percent_s[1] = target_s;
13175 target_percent_s[2] = '\0';
13177 target_percent_s_newline[0] = target_percent;
13178 target_percent_s_newline[1] = target_s;
13179 target_percent_s_newline[2] = target_newline;
13180 target_percent_s_newline[3] = '\0';
13182 init = true;
13184 return true;
13187 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13188 and no overflow/underflow occurred. INEXACT is true if M was not
13189 exactly calculated. TYPE is the tree type for the result. This
13190 function assumes that you cleared the MPFR flags and then
13191 calculated M to see if anything subsequently set a flag prior to
13192 entering this function. Return NULL_TREE if any checks fail. */
13194 static tree
13195 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13197 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13198 overflow/underflow occurred. If -frounding-math, proceed iff the
13199 result of calling FUNC was exact. */
13200 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13201 && (!flag_rounding_math || !inexact))
13203 REAL_VALUE_TYPE rr;
13205 real_from_mpfr (&rr, m, type, GMP_RNDN);
13206 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13207 checking for overflow/underflow. If the REAL_VALUE_TYPE is zero
13208 but the mpfr_t is not, then we underflowed in the
13209 conversion. */
13210 if (real_isfinite (&rr)
13211 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13213 REAL_VALUE_TYPE rmode;
13215 real_convert (&rmode, TYPE_MODE (type), &rr);
13216 /* Proceed iff the specified mode can hold the value. */
13217 if (real_identical (&rmode, &rr))
13218 return build_real (type, rmode);
13221 return NULL_TREE;
13224 #ifdef HAVE_mpc
13225 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13226 number and no overflow/underflow occurred. INEXACT is true if M
13227 was not exactly calculated. TYPE is the tree type for the result.
13228 This function assumes that you cleared the MPFR flags and then
13229 calculated M to see if anything subsequently set a flag prior to
13230 entering this function. Return NULL_TREE if any checks fail. */
13232 static tree
13233 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact)
13235 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13236 overflow/underflow occurred. If -frounding-math, proceed iff the
13237 result of calling FUNC was exact. */
13238 if (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13239 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13240 && (!flag_rounding_math || !inexact))
13242 REAL_VALUE_TYPE re, im;
13244 real_from_mpfr (&re, mpc_realref (m), type, GMP_RNDN);
13245 real_from_mpfr (&im, mpc_imagref (m), type, GMP_RNDN);
13246 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13247 checking for overflow/underflow. If the REAL_VALUE_TYPE is zero
13248 but the mpfr_t is not, then we underflowed in the
13249 conversion. */
13250 if (real_isfinite (&re) && real_isfinite (&im)
13251 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13252 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0))
13254 REAL_VALUE_TYPE re_mode, im_mode;
13256 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13257 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13258 /* Proceed iff the specified mode can hold the value. */
13259 if (real_identical (&re_mode, &re) && real_identical (&im_mode, &im))
13260 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13261 build_real (TREE_TYPE (type), im_mode));
13264 return NULL_TREE;
13266 #endif /* HAVE_mpc */
13268 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13269 FUNC on it and return the resulting value as a tree with type TYPE.
13270 If MIN and/or MAX are not NULL, then the supplied ARG must be
13271 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13272 acceptable values, otherwise they are not. The mpfr precision is
13273 set to the precision of TYPE. We assume that function FUNC returns
13274 zero if the result could be calculated exactly within the requested
13275 precision. */
13277 static tree
13278 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13279 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13280 bool inclusive)
13282 tree result = NULL_TREE;
13284 STRIP_NOPS (arg);
13286 /* To proceed, MPFR must exactly represent the target floating point
13287 format, which only happens when the target base equals two. */
13288 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13289 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13291 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13293 if (real_isfinite (ra)
13294 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13295 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13297 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13298 const int prec = fmt->p;
13299 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13300 int inexact;
13301 mpfr_t m;
13303 mpfr_init2 (m, prec);
13304 mpfr_from_real (m, ra, GMP_RNDN);
13305 mpfr_clear_flags ();
13306 inexact = func (m, m, rnd);
13307 result = do_mpfr_ckconv (m, type, inexact);
13308 mpfr_clear (m);
13312 return result;
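/* Illustrative sketch (not part of the original source, guarded out of the
   build): roughly how callers elsewhere in this file use do_mpfr_arg1 to
   fold one-argument math builtins with constant arguments; ARG, TYPE and
   RESULT are assumed locals.  */
#if 0
/* sin has no domain restriction, so MIN and MAX are NULL.  */
result = do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, /*inclusive=*/false);
/* sqrt requires a non-negative argument, so dconst0 is an inclusive
   lower bound.  */
result = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, /*inclusive=*/true);
#endif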
13315 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13316 FUNC on it and return the resulting value as a tree with type TYPE.
13317 The mpfr precision is set to the precision of TYPE. We assume that
13318 function FUNC returns zero if the result could be calculated
13319 exactly within the requested precision. */
13321 static tree
13322 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13323 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13325 tree result = NULL_TREE;
13327 STRIP_NOPS (arg1);
13328 STRIP_NOPS (arg2);
13330 /* To proceed, MPFR must exactly represent the target floating point
13331 format, which only happens when the target base equals two. */
13332 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13333 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13334 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13336 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13337 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13339 if (real_isfinite (ra1) && real_isfinite (ra2))
13341 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13342 const int prec = fmt->p;
13343 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13344 int inexact;
13345 mpfr_t m1, m2;
13347 mpfr_inits2 (prec, m1, m2, NULL);
13348 mpfr_from_real (m1, ra1, GMP_RNDN);
13349 mpfr_from_real (m2, ra2, GMP_RNDN);
13350 mpfr_clear_flags ();
13351 inexact = func (m1, m1, m2, rnd);
13352 result = do_mpfr_ckconv (m1, type, inexact);
13353 mpfr_clears (m1, m2, NULL);
13357 return result;
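/* Illustrative sketch (not part of the original source, guarded out of the
   build): roughly how do_mpfr_arg2 folds two-argument math builtins such as
   atan2 and hypot when both arguments are REAL_CSTs; ARG0, ARG1, TYPE and
   RESULT are assumed locals.  */
#if 0
result = do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
result = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot);
#endif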
13360 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13361 FUNC on it and return the resulting value as a tree with type TYPE.
13362 The mpfr precision is set to the precision of TYPE. We assume that
13363 function FUNC returns zero if the result could be calculated
13364 exactly within the requested precision. */
13366 static tree
13367 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13368 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13370 tree result = NULL_TREE;
13372 STRIP_NOPS (arg1);
13373 STRIP_NOPS (arg2);
13374 STRIP_NOPS (arg3);
13376 /* To proceed, MPFR must exactly represent the target floating point
13377 format, which only happens when the target base equals two. */
13378 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13379 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13380 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13381 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13383 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13384 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13385 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13387 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13389 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13390 const int prec = fmt->p;
13391 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13392 int inexact;
13393 mpfr_t m1, m2, m3;
13395 mpfr_inits2 (prec, m1, m2, m3, NULL);
13396 mpfr_from_real (m1, ra1, GMP_RNDN);
13397 mpfr_from_real (m2, ra2, GMP_RNDN);
13398 mpfr_from_real (m3, ra3, GMP_RNDN);
13399 mpfr_clear_flags ();
13400 inexact = func (m1, m1, m2, m3, rnd);
13401 result = do_mpfr_ckconv (m1, type, inexact);
13402 mpfr_clears (m1, m2, m3, NULL);
13406 return result;
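/* Illustrative sketch (not part of the original source, guarded out of the
   build): do_mpfr_arg3 suits fma-style builtins; ARG0..ARG2, TYPE and
   RESULT are assumed locals.  */
#if 0
result = do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
#endif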
13409 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13410 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13411 If ARG_SINP and ARG_COSP are NULL then the result is returned
13412 as a complex value.
13413 The type is taken from the type of ARG and is used for setting the
13414 precision of the calculation and results. */
13416 static tree
13417 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13419 tree const type = TREE_TYPE (arg);
13420 tree result = NULL_TREE;
13422 STRIP_NOPS (arg);
13424 /* To proceed, MPFR must exactly represent the target floating point
13425 format, which only happens when the target base equals two. */
13426 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13427 && TREE_CODE (arg) == REAL_CST
13428 && !TREE_OVERFLOW (arg))
13430 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13432 if (real_isfinite (ra))
13434 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13435 const int prec = fmt->p;
13436 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13437 tree result_s, result_c;
13438 int inexact;
13439 mpfr_t m, ms, mc;
13441 mpfr_inits2 (prec, m, ms, mc, NULL);
13442 mpfr_from_real (m, ra, GMP_RNDN);
13443 mpfr_clear_flags ();
13444 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13445 result_s = do_mpfr_ckconv (ms, type, inexact);
13446 result_c = do_mpfr_ckconv (mc, type, inexact);
13447 mpfr_clears (m, ms, mc, NULL);
13448 if (result_s && result_c)
13450 /* If we are to return in a complex value do so. */
13451 if (!arg_sinp && !arg_cosp)
13452 return build_complex (build_complex_type (type),
13453 result_c, result_s);
13455 /* Dereference the sin/cos pointer arguments. */
13456 arg_sinp = build_fold_indirect_ref (arg_sinp);
13457 arg_cosp = build_fold_indirect_ref (arg_cosp);
13458 /* Proceed iff valid pointer types were passed in. */
13459 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13460 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13462 /* Set the values. */
13463 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13464 result_s);
13465 TREE_SIDE_EFFECTS (result_s) = 1;
13466 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13467 result_c);
13468 TREE_SIDE_EFFECTS (result_c) = 1;
13469 /* Combine the assignments into a compound expr. */
13470 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13471 result_s, result_c));
13476 return result;
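/* Illustrative sketch (not part of the original source, guarded out of the
   build): the two ways do_mpfr_sincos is typically invoked; ARG, SINP, COSP
   and RESULT are assumed locals.  */
#if 0
/* sincos (x, &s, &c) with constant X: store both values through the
   pointer arguments.  */
result = do_mpfr_sincos (arg, sinp, cosp);
/* cexpi (x) with constant X: pass NULL pointers and get a COMPLEX_CST
   with real part cos (x) and imaginary part sin (x).  */
result = do_mpfr_sincos (arg, NULL_TREE, NULL_TREE);
#endif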
13479 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13480 two-argument mpfr order N Bessel function FUNC on them and return
13481 the resulting value as a tree with type TYPE. The mpfr precision
13482 is set to the precision of TYPE. We assume that function FUNC
13483 returns zero if the result could be calculated exactly within the
13484 requested precision. */
13485 static tree
13486 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13487 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13488 const REAL_VALUE_TYPE *min, bool inclusive)
13490 tree result = NULL_TREE;
13492 STRIP_NOPS (arg1);
13493 STRIP_NOPS (arg2);
13495 /* To proceed, MPFR must exactly represent the target floating point
13496 format, which only happens when the target base equals two. */
13497 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13498 && host_integerp (arg1, 0)
13499 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13501 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13502 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13504 if (n == (long)n
13505 && real_isfinite (ra)
13506 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13508 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13509 const int prec = fmt->p;
13510 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13511 int inexact;
13512 mpfr_t m;
13514 mpfr_init2 (m, prec);
13515 mpfr_from_real (m, ra, GMP_RNDN);
13516 mpfr_clear_flags ();
13517 inexact = func (m, n, m, rnd);
13518 result = do_mpfr_ckconv (m, type, inexact);
13519 mpfr_clear (m);
13523 return result;
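/* Illustrative sketch (not part of the original source, guarded out of the
   build): folding jn/yn when the order is an INTEGER_CST and the argument a
   REAL_CST; yn additionally needs a strictly positive argument.  ARG0, ARG1,
   TYPE and RESULT are assumed locals.  */
#if 0
result = do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, /*inclusive=*/false);
result = do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn, &dconst0, /*inclusive=*/false);
#endif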
13526 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13527 the pointer *(ARG_QUO) and return the result. The type is taken
13528 from the type of ARG0 and is used for setting the precision of the
13529 calculation and results. */
13531 static tree
13532 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13534 tree const type = TREE_TYPE (arg0);
13535 tree result = NULL_TREE;
13537 STRIP_NOPS (arg0);
13538 STRIP_NOPS (arg1);
13540 /* To proceed, MPFR must exactly represent the target floating point
13541 format, which only happens when the target base equals two. */
13542 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13543 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13544 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13546 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13547 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13549 if (real_isfinite (ra0) && real_isfinite (ra1))
13551 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13552 const int prec = fmt->p;
13553 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13554 tree result_rem;
13555 long integer_quo;
13556 mpfr_t m0, m1;
13558 mpfr_inits2 (prec, m0, m1, NULL);
13559 mpfr_from_real (m0, ra0, GMP_RNDN);
13560 mpfr_from_real (m1, ra1, GMP_RNDN);
13561 mpfr_clear_flags ();
13562 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13563 /* Remquo is independent of the rounding mode, so pass
13564 inexact=0 to do_mpfr_ckconv(). */
13565 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13566 mpfr_clears (m0, m1, NULL);
13567 if (result_rem)
13569 /* MPFR calculates quo in the host's long so it may
13570 return more bits in quo than the target int can hold
13571 if sizeof(host long) > sizeof(target int). This can
13572 happen even for native compilers in LP64 mode. In
13573 these cases, modulo the quo value with the largest
13574 number that the target int can hold while leaving one
13575 bit for the sign. */
13576 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13577 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13579 /* Dereference the quo pointer argument. */
13580 arg_quo = build_fold_indirect_ref (arg_quo);
13581 /* Proceed iff a valid pointer type was passed in. */
13582 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13584 /* Set the value. */
13585 tree result_quo = fold_build2 (MODIFY_EXPR,
13586 TREE_TYPE (arg_quo), arg_quo,
13587 build_int_cst (NULL, integer_quo));
13588 TREE_SIDE_EFFECTS (result_quo) = 1;
13589 /* Combine the quo assignment with the rem. */
13590 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13591 result_quo, result_rem));
13596 return result;
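/* Illustrative sketch (not part of the original source, guarded out of the
   build): the user-level effect of the remquo folding above.  With constant
   floating-point operands the remainder becomes a compile-time constant and
   the quotient bits are stored through *QUO as a side effect.  */
#if 0
#include <math.h>
double
demo_remquo_fold (int *quo)
{
  return remquo (5.0, 3.0, quo);
}
#endif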
13599 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13600 resulting value as a tree with type TYPE. The mpfr precision is
13601 set to the precision of TYPE. We assume that this mpfr function
13602 returns zero if the result could be calculated exactly within the
13603 requested precision. In addition, the integer pointer represented
13604 by ARG_SG will be dereferenced and set to the appropriate signgam
13605 (-1,1) value. */
13607 static tree
13608 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13610 tree result = NULL_TREE;
13612 STRIP_NOPS (arg);
13614 /* To proceed, MPFR must exactly represent the target floating point
13615 format, which only happens when the target base equals two. Also
13616 verify ARG is a constant and that ARG_SG is an int pointer. */
13617 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13618 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13619 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13620 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13622 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13624 /* In addition to NaN and Inf, the argument cannot be zero or a
13625 negative integer. */
13626 if (real_isfinite (ra)
13627 && ra->cl != rvc_zero
13628 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13630 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13631 const int prec = fmt->p;
13632 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13633 int inexact, sg;
13634 mpfr_t m;
13635 tree result_lg;
13637 mpfr_init2 (m, prec);
13638 mpfr_from_real (m, ra, GMP_RNDN);
13639 mpfr_clear_flags ();
13640 inexact = mpfr_lgamma (m, &sg, m, rnd);
13641 result_lg = do_mpfr_ckconv (m, type, inexact);
13642 mpfr_clear (m);
13643 if (result_lg)
13645 tree result_sg;
13647 /* Dereference the arg_sg pointer argument. */
13648 arg_sg = build_fold_indirect_ref (arg_sg);
13649 /* Assign the signgam value into *arg_sg. */
13650 result_sg = fold_build2 (MODIFY_EXPR,
13651 TREE_TYPE (arg_sg), arg_sg,
13652 build_int_cst (NULL, sg));
13653 TREE_SIDE_EFFECTS (result_sg) = 1;
13654 /* Combine the signgam assignment with the lgamma result. */
13655 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13656 result_sg, result_lg));
13661 return result;
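/* Illustrative sketch (not part of the original source, guarded out of the
   build): lgamma_r with a constant, strictly positive argument folds to the
   constant log-gamma value plus an assignment of the sign (+1 here) through
   *SIGNGAMP; assumes a libm that provides lgamma_r.  */
#if 0
#include <math.h>
double
demo_lgamma_r_fold (int *signgamp)
{
  return lgamma_r (2.5, signgamp);
}
#endif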
13664 #ifdef HAVE_mpc
13665 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13666 function FUNC on it and return the resulting value as a tree with
13667 type TYPE. The mpfr precision is set to the precision of TYPE. We
13668 assume that function FUNC returns zero if the result could be
13669 calculated exactly within the requested precision. */
13671 static tree
13672 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13674 tree result = NULL_TREE;
13676 STRIP_NOPS (arg);
13678 /* To proceed, MPFR must exactly represent the target floating point
13679 format, which only happens when the target base equals two. */
13680 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13681 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13682 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13684 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13685 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13687 if (real_isfinite (re) && real_isfinite (im))
13689 const struct real_format *const fmt =
13690 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13691 const int prec = fmt->p;
13692 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13693 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13694 int inexact;
13695 mpc_t m;
13697 mpc_init2 (m, prec);
13698 mpfr_from_real (mpc_realref(m), re, rnd);
13699 mpfr_from_real (mpc_imagref(m), im, rnd);
13700 mpfr_clear_flags ();
13701 inexact = func (m, m, crnd);
13702 result = do_mpc_ckconv (m, type, inexact);
13703 mpc_clear (m);
13707 return result;
13709 #endif /* HAVE_mpc */
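/* Illustrative sketch (not part of the original source, guarded out of the
   build): with MPC available, do_mpc_arg1 can fold one-argument complex
   builtins for COMPLEX_CST arguments, roughly as below; ARG0, TYPE and
   RESULT are assumed locals.  */
#if 0
result = do_mpc_arg1 (arg0, type, mpc_cos);
result = do_mpc_arg1 (arg0, type, mpc_exp);
#endif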
13711 /* FIXME tuples.
13712 The functions below provide an alternate interface for folding
13713 builtin function calls presented as GIMPLE_CALL statements rather
13714 than as CALL_EXPRs. The folded result is still expressed as a
13715 tree. There is too much code duplication in the handling of
13716 varargs functions, and a more intrusive re-factoring would permit
13717 better sharing of code between the tree and statement-based
13718 versions of these functions. */
13720 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13721 along with N new arguments specified as the "..." parameters. SKIP
13722 is the number of arguments in STMT to be omitted. This function is used
13723 to do varargs-to-varargs transformations. */
13725 static tree
13726 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13728 int oldnargs = gimple_call_num_args (stmt);
13729 int nargs = oldnargs - skip + n;
13730 tree fntype = TREE_TYPE (fndecl);
13731 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13732 tree *buffer;
13733 int i, j;
13734 va_list ap;
13736 buffer = XALLOCAVEC (tree, nargs);
13737 va_start (ap, n);
13738 for (i = 0; i < n; i++)
13739 buffer[i] = va_arg (ap, tree);
13740 va_end (ap);
13741 for (j = skip; j < oldnargs; j++, i++)
13742 buffer[i] = gimple_call_arg (stmt, j);
13744 return fold (build_call_array (TREE_TYPE (fntype), fn, nargs, buffer));
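/* Illustrative sketch (not part of the original source): a worked example of
   the argument shuffling above.  For a statement calling foo (a, b, c, d, e),
     gimple_rewrite_call_expr (stmt, /*skip=*/4, fn, /*n=*/2, x, y)
   builds fn (x, y, e): the N "..." operands come first and only the
   arguments past the first SKIP are copied from the original call.  */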
13747 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13748 a normal call should be emitted rather than expanding the function
13749 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13751 static tree
13752 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13754 tree dest, size, len, fn, fmt, flag;
13755 const char *fmt_str;
13756 int nargs = gimple_call_num_args (stmt);
13758 /* Verify the required arguments in the original call. */
13759 if (nargs < 4)
13760 return NULL_TREE;
13761 dest = gimple_call_arg (stmt, 0);
13762 if (!validate_arg (dest, POINTER_TYPE))
13763 return NULL_TREE;
13764 flag = gimple_call_arg (stmt, 1);
13765 if (!validate_arg (flag, INTEGER_TYPE))
13766 return NULL_TREE;
13767 size = gimple_call_arg (stmt, 2);
13768 if (!validate_arg (size, INTEGER_TYPE))
13769 return NULL_TREE;
13770 fmt = gimple_call_arg (stmt, 3);
13771 if (!validate_arg (fmt, POINTER_TYPE))
13772 return NULL_TREE;
13774 if (! host_integerp (size, 1))
13775 return NULL_TREE;
13777 len = NULL_TREE;
13779 if (!init_target_chars ())
13780 return NULL_TREE;
13782 /* Check whether the format is a literal string constant. */
13783 fmt_str = c_getstr (fmt);
13784 if (fmt_str != NULL)
13786 /* If the format doesn't contain % args or %%, we know the size. */
13787 if (strchr (fmt_str, target_percent) == 0)
13789 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13790 len = build_int_cstu (size_type_node, strlen (fmt_str));
13792 /* If the format is "%s" and first ... argument is a string literal,
13793 we know the size too. */
13794 else if (fcode == BUILT_IN_SPRINTF_CHK
13795 && strcmp (fmt_str, target_percent_s) == 0)
13797 tree arg;
13799 if (nargs == 5)
13801 arg = gimple_call_arg (stmt, 4);
13802 if (validate_arg (arg, POINTER_TYPE))
13804 len = c_strlen (arg, 1);
13805 if (! len || ! host_integerp (len, 1))
13806 len = NULL_TREE;
13812 if (! integer_all_onesp (size))
13814 if (! len || ! tree_int_cst_lt (len, size))
13815 return NULL_TREE;
13818 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13819 or if format doesn't contain % chars or is "%s". */
13820 if (! integer_zerop (flag))
13822 if (fmt_str == NULL)
13823 return NULL_TREE;
13824 if (strchr (fmt_str, target_percent) != NULL
13825 && strcmp (fmt_str, target_percent_s))
13826 return NULL_TREE;
13829 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13830 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13831 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13832 if (!fn)
13833 return NULL_TREE;
13835 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13838 /* Fold a call STMT to __{,v}snprintf_chk. Return NULL_TREE if
13839 a normal call should be emitted rather than expanding the function
13840 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13841 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum length
13842 passed as the second argument. */
13844 tree
13845 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13846 enum built_in_function fcode)
13848 tree dest, size, len, fn, fmt, flag;
13849 const char *fmt_str;
13851 /* Verify the required arguments in the original call. */
13852 if (gimple_call_num_args (stmt) < 5)
13853 return NULL_TREE;
13854 dest = gimple_call_arg (stmt, 0);
13855 if (!validate_arg (dest, POINTER_TYPE))
13856 return NULL_TREE;
13857 len = gimple_call_arg (stmt, 1);
13858 if (!validate_arg (len, INTEGER_TYPE))
13859 return NULL_TREE;
13860 flag = gimple_call_arg (stmt, 2);
13861 if (!validate_arg (flag, INTEGER_TYPE))
13862 return NULL_TREE;
13863 size = gimple_call_arg (stmt, 3);
13864 if (!validate_arg (size, INTEGER_TYPE))
13865 return NULL_TREE;
13866 fmt = gimple_call_arg (stmt, 4);
13867 if (!validate_arg (fmt, POINTER_TYPE))
13868 return NULL_TREE;
13870 if (! host_integerp (size, 1))
13871 return NULL_TREE;
13873 if (! integer_all_onesp (size))
13875 if (! host_integerp (len, 1))
13877 /* If LEN is not constant, try MAXLEN too.
13878 For MAXLEN only allow optimizing into non-_ocs function
13879 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13880 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13881 return NULL_TREE;
13883 else
13884 maxlen = len;
13886 if (tree_int_cst_lt (size, maxlen))
13887 return NULL_TREE;
13890 if (!init_target_chars ())
13891 return NULL_TREE;
13893 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13894 or if format doesn't contain % chars or is "%s". */
13895 if (! integer_zerop (flag))
13897 fmt_str = c_getstr (fmt);
13898 if (fmt_str == NULL)
13899 return NULL_TREE;
13900 if (strchr (fmt_str, target_percent) != NULL
13901 && strcmp (fmt_str, target_percent_s))
13902 return NULL_TREE;
13905 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13906 available. */
13907 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13908 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13909 if (!fn)
13910 return NULL_TREE;
13912 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13915 /* Builtins with folding operations that operate on "..." arguments
13916 need special handling; we need to store the arguments in a convenient
13917 data structure before attempting any folding. Fortunately there are
13918 only a few builtins that fall into this category. FNDECL is the
13919 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13920 result of the function call is ignored. */
13922 static tree
13923 gimple_fold_builtin_varargs (tree fndecl, gimple stmt, bool ignore ATTRIBUTE_UNUSED)
13925 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13926 tree ret = NULL_TREE;
13928 switch (fcode)
13930 case BUILT_IN_SPRINTF_CHK:
13931 case BUILT_IN_VSPRINTF_CHK:
13932 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13933 break;
13935 case BUILT_IN_SNPRINTF_CHK:
13936 case BUILT_IN_VSNPRINTF_CHK:
13937 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13939 default:
13940 break;
13942 if (ret)
13944 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13945 TREE_NO_WARNING (ret) = 1;
13946 return ret;
13948 return NULL_TREE;
13951 /* A wrapper function for builtin folding that prevents warnings for
13952 "statement without effect" and the like, caused by removing the
13953 call node earlier than the warning is generated. */
13955 tree
13956 fold_call_stmt (gimple stmt, bool ignore)
13958 tree ret = NULL_TREE;
13959 tree fndecl = gimple_call_fndecl (stmt);
13960 if (fndecl
13961 && TREE_CODE (fndecl) == FUNCTION_DECL
13962 && DECL_BUILT_IN (fndecl)
13963 && !gimple_call_va_arg_pack_p (stmt))
13965 int nargs = gimple_call_num_args (stmt);
13967 if (avoid_folding_inline_builtin (fndecl))
13968 return NULL_TREE;
13969 /* FIXME: Don't use a list in this interface. */
13970 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13972 tree arglist = NULL_TREE;
13973 int i;
13974 for (i = nargs - 1; i >= 0; i--)
13975 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13976 return targetm.fold_builtin (fndecl, arglist, ignore);
13978 else
13980 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13982 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13983 int i;
13984 for (i = 0; i < nargs; i++)
13985 args[i] = gimple_call_arg (stmt, i);
13986 ret = fold_builtin_n (fndecl, args, nargs, ignore);
13988 if (!ret)
13989 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13990 if (ret)
13992 /* Propagate location information from original call to
13993 expansion of builtin. Otherwise things like
13994 maybe_emit_chk_warning, that operate on the expansion
13995 of a builtin, will use the wrong location information. */
13996 if (gimple_has_location (stmt))
13998 tree realret = ret;
13999 if (TREE_CODE (ret) == NOP_EXPR)
14000 realret = TREE_OPERAND (ret, 0);
14001 if (CAN_HAVE_LOCATION_P (realret)
14002 && !EXPR_HAS_LOCATION (realret))
14003 SET_EXPR_LOCATION (realret, gimple_location (stmt));
14004 return realret;
14006 return ret;
14010 return NULL_TREE;