[official-gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
56 #endif
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 #endif
61 #ifdef HAVE_mpc
62 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
63 #endif
65 /* Define the names of the builtin function types and codes. */
66 const char *const built_in_class_names[4]
67 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
69 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
70 const char * built_in_names[(int) END_BUILTINS] =
71 {
72 #include "builtins.def"
73 };
74 #undef DEF_BUILTIN
76 /* Set up an array of _DECL trees, making sure each element is
77 initialized to NULL_TREE. */
78 tree built_in_decls[(int) END_BUILTINS];
79 /* Declarations used when constructing the builtin implicitly in the compiler.
80 It may be NULL_TREE when this is invalid (for instance runtime is not
81 required to implement the function call in all cases). */
82 tree implicit_built_in_decls[(int) END_BUILTINS];
84 static const char *c_getstr (tree);
85 static rtx c_readstr (const char *, enum machine_mode);
86 static int target_char_cast (tree, char *);
87 static rtx get_memory_rtx (tree, tree);
88 static int apply_args_size (void);
89 static int apply_result_size (void);
90 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
91 static rtx result_vector (int, rtx);
92 #endif
93 static void expand_builtin_update_setjmp_buf (rtx);
94 static void expand_builtin_prefetch (tree);
95 static rtx expand_builtin_apply_args (void);
96 static rtx expand_builtin_apply_args_1 (void);
97 static rtx expand_builtin_apply (rtx, rtx, rtx);
98 static void expand_builtin_return (rtx);
99 static enum type_class type_to_class (tree);
100 static rtx expand_builtin_classify_type (tree);
101 static void expand_errno_check (tree, rtx);
102 static rtx expand_builtin_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
105 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
106 static rtx expand_builtin_sincos (tree);
107 static rtx expand_builtin_cexpi (tree, rtx, rtx);
108 static rtx expand_builtin_int_roundingfn (tree, rtx);
109 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
110 static rtx expand_builtin_args_info (tree);
111 static rtx expand_builtin_next_arg (void);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
119 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
120 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
121 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
127 enum machine_mode, int);
128 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
129 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
130 enum machine_mode, int);
131 static rtx expand_builtin_bcopy (tree, int);
132 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
133 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
134 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
136 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
137 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
139 static rtx expand_builtin_bzero (tree);
140 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
141 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
142 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
143 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
144 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
145 static rtx expand_builtin_alloca (tree, rtx);
146 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
147 static rtx expand_builtin_frame_address (tree, tree);
148 static rtx expand_builtin_fputs (tree, rtx, bool);
149 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
150 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
151 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
152 static tree stabilize_va_list_loc (location_t, tree, int);
153 static rtx expand_builtin_expect (tree, rtx);
154 static tree fold_builtin_constant_p (tree);
155 static tree fold_builtin_expect (location_t, tree, tree);
156 static tree fold_builtin_classify_type (tree);
157 static tree fold_builtin_strlen (location_t, tree);
158 static tree fold_builtin_inf (location_t, tree, int);
159 static tree fold_builtin_nan (tree, tree, int);
160 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
161 static bool validate_arg (const_tree, enum tree_code code);
162 static bool integer_valued_real_p (tree);
163 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
164 static bool readonly_data_expr (tree);
165 static rtx expand_builtin_fabs (tree, rtx, rtx);
166 static rtx expand_builtin_signbit (tree, rtx);
167 static tree fold_builtin_sqrt (location_t, tree, tree);
168 static tree fold_builtin_cbrt (location_t, tree, tree);
169 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
170 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
171 static tree fold_builtin_cos (location_t, tree, tree, tree);
172 static tree fold_builtin_cosh (location_t, tree, tree, tree);
173 static tree fold_builtin_tan (tree, tree);
174 static tree fold_builtin_trunc (location_t, tree, tree);
175 static tree fold_builtin_floor (location_t, tree, tree);
176 static tree fold_builtin_ceil (location_t, tree, tree);
177 static tree fold_builtin_round (location_t, tree, tree);
178 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
179 static tree fold_builtin_bitop (tree, tree);
180 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
181 static tree fold_builtin_strchr (location_t, tree, tree, tree);
182 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
183 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
184 static tree fold_builtin_strcmp (location_t, tree, tree);
185 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
186 static tree fold_builtin_signbit (location_t, tree, tree);
187 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
188 static tree fold_builtin_isascii (location_t, tree);
189 static tree fold_builtin_toascii (location_t, tree);
190 static tree fold_builtin_isdigit (location_t, tree);
191 static tree fold_builtin_fabs (location_t, tree, tree);
192 static tree fold_builtin_abs (location_t, tree, tree);
193 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
194 enum tree_code);
195 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
196 static tree fold_builtin_0 (location_t, tree, bool);
197 static tree fold_builtin_1 (location_t, tree, tree, bool);
198 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
199 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
200 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
201 static tree fold_builtin_varargs (location_t, tree, tree, bool);
203 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
204 static tree fold_builtin_strstr (location_t, tree, tree, tree);
205 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
206 static tree fold_builtin_strcat (location_t, tree, tree);
207 static tree fold_builtin_strncat (location_t, tree, tree, tree);
208 static tree fold_builtin_strspn (location_t, tree, tree);
209 static tree fold_builtin_strcspn (location_t, tree, tree);
210 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
212 static rtx expand_builtin_object_size (tree);
213 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
214 enum built_in_function);
215 static void maybe_emit_chk_warning (tree, enum built_in_function);
216 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
217 static void maybe_emit_free_warning (tree);
218 static tree fold_builtin_object_size (tree, tree);
219 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
220 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
221 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
222 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
223 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
224 enum built_in_function);
225 static bool init_target_chars (void);
227 static unsigned HOST_WIDE_INT target_newline;
228 static unsigned HOST_WIDE_INT target_percent;
229 static unsigned HOST_WIDE_INT target_c;
230 static unsigned HOST_WIDE_INT target_s;
231 static char target_percent_c[3];
232 static char target_percent_s[3];
233 static char target_percent_s_newline[4];
234 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
235 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
236 static tree do_mpfr_arg2 (tree, tree, tree,
237 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
238 static tree do_mpfr_arg3 (tree, tree, tree, tree,
239 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
240 static tree do_mpfr_sincos (tree, tree, tree);
241 static tree do_mpfr_bessel_n (tree, tree, tree,
242 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
243 const REAL_VALUE_TYPE *, bool);
244 static tree do_mpfr_remquo (tree, tree, tree);
245 static tree do_mpfr_lgamma_r (tree, tree, tree);
247 bool
248 is_builtin_name (const char *name)
250 if (strncmp (name, "__builtin_", 10) == 0)
251 return true;
252 if (strncmp (name, "__sync_", 7) == 0)
253 return true;
254 return false;
257 /* Return true if NODE should be considered for inline expansion regardless
258 of the optimization level. This means whenever a function is invoked with
259 its "internal" name, which normally contains the prefix "__builtin". */
261 static bool
262 called_as_built_in (tree node)
264 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
265 we want the name used to call the function, not the name it
266 will have. */
267 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
268 return is_builtin_name (name);
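
As a quick illustration (a standalone sketch, not part of this file), the prefix test above accepts the usual "__builtin_" and "__sync_" names and rejects plain library names:

#include <stdio.h>
#include <string.h>

/* Host-side sketch of the same prefix test, for illustration only.  */
static int is_builtin_name_sketch (const char *name)
{
  return strncmp (name, "__builtin_", 10) == 0
         || strncmp (name, "__sync_", 7) == 0;
}

int main (void)
{
  printf ("%d\n", is_builtin_name_sketch ("__builtin_memcpy"));     /* 1 */
  printf ("%d\n", is_builtin_name_sketch ("__sync_fetch_and_add")); /* 1 */
  printf ("%d\n", is_builtin_name_sketch ("memcpy"));               /* 0 */
  return 0;
}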
271 /* Return the alignment in bits of EXP, an object.
272 Don't return more than MAX_ALIGN no matter what, ALIGN is the initial
273 guessed alignment e.g. from type alignment. */
275 unsigned int
276 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
278 unsigned int inner;
280 inner = max_align;
281 if (handled_component_p (exp))
283 HOST_WIDE_INT bitsize, bitpos;
284 tree offset;
285 enum machine_mode mode;
286 int unsignedp, volatilep;
288 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
289 &mode, &unsignedp, &volatilep, true);
290 if (bitpos)
291 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
292 while (offset)
294 tree next_offset;
296 if (TREE_CODE (offset) == PLUS_EXPR)
298 next_offset = TREE_OPERAND (offset, 0);
299 offset = TREE_OPERAND (offset, 1);
301 else
302 next_offset = NULL;
303 if (host_integerp (offset, 1))
305 /* Any overflow in calculating offset_bits won't change
306 the alignment. */
307 unsigned offset_bits
308 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
310 if (offset_bits)
311 inner = MIN (inner, (offset_bits & -offset_bits));
313 else if (TREE_CODE (offset) == MULT_EXPR
314 && host_integerp (TREE_OPERAND (offset, 1), 1))
316 /* Any overflow in calculating offset_factor won't change
317 the alignment. */
318 unsigned offset_factor
319 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
320 * BITS_PER_UNIT);
322 if (offset_factor)
323 inner = MIN (inner, (offset_factor & -offset_factor));
325 else
327 inner = MIN (inner, BITS_PER_UNIT);
328 break;
330 offset = next_offset;
333 if (DECL_P (exp))
334 align = MIN (inner, DECL_ALIGN (exp));
335 #ifdef CONSTANT_ALIGNMENT
336 else if (CONSTANT_CLASS_P (exp))
337 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
338 #endif
339 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
340 || TREE_CODE (exp) == INDIRECT_REF)
341 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
342 else
343 align = MIN (align, inner);
344 return MIN (align, max_align);
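
The "bitpos & -bitpos" and "offset_bits & -offset_bits" expressions above isolate the lowest set bit of a two's-complement value, i.e. the largest power of two dividing it, which is exactly the alignment a known offset can preserve. A standalone sketch:

#include <stdio.h>

int main (void)
{
  /* The lowest set bit of X is the largest power-of-two factor of X.  */
  unsigned bit_offsets[] = { 96, 24, 40 };   /* 12, 3 and 5 bytes, in bits */
  for (int i = 0; i < 3; i++)
    printf ("offset of %u bits caps alignment at %u bits\n",
            bit_offsets[i], bit_offsets[i] & -bit_offsets[i]);
  return 0;                                  /* prints 32, 8 and 8 */
}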
347 /* Returns true iff we can trust that alignment information has been
348 calculated properly. */
350 bool
351 can_trust_pointer_alignment (void)
353 /* We rely on TER to compute accurate alignment information. */
354 return (optimize && flag_tree_ter);
357 /* Return the alignment in bits of EXP, a pointer valued expression.
358 But don't return more than MAX_ALIGN no matter what.
359 The alignment returned is, by default, the alignment of the thing that
360 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
362 Otherwise, look at the expression to see if we can do better, i.e., if the
363 expression is actually pointing at an object whose alignment is tighter. */
365 unsigned int
366 get_pointer_alignment (tree exp, unsigned int max_align)
368 unsigned int align, inner;
370 if (!can_trust_pointer_alignment ())
371 return 0;
373 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
374 return 0;
376 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
377 align = MIN (align, max_align);
379 while (1)
381 switch (TREE_CODE (exp))
383 CASE_CONVERT:
384 exp = TREE_OPERAND (exp, 0);
385 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
386 return align;
388 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
389 align = MIN (inner, max_align);
390 break;
392 case POINTER_PLUS_EXPR:
393 /* If sum of pointer + int, restrict our maximum alignment to that
394 imposed by the integer. If not, we can't do any better than
395 ALIGN. */
396 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
397 return align;
399 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
400 & (max_align / BITS_PER_UNIT - 1))
401 != 0)
402 max_align >>= 1;
404 exp = TREE_OPERAND (exp, 0);
405 break;
407 case ADDR_EXPR:
408 /* See what we are pointing at and look at its alignment. */
409 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
411 default:
412 return align;
417 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
418 way, because it could contain a zero byte in the middle.
419 TREE_STRING_LENGTH is the size of the character array, not the string.
421 ONLY_VALUE should be nonzero if the result is not going to be emitted
422 into the instruction stream and zero if it is going to be expanded.
423 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
424 is returned, otherwise NULL, since
425 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
426 evaluate the side-effects.
428 The value returned is of type `ssizetype'.
430 Unfortunately, string_constant can't access the values of const char
431 arrays with initializers, so neither can we do so here. */
433 tree
434 c_strlen (tree src, int only_value)
436 tree offset_node;
437 HOST_WIDE_INT offset;
438 int max;
439 const char *ptr;
441 STRIP_NOPS (src);
442 if (TREE_CODE (src) == COND_EXPR
443 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
445 tree len1, len2;
447 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
448 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
449 if (tree_int_cst_equal (len1, len2))
450 return len1;
453 if (TREE_CODE (src) == COMPOUND_EXPR
454 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
455 return c_strlen (TREE_OPERAND (src, 1), only_value);
457 src = string_constant (src, &offset_node);
458 if (src == 0)
459 return NULL_TREE;
461 max = TREE_STRING_LENGTH (src) - 1;
462 ptr = TREE_STRING_POINTER (src);
464 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
466 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
467 compute the offset to the following null if we don't know where to
468 start searching for it. */
469 int i;
471 for (i = 0; i < max; i++)
472 if (ptr[i] == 0)
473 return NULL_TREE;
475 /* We don't know the starting offset, but we do know that the string
476 has no internal zero bytes. We can assume that the offset falls
477 within the bounds of the string; otherwise, the programmer deserves
478 what he gets. Subtract the offset from the length of the string,
479 and return that. This would perhaps not be valid if we were dealing
480 with named arrays in addition to literal string constants. */
482 return size_diffop_loc (input_location, size_int (max), offset_node);
485 /* We have a known offset into the string. Start searching there for
486 a null character if we can represent it as a single HOST_WIDE_INT. */
487 if (offset_node == 0)
488 offset = 0;
489 else if (! host_integerp (offset_node, 0))
490 offset = -1;
491 else
492 offset = tree_low_cst (offset_node, 0);
494 /* If the offset is known to be out of bounds, warn, and call strlen at
495 runtime. */
496 if (offset < 0 || offset > max)
498 /* Suppress multiple warnings for propagated constant strings. */
499 if (! TREE_NO_WARNING (src))
501 warning (0, "offset outside bounds of constant string");
502 TREE_NO_WARNING (src) = 1;
504 return NULL_TREE;
507 /* Use strlen to search for the first zero byte. Since any strings
508 constructed with build_string will have nulls appended, we win even
509 if we get handed something like (char[4])"abcd".
511 Since OFFSET is our starting index into the string, no further
512 calculation is needed. */
513 return ssize_int (strlen (ptr + offset));
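
At source level the effect of c_strlen is that strlen of a string literal, possibly plus a constant offset, folds to a constant. A small sketch, assuming optimization is enabled:

#include <string.h>

/* Illustrative only: with optimization these calls typically fold,
   via c_strlen, to the constants shown.  */
size_t lit_len (void)    { return strlen ("foobar");     }   /* 6 */
size_t offset_len (void) { return strlen ("foobar" + 4); }   /* 2 */

/* With an embedded NUL and a non-constant offset, c_strlen gives up
   (returns NULL_TREE) and a runtime strlen call remains.  */
size_t no_fold (int i)   { return strlen ("foo\0bar" + i); }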
516 /* Return a char pointer for a C string if it is a string constant
517 or sum of string constant and integer constant. */
519 static const char *
520 c_getstr (tree src)
522 tree offset_node;
524 src = string_constant (src, &offset_node);
525 if (src == 0)
526 return 0;
528 if (offset_node == 0)
529 return TREE_STRING_POINTER (src);
530 else if (!host_integerp (offset_node, 1)
531 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
532 return 0;
534 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
537 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
538 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
540 static rtx
541 c_readstr (const char *str, enum machine_mode mode)
543 HOST_WIDE_INT c[2];
544 HOST_WIDE_INT ch;
545 unsigned int i, j;
547 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
549 c[0] = 0;
550 c[1] = 0;
551 ch = 1;
552 for (i = 0; i < GET_MODE_SIZE (mode); i++)
554 j = i;
555 if (WORDS_BIG_ENDIAN)
556 j = GET_MODE_SIZE (mode) - i - 1;
557 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
558 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
559 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
560 j *= BITS_PER_UNIT;
561 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
563 if (ch)
564 ch = (unsigned char) str[i];
565 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
567 return immed_double_const (c[0], c[1], mode);
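
For concreteness, in the little-endian case of the loop above (WORDS_BIG_ENDIAN and BYTES_BIG_ENDIAN both false) string byte I lands at bit position 8*I, so storing the constant back in MODE reproduces the original bytes; bytes past the first NUL come out as zero because CH sticks at 0. A host-side sketch of that case only:

#include <stdio.h>

int main (void)
{
  /* Emulates only the little-endian, single-word case of c_readstr.  */
  const char *str = "abcd";
  unsigned int c = 0;
  for (unsigned i = 0; i < 4; i++)
    c |= (unsigned int) (unsigned char) str[i] << (8 * i);
  printf ("0x%08x\n", c);   /* 0x64636261: 'a' lands in the low byte */
  return 0;
}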
570 /* Cast a target constant CST to target CHAR and if that value fits into
571 host char type, return zero and put that value into variable pointed to by
572 P. */
574 static int
575 target_char_cast (tree cst, char *p)
577 unsigned HOST_WIDE_INT val, hostval;
579 if (!host_integerp (cst, 1)
580 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
581 return 1;
583 val = tree_low_cst (cst, 1);
584 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
585 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
587 hostval = val;
588 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
589 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
591 if (val != hostval)
592 return 1;
594 *p = hostval;
595 return 0;
598 /* Similar to save_expr, but assumes that arbitrary code is not executed
599 in between the multiple evaluations. In particular, we assume that a
600 non-addressable local variable will not be modified. */
602 static tree
603 builtin_save_expr (tree exp)
605 if (TREE_ADDRESSABLE (exp) == 0
606 && (TREE_CODE (exp) == PARM_DECL
607 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
608 return exp;
610 return save_expr (exp);
613 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
614 times to get the address of either a higher stack frame, or a return
615 address located within it (depending on FNDECL_CODE). */
617 static rtx
618 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
620 int i;
622 #ifdef INITIAL_FRAME_ADDRESS_RTX
623 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
624 #else
625 rtx tem;
627 /* For a zero count with __builtin_return_address, we don't care what
628 frame address we return, because target-specific definitions will
629 override us. Therefore frame pointer elimination is OK, and using
630 the soft frame pointer is OK.
632 For a nonzero count, or a zero count with __builtin_frame_address,
633 we require a stable offset from the current frame pointer to the
634 previous one, so we must use the hard frame pointer, and
635 we must disable frame pointer elimination. */
636 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
637 tem = frame_pointer_rtx;
638 else
640 tem = hard_frame_pointer_rtx;
642 /* Tell reload not to eliminate the frame pointer. */
643 crtl->accesses_prior_frames = 1;
645 #endif
647 /* Some machines need special handling before we can access
648 arbitrary frames. For example, on the SPARC, we must first flush
649 all register windows to the stack. */
650 #ifdef SETUP_FRAME_ADDRESSES
651 if (count > 0)
652 SETUP_FRAME_ADDRESSES ();
653 #endif
655 /* On the SPARC, the return address is not in the frame, it is in a
656 register. There is no way to access it off of the current frame
657 pointer, but it can be accessed off the previous frame pointer by
658 reading the value from the register window save area. */
659 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
660 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
661 count--;
662 #endif
664 /* Scan back COUNT frames to the specified frame. */
665 for (i = 0; i < count; i++)
667 /* Assume the dynamic chain pointer is in the word that the
668 frame address points to, unless otherwise specified. */
669 #ifdef DYNAMIC_CHAIN_ADDRESS
670 tem = DYNAMIC_CHAIN_ADDRESS (tem);
671 #endif
672 tem = memory_address (Pmode, tem);
673 tem = gen_frame_mem (Pmode, tem);
674 tem = copy_to_reg (tem);
677 /* For __builtin_frame_address, return what we've got. But, on
678 the SPARC for example, we may have to add a bias. */
679 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
680 #ifdef FRAME_ADDR_RTX
681 return FRAME_ADDR_RTX (tem);
682 #else
683 return tem;
684 #endif
686 /* For __builtin_return_address, get the return address from that frame. */
687 #ifdef RETURN_ADDR_RTX
688 tem = RETURN_ADDR_RTX (count, tem);
689 #else
690 tem = memory_address (Pmode,
691 plus_constant (tem, GET_MODE_SIZE (Pmode)));
692 tem = gen_frame_mem (Pmode, tem);
693 #endif
694 return tem;
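
Typical source forms that reach this expander (illustrative; the argument must be a small integer constant, and nonzero levels are only as reliable as the target's frame chain):

void *caller_return_pc (void) { return __builtin_return_address (0); }
void *current_frame    (void) { return __builtin_frame_address (0);  }
void *callers_frame    (void) { return __builtin_frame_address (1);  }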
697 /* Alias set used for setjmp buffer. */
698 static alias_set_type setjmp_alias_set = -1;
700 /* Construct the leading half of a __builtin_setjmp call. Control will
701 return to RECEIVER_LABEL. This is also called directly by the SJLJ
702 exception handling code. */
704 void
705 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
707 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
708 rtx stack_save;
709 rtx mem;
711 if (setjmp_alias_set == -1)
712 setjmp_alias_set = new_alias_set ();
714 buf_addr = convert_memory_address (Pmode, buf_addr);
716 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
718 /* We store the frame pointer and the address of receiver_label in
719 the buffer and use the rest of it for the stack save area, which
720 is machine-dependent. */
722 mem = gen_rtx_MEM (Pmode, buf_addr);
723 set_mem_alias_set (mem, setjmp_alias_set);
724 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
726 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
727 set_mem_alias_set (mem, setjmp_alias_set);
729 emit_move_insn (validize_mem (mem),
730 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
732 stack_save = gen_rtx_MEM (sa_mode,
733 plus_constant (buf_addr,
734 2 * GET_MODE_SIZE (Pmode)));
735 set_mem_alias_set (stack_save, setjmp_alias_set);
736 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
738 /* If there is further processing to do, do it. */
739 #ifdef HAVE_builtin_setjmp_setup
740 if (HAVE_builtin_setjmp_setup)
741 emit_insn (gen_builtin_setjmp_setup (buf_addr));
742 #endif
744 /* Tell optimize_save_area_alloca that extra work is going to
745 need to go on during alloca. */
746 cfun->calls_setjmp = 1;
748 /* We have a nonlocal label. */
749 cfun->has_nonlocal_label = 1;
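
The stores above give the buffer the following shape. The struct below is only a sketch; the real buffer is an untyped five-word array and the save-area size and mode are machine-dependent:

/* Hypothetical view of the __builtin_setjmp buffer laid out above.  */
struct setjmp_buf_sketch
{
  void *frame_value;     /* word 0: targetm.builtin_setjmp_frame_value () */
  void *receiver_label;  /* word 1: address of RECEIVER_LABEL             */
  void *stack_save[3];   /* words 2..4: SAVE_NONLOCAL stack save area     */
};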
752 /* Construct the trailing part of a __builtin_setjmp call. This is
753 also called directly by the SJLJ exception handling code. */
755 void
756 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
758 /* Clobber the FP when we get here, so we have to make sure it's
759 marked as used by this function. */
760 emit_use (hard_frame_pointer_rtx);
762 /* Mark the static chain as clobbered here so life information
763 doesn't get messed up for it. */
764 emit_clobber (static_chain_rtx);
766 /* Now put in the code to restore the frame pointer, and argument
767 pointer, if needed. */
768 #ifdef HAVE_nonlocal_goto
769 if (! HAVE_nonlocal_goto)
770 #endif
772 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
773 /* This might change the hard frame pointer in ways that aren't
774 apparent to early optimization passes, so force a clobber. */
775 emit_clobber (hard_frame_pointer_rtx);
778 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
779 if (fixed_regs[ARG_POINTER_REGNUM])
781 #ifdef ELIMINABLE_REGS
782 size_t i;
783 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
785 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
786 if (elim_regs[i].from == ARG_POINTER_REGNUM
787 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
788 break;
790 if (i == ARRAY_SIZE (elim_regs))
791 #endif
793 /* Now restore our arg pointer from the address at which it
794 was saved in our stack frame. */
795 emit_move_insn (crtl->args.internal_arg_pointer,
796 copy_to_reg (get_arg_pointer_save_area ()));
799 #endif
801 #ifdef HAVE_builtin_setjmp_receiver
802 if (HAVE_builtin_setjmp_receiver)
803 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
804 else
805 #endif
806 #ifdef HAVE_nonlocal_goto_receiver
807 if (HAVE_nonlocal_goto_receiver)
808 emit_insn (gen_nonlocal_goto_receiver ());
809 else
810 #endif
811 { /* Nothing */ }
813 /* We must not allow the code we just generated to be reordered by
814 scheduling. Specifically, the update of the frame pointer must
815 happen immediately, not later. */
816 emit_insn (gen_blockage ());
819 /* __builtin_longjmp is passed a pointer to an array of five words (not
820 all will be used on all machines). It operates similarly to the C
821 library function of the same name, but is more efficient. Much of
822 the code below is copied from the handling of non-local gotos. */
824 static void
825 expand_builtin_longjmp (rtx buf_addr, rtx value)
827 rtx fp, lab, stack, insn, last;
828 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
830 /* DRAP is needed for stack realign if longjmp is expanded to current
831 function */
832 if (SUPPORTS_STACK_ALIGNMENT)
833 crtl->need_drap = true;
835 if (setjmp_alias_set == -1)
836 setjmp_alias_set = new_alias_set ();
838 buf_addr = convert_memory_address (Pmode, buf_addr);
840 buf_addr = force_reg (Pmode, buf_addr);
842 /* We used to store value in static_chain_rtx, but that fails if pointers
843 are smaller than integers. We instead require that the user must pass
844 a second argument of 1, because that is what builtin_setjmp will
845 return. This also makes EH slightly more efficient, since we are no
846 longer copying around a value that we don't care about. */
847 gcc_assert (value == const1_rtx);
849 last = get_last_insn ();
850 #ifdef HAVE_builtin_longjmp
851 if (HAVE_builtin_longjmp)
852 emit_insn (gen_builtin_longjmp (buf_addr));
853 else
854 #endif
856 fp = gen_rtx_MEM (Pmode, buf_addr);
857 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
858 GET_MODE_SIZE (Pmode)));
860 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
861 2 * GET_MODE_SIZE (Pmode)));
862 set_mem_alias_set (fp, setjmp_alias_set);
863 set_mem_alias_set (lab, setjmp_alias_set);
864 set_mem_alias_set (stack, setjmp_alias_set);
866 /* Pick up FP, label, and SP from the block and jump. This code is
867 from expand_goto in stmt.c; see there for detailed comments. */
868 #ifdef HAVE_nonlocal_goto
869 if (HAVE_nonlocal_goto)
870 /* We have to pass a value to the nonlocal_goto pattern that will
871 get copied into the static_chain pointer, but it does not matter
872 what that value is, because builtin_setjmp does not use it. */
873 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
874 else
875 #endif
877 lab = copy_to_reg (lab);
879 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
880 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
882 emit_move_insn (hard_frame_pointer_rtx, fp);
883 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
885 emit_use (hard_frame_pointer_rtx);
886 emit_use (stack_pointer_rtx);
887 emit_indirect_jump (lab);
891 /* Search backwards and mark the jump insn as a non-local goto.
892 Note that this precludes the use of __builtin_longjmp to a
893 __builtin_setjmp target in the same function. However, we've
894 already cautioned the user that these functions are for
895 internal exception handling use only. */
896 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
898 gcc_assert (insn != last);
900 if (JUMP_P (insn))
902 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
903 break;
905 else if (CALL_P (insn))
906 break;
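
A minimal source-level sketch of the pair in action. As the comment above notes, the longjmp must not target a __builtin_setjmp in the same function, and the assert requires the second argument to be the constant 1; these builtins are intended for internal exception-handling use, so this is illustrative only:

static void *buf[5];          /* five-word buffer, as described earlier */

static void jump_back (void)
{
  __builtin_longjmp (buf, 1); /* second argument must be the constant 1 */
}

int roundtrip (void)
{
  if (__builtin_setjmp (buf) == 0)
    {
      jump_back ();           /* does not return here...                */
      return 0;
    }
  return 1;                   /* ...control resumes here instead        */
}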
910 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
911 and the address of the save area. */
913 static rtx
914 expand_builtin_nonlocal_goto (tree exp)
916 tree t_label, t_save_area;
917 rtx r_label, r_save_area, r_fp, r_sp, insn;
919 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
920 return NULL_RTX;
922 t_label = CALL_EXPR_ARG (exp, 0);
923 t_save_area = CALL_EXPR_ARG (exp, 1);
925 r_label = expand_normal (t_label);
926 r_label = convert_memory_address (Pmode, r_label);
927 r_save_area = expand_normal (t_save_area);
928 r_save_area = convert_memory_address (Pmode, r_save_area);
929 /* Copy the address of the save location to a register just in case it was based
930 on the frame pointer. */
931 r_save_area = copy_to_reg (r_save_area);
932 r_fp = gen_rtx_MEM (Pmode, r_save_area);
933 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
934 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
936 crtl->has_nonlocal_goto = 1;
938 #ifdef HAVE_nonlocal_goto
939 /* ??? We no longer need to pass the static chain value, afaik. */
940 if (HAVE_nonlocal_goto)
941 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
942 else
943 #endif
945 r_label = copy_to_reg (r_label);
947 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
948 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
950 /* Restore frame pointer for containing function.
951 This sets the actual hard register used for the frame pointer
952 to the location of the function's incoming static chain info.
953 The non-local goto handler will then adjust it to contain the
954 proper value and reload the argument pointer, if needed. */
955 emit_move_insn (hard_frame_pointer_rtx, r_fp);
956 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
958 /* USE of hard_frame_pointer_rtx added for consistency;
959 not clear if really needed. */
960 emit_use (hard_frame_pointer_rtx);
961 emit_use (stack_pointer_rtx);
963 /* If the architecture is using a GP register, we must
964 conservatively assume that the target function makes use of it.
965 The prologue of functions with nonlocal gotos must therefore
966 initialize the GP register to the appropriate value, and we
967 must then make sure that this value is live at the point
968 of the jump. (Note that this doesn't necessarily apply
969 to targets with a nonlocal_goto pattern; they are free
970 to implement it in their own way. Note also that this is
971 a no-op if the GP register is a global invariant.) */
972 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
973 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
974 emit_use (pic_offset_table_rtx);
976 emit_indirect_jump (r_label);
979 /* Search backwards to the jump insn and mark it as a
980 non-local goto. */
981 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
983 if (JUMP_P (insn))
985 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
986 break;
988 else if (CALL_P (insn))
989 break;
992 return const0_rtx;
995 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
996 (not all will be used on all machines) that was passed to __builtin_setjmp.
997 It updates the stack pointer in that block to correspond to the current
998 stack pointer. */
1000 static void
1001 expand_builtin_update_setjmp_buf (rtx buf_addr)
1003 enum machine_mode sa_mode = Pmode;
1004 rtx stack_save;
1007 #ifdef HAVE_save_stack_nonlocal
1008 if (HAVE_save_stack_nonlocal)
1009 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1010 #endif
1011 #ifdef STACK_SAVEAREA_MODE
1012 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1013 #endif
1015 stack_save
1016 = gen_rtx_MEM (sa_mode,
1017 memory_address
1018 (sa_mode,
1019 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1021 #ifdef HAVE_setjmp
1022 if (HAVE_setjmp)
1023 emit_insn (gen_setjmp ());
1024 #endif
1026 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1029 /* Expand a call to __builtin_prefetch. For a target that does not support
1030 data prefetch, evaluate the memory address argument in case it has side
1031 effects. */
1033 static void
1034 expand_builtin_prefetch (tree exp)
1036 tree arg0, arg1, arg2;
1037 int nargs;
1038 rtx op0, op1, op2;
1040 if (!validate_arglist (exp, POINTER_TYPE, 0))
1041 return;
1043 arg0 = CALL_EXPR_ARG (exp, 0);
1045 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1046 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1047 locality). */
1048 nargs = call_expr_nargs (exp);
1049 if (nargs > 1)
1050 arg1 = CALL_EXPR_ARG (exp, 1);
1051 else
1052 arg1 = integer_zero_node;
1053 if (nargs > 2)
1054 arg2 = CALL_EXPR_ARG (exp, 2);
1055 else
1056 arg2 = build_int_cst (NULL_TREE, 3);
1058 /* Argument 0 is an address. */
1059 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1061 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1062 if (TREE_CODE (arg1) != INTEGER_CST)
1064 error ("second argument to %<__builtin_prefetch%> must be a constant");
1065 arg1 = integer_zero_node;
1067 op1 = expand_normal (arg1);
1068 /* Argument 1 must be either zero or one. */
1069 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1071 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1072 " using zero");
1073 op1 = const0_rtx;
1076 /* Argument 2 (locality) must be a compile-time constant int. */
1077 if (TREE_CODE (arg2) != INTEGER_CST)
1079 error ("third argument to %<__builtin_prefetch%> must be a constant");
1080 arg2 = integer_zero_node;
1082 op2 = expand_normal (arg2);
1083 /* Argument 2 must be 0, 1, 2, or 3. */
1084 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1086 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1087 op2 = const0_rtx;
1090 #ifdef HAVE_prefetch
1091 if (HAVE_prefetch)
1093 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1094 (op0,
1095 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1096 || (GET_MODE (op0) != Pmode))
1098 op0 = convert_memory_address (Pmode, op0);
1099 op0 = force_reg (Pmode, op0);
1101 emit_insn (gen_prefetch (op0, op1, op2));
1103 #endif
1105 /* Don't do anything with direct references to volatile memory, but
1106 generate code to handle other side effects. */
1107 if (!MEM_P (op0) && side_effects_p (op0))
1108 emit_insn (op0);
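
Illustrative calls that reach this expander; the second (read/write) and third (locality) arguments must be integer constants, and invalid values are diagnosed above and replaced with zero:

void prefetch_examples (const double *p)
{
  __builtin_prefetch (p);            /* rw defaults to 0 (read), locality to 3 */
  __builtin_prefetch (p + 8, 1);     /* prefetch for write                     */
  __builtin_prefetch (p + 16, 0, 1); /* read, low temporal locality            */
}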
1111 /* Get a MEM rtx for expression EXP which is the address of an operand
1112 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1113 the maximum length of the block of memory that might be accessed or
1114 NULL if unknown. */
1116 static rtx
1117 get_memory_rtx (tree exp, tree len)
1119 tree orig_exp = exp;
1120 rtx addr, mem;
1121 HOST_WIDE_INT off;
1123 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1124 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1125 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1126 exp = TREE_OPERAND (exp, 0);
1128 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1129 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1131 /* Get an expression we can use to find the attributes to assign to MEM.
1132 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1133 we can. First remove any nops. */
1134 while (CONVERT_EXPR_P (exp)
1135 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1136 exp = TREE_OPERAND (exp, 0);
1138 off = 0;
1139 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1140 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1141 && host_integerp (TREE_OPERAND (exp, 1), 0)
1142 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1143 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1144 else if (TREE_CODE (exp) == ADDR_EXPR)
1145 exp = TREE_OPERAND (exp, 0);
1146 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1147 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1148 else
1149 exp = NULL;
1151 /* Honor attributes derived from exp, except for the alias set
1152 (as builtin stringops may alias with anything) and the size
1153 (as stringops may access multiple array elements). */
1154 if (exp)
1156 set_mem_attributes (mem, exp, 0);
1158 if (off)
1159 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1161 /* Allow the string and memory builtins to overflow from one
1162 field into another, see http://gcc.gnu.org/PR23561.
1163 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1164 memory accessed by the string or memory builtin will fit
1165 within the field. */
1166 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1168 tree mem_expr = MEM_EXPR (mem);
1169 HOST_WIDE_INT offset = -1, length = -1;
1170 tree inner = exp;
1172 while (TREE_CODE (inner) == ARRAY_REF
1173 || CONVERT_EXPR_P (inner)
1174 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1175 || TREE_CODE (inner) == SAVE_EXPR)
1176 inner = TREE_OPERAND (inner, 0);
1178 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1180 if (MEM_OFFSET (mem)
1181 && CONST_INT_P (MEM_OFFSET (mem)))
1182 offset = INTVAL (MEM_OFFSET (mem));
1184 if (offset >= 0 && len && host_integerp (len, 0))
1185 length = tree_low_cst (len, 0);
1187 while (TREE_CODE (inner) == COMPONENT_REF)
1189 tree field = TREE_OPERAND (inner, 1);
1190 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1191 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1193 /* Bitfields are generally not byte-addressable. */
1194 gcc_assert (!DECL_BIT_FIELD (field)
1195 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1196 % BITS_PER_UNIT) == 0
1197 && host_integerp (DECL_SIZE (field), 0)
1198 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1199 % BITS_PER_UNIT) == 0));
1201 /* If we can prove that the memory starting at XEXP (mem, 0) and
1202 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1203 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1204 fields without DECL_SIZE_UNIT like flexible array members. */
1205 if (length >= 0
1206 && DECL_SIZE_UNIT (field)
1207 && host_integerp (DECL_SIZE_UNIT (field), 0))
1209 HOST_WIDE_INT size
1210 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1211 if (offset <= size
1212 && length <= size
1213 && offset + length <= size)
1214 break;
1217 if (offset >= 0
1218 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1219 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1220 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1221 / BITS_PER_UNIT;
1222 else
1224 offset = -1;
1225 length = -1;
1228 mem_expr = TREE_OPERAND (mem_expr, 0);
1229 inner = TREE_OPERAND (inner, 0);
1232 if (mem_expr == NULL)
1233 offset = -1;
1234 if (mem_expr != MEM_EXPR (mem))
1236 set_mem_expr (mem, mem_expr);
1237 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1240 set_mem_alias_set (mem, 0);
1241 set_mem_size (mem, NULL_RTX);
1244 return mem;
1247 /* Built-in functions to perform an untyped call and return. */
1249 /* For each register that may be used for calling a function, this
1250 gives a mode used to copy the register's value. VOIDmode indicates
1251 the register is not used for calling a function. If the machine
1252 has register windows, this gives only the outbound registers.
1253 INCOMING_REGNO gives the corresponding inbound register. */
1254 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1256 /* For each register that may be used for returning values, this gives
1257 a mode used to copy the register's value. VOIDmode indicates the
1258 register is not used for returning values. If the machine has
1259 register windows, this gives only the outbound registers.
1260 INCOMING_REGNO gives the corresponding inbound register. */
1261 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1263 /* For each register that may be used for calling a function, this
1264 gives the offset of that register into the block returned by
1265 __builtin_apply_args. 0 indicates that the register is not
1266 used for calling a function. */
1267 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1269 /* Return the size required for the block returned by __builtin_apply_args,
1270 and initialize apply_args_mode. */
1272 static int
1273 apply_args_size (void)
1275 static int size = -1;
1276 int align;
1277 unsigned int regno;
1278 enum machine_mode mode;
1280 /* The values computed by this function never change. */
1281 if (size < 0)
1283 /* The first value is the incoming arg-pointer. */
1284 size = GET_MODE_SIZE (Pmode);
1286 /* The second value is the structure value address unless this is
1287 passed as an "invisible" first argument. */
1288 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1289 size += GET_MODE_SIZE (Pmode);
1291 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1292 if (FUNCTION_ARG_REGNO_P (regno))
1294 mode = reg_raw_mode[regno];
1296 gcc_assert (mode != VOIDmode);
1298 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1299 if (size % align != 0)
1300 size = CEIL (size, align) * align;
1301 apply_args_reg_offset[regno] = size;
1302 size += GET_MODE_SIZE (mode);
1303 apply_args_mode[regno] = mode;
1305 else
1307 apply_args_mode[regno] = VOIDmode;
1308 apply_args_reg_offset[regno] = 0;
1311 return size;
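
The size/alignment rounding used here (and in the apply/result walkers below) is plain round-up-to-a-multiple arithmetic. A standalone sketch with made-up register sizes:

#include <stdio.h>
#define CEIL(x, y) (((x) + (y) - 1) / (y))   /* as in gcc's system.h */

int main (void)
{
  int size = 0;
  int reg_size[]  = { 4, 8, 2 };   /* hypothetical register sizes   */
  int reg_align[] = { 4, 8, 2 };   /* ...and their byte alignments  */
  for (int i = 0; i < 3; i++)
    {
      if (size % reg_align[i] != 0)
        size = CEIL (size, reg_align[i]) * reg_align[i];
      printf ("slot %d at offset %d\n", i, size);   /* offsets 0, 8, 16 */
      size += reg_size[i];
    }
  printf ("block size %d\n", size);                 /* 18 */
  return 0;
}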
1314 /* Return the size required for the block returned by __builtin_apply,
1315 and initialize apply_result_mode. */
1317 static int
1318 apply_result_size (void)
1320 static int size = -1;
1321 int align, regno;
1322 enum machine_mode mode;
1324 /* The values computed by this function never change. */
1325 if (size < 0)
1327 size = 0;
1329 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1330 if (FUNCTION_VALUE_REGNO_P (regno))
1332 mode = reg_raw_mode[regno];
1334 gcc_assert (mode != VOIDmode);
1336 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1337 if (size % align != 0)
1338 size = CEIL (size, align) * align;
1339 size += GET_MODE_SIZE (mode);
1340 apply_result_mode[regno] = mode;
1342 else
1343 apply_result_mode[regno] = VOIDmode;
1345 /* Allow targets that use untyped_call and untyped_return to override
1346 the size so that machine-specific information can be stored here. */
1347 #ifdef APPLY_RESULT_SIZE
1348 size = APPLY_RESULT_SIZE;
1349 #endif
1351 return size;
1354 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1355 /* Create a vector describing the result block RESULT. If SAVEP is true,
1356 the result block is used to save the values; otherwise it is used to
1357 restore the values. */
1359 static rtx
1360 result_vector (int savep, rtx result)
1362 int regno, size, align, nelts;
1363 enum machine_mode mode;
1364 rtx reg, mem;
1365 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1367 size = nelts = 0;
1368 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1369 if ((mode = apply_result_mode[regno]) != VOIDmode)
1371 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1372 if (size % align != 0)
1373 size = CEIL (size, align) * align;
1374 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1375 mem = adjust_address (result, mode, size);
1376 savevec[nelts++] = (savep
1377 ? gen_rtx_SET (VOIDmode, mem, reg)
1378 : gen_rtx_SET (VOIDmode, reg, mem));
1379 size += GET_MODE_SIZE (mode);
1381 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1383 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1385 /* Save the state required to perform an untyped call with the same
1386 arguments as were passed to the current function. */
1388 static rtx
1389 expand_builtin_apply_args_1 (void)
1391 rtx registers, tem;
1392 int size, align, regno;
1393 enum machine_mode mode;
1394 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1396 /* Create a block where the arg-pointer, structure value address,
1397 and argument registers can be saved. */
1398 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1400 /* Walk past the arg-pointer and structure value address. */
1401 size = GET_MODE_SIZE (Pmode);
1402 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1403 size += GET_MODE_SIZE (Pmode);
1405 /* Save each register used in calling a function to the block. */
1406 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1407 if ((mode = apply_args_mode[regno]) != VOIDmode)
1409 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1410 if (size % align != 0)
1411 size = CEIL (size, align) * align;
1413 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1415 emit_move_insn (adjust_address (registers, mode, size), tem);
1416 size += GET_MODE_SIZE (mode);
1419 /* Save the arg pointer to the block. */
1420 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1421 #ifdef STACK_GROWS_DOWNWARD
1422 /* We need the pointer as the caller actually passed them to us, not
1423 as we might have pretended they were passed. Make sure it's a valid
1424 operand, as emit_move_insn isn't expected to handle a PLUS. */
1425 tem
1426 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1427 NULL_RTX);
1428 #endif
1429 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1431 size = GET_MODE_SIZE (Pmode);
1433 /* Save the structure value address unless this is passed as an
1434 "invisible" first argument. */
1435 if (struct_incoming_value)
1437 emit_move_insn (adjust_address (registers, Pmode, size),
1438 copy_to_reg (struct_incoming_value));
1439 size += GET_MODE_SIZE (Pmode);
1442 /* Return the address of the block. */
1443 return copy_addr_to_reg (XEXP (registers, 0));
1446 /* __builtin_apply_args returns block of memory allocated on
1447 the stack into which is stored the arg pointer, structure
1448 value address, static chain, and all the registers that might
1449 possibly be used in performing a function call. The code is
1450 moved to the start of the function so the incoming values are
1451 saved. */
1453 static rtx
1454 expand_builtin_apply_args (void)
1456 /* Don't do __builtin_apply_args more than once in a function.
1457 Save the result of the first call and reuse it. */
1458 if (apply_args_value != 0)
1459 return apply_args_value;
1461 /* When this function is called, it means that registers must be
1462 saved on entry to this function. So we migrate the
1463 call to the first insn of this function. */
1464 rtx temp;
1465 rtx seq;
1467 start_sequence ();
1468 temp = expand_builtin_apply_args_1 ();
1469 seq = get_insns ();
1470 end_sequence ();
1472 apply_args_value = temp;
1474 /* Put the insns after the NOTE that starts the function.
1475 If this is inside a start_sequence, make the outer-level insn
1476 chain current, so the code is placed at the start of the
1477 function. If internal_arg_pointer is a non-virtual pseudo,
1478 it needs to be placed after the function that initializes
1479 that pseudo. */
1480 push_topmost_sequence ();
1481 if (REG_P (crtl->args.internal_arg_pointer)
1482 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1483 emit_insn_before (seq, parm_birth_insn);
1484 else
1485 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1486 pop_topmost_sequence ();
1487 return temp;
1491 /* Perform an untyped call and save the state required to perform an
1492 untyped return of whatever value was returned by the given function. */
1494 static rtx
1495 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1497 int size, align, regno;
1498 enum machine_mode mode;
1499 rtx incoming_args, result, reg, dest, src, call_insn;
1500 rtx old_stack_level = 0;
1501 rtx call_fusage = 0;
1502 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1504 arguments = convert_memory_address (Pmode, arguments);
1506 /* Create a block where the return registers can be saved. */
1507 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1509 /* Fetch the arg pointer from the ARGUMENTS block. */
1510 incoming_args = gen_reg_rtx (Pmode);
1511 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1512 #ifndef STACK_GROWS_DOWNWARD
1513 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1514 incoming_args, 0, OPTAB_LIB_WIDEN);
1515 #endif
1517 /* Push a new argument block and copy the arguments. Do not allow
1518 the (potential) memcpy call below to interfere with our stack
1519 manipulations. */
1520 do_pending_stack_adjust ();
1521 NO_DEFER_POP;
1523 /* Save the stack with nonlocal if available. */
1524 #ifdef HAVE_save_stack_nonlocal
1525 if (HAVE_save_stack_nonlocal)
1526 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1527 else
1528 #endif
1529 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1531 /* Allocate a block of memory onto the stack and copy the memory
1532 arguments to the outgoing arguments address. */
1533 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1535 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1536 may have already set current_function_calls_alloca to true.
1537 current_function_calls_alloca won't be set if argsize is zero,
1538 so we have to guarantee need_drap is true here. */
1539 if (SUPPORTS_STACK_ALIGNMENT)
1540 crtl->need_drap = true;
1542 dest = virtual_outgoing_args_rtx;
1543 #ifndef STACK_GROWS_DOWNWARD
1544 if (CONST_INT_P (argsize))
1545 dest = plus_constant (dest, -INTVAL (argsize));
1546 else
1547 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1548 #endif
1549 dest = gen_rtx_MEM (BLKmode, dest);
1550 set_mem_align (dest, PARM_BOUNDARY);
1551 src = gen_rtx_MEM (BLKmode, incoming_args);
1552 set_mem_align (src, PARM_BOUNDARY);
1553 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1555 /* Refer to the argument block. */
1556 apply_args_size ();
1557 arguments = gen_rtx_MEM (BLKmode, arguments);
1558 set_mem_align (arguments, PARM_BOUNDARY);
1560 /* Walk past the arg-pointer and structure value address. */
1561 size = GET_MODE_SIZE (Pmode);
1562 if (struct_value)
1563 size += GET_MODE_SIZE (Pmode);
1565 /* Restore each of the registers previously saved. Make USE insns
1566 for each of these registers for use in making the call. */
1567 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1568 if ((mode = apply_args_mode[regno]) != VOIDmode)
1570 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1571 if (size % align != 0)
1572 size = CEIL (size, align) * align;
1573 reg = gen_rtx_REG (mode, regno);
1574 emit_move_insn (reg, adjust_address (arguments, mode, size));
1575 use_reg (&call_fusage, reg);
1576 size += GET_MODE_SIZE (mode);
1579 /* Restore the structure value address unless this is passed as an
1580 "invisible" first argument. */
1581 size = GET_MODE_SIZE (Pmode);
1582 if (struct_value)
1584 rtx value = gen_reg_rtx (Pmode);
1585 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1586 emit_move_insn (struct_value, value);
1587 if (REG_P (struct_value))
1588 use_reg (&call_fusage, struct_value);
1589 size += GET_MODE_SIZE (Pmode);
1592 /* All arguments and registers used for the call are set up by now! */
1593 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1595 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1596 and we don't want to load it into a register as an optimization,
1597 because prepare_call_address already did it if it should be done. */
1598 if (GET_CODE (function) != SYMBOL_REF)
1599 function = memory_address (FUNCTION_MODE, function);
1601 /* Generate the actual call instruction and save the return value. */
1602 #ifdef HAVE_untyped_call
1603 if (HAVE_untyped_call)
1604 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1605 result, result_vector (1, result)));
1606 else
1607 #endif
1608 #ifdef HAVE_call_value
1609 if (HAVE_call_value)
1611 rtx valreg = 0;
1613 /* Locate the unique return register. It is not possible to
1614 express a call that sets more than one return register using
1615 call_value; use untyped_call for that. In fact, untyped_call
1616 only needs to save the return registers in the given block. */
1617 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1618 if ((mode = apply_result_mode[regno]) != VOIDmode)
1620 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1622 valreg = gen_rtx_REG (mode, regno);
1625 emit_call_insn (GEN_CALL_VALUE (valreg,
1626 gen_rtx_MEM (FUNCTION_MODE, function),
1627 const0_rtx, NULL_RTX, const0_rtx));
1629 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1631 else
1632 #endif
1633 gcc_unreachable ();
1635 /* Find the CALL insn we just emitted, and attach the register usage
1636 information. */
1637 call_insn = last_call_insn ();
1638 add_function_usage_to (call_insn, call_fusage);
1640 /* Restore the stack. */
1641 #ifdef HAVE_save_stack_nonlocal
1642 if (HAVE_save_stack_nonlocal)
1643 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1644 else
1645 #endif
1646 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1648 OK_DEFER_POP;
1650 /* Return the address of the result block. */
1651 result = copy_addr_to_reg (XEXP (result, 0));
1652 return convert_memory_address (ptr_mode, result);
1655 /* Perform an untyped return. */
1657 static void
1658 expand_builtin_return (rtx result)
1660 int size, align, regno;
1661 enum machine_mode mode;
1662 rtx reg;
1663 rtx call_fusage = 0;
1665 result = convert_memory_address (Pmode, result);
1667 apply_result_size ();
1668 result = gen_rtx_MEM (BLKmode, result);
1670 #ifdef HAVE_untyped_return
1671 if (HAVE_untyped_return)
1673 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1674 emit_barrier ();
1675 return;
1677 #endif
1679 /* Restore the return value and note that each value is used. */
1680 size = 0;
1681 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1682 if ((mode = apply_result_mode[regno]) != VOIDmode)
1684 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1685 if (size % align != 0)
1686 size = CEIL (size, align) * align;
1687 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1688 emit_move_insn (reg, adjust_address (result, mode, size));
1690 push_to_sequence (call_fusage);
1691 emit_use (reg);
1692 call_fusage = get_insns ();
1693 end_sequence ();
1694 size += GET_MODE_SIZE (mode);
1697 /* Put the USE insns before the return. */
1698 emit_insn (call_fusage);
1700 /* Return whatever values were restored by jumping directly to the end
1701 of the function. */
1702 expand_naked_return ();
1705 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1707 static enum type_class
1708 type_to_class (tree type)
1710 switch (TREE_CODE (type))
1712 case VOID_TYPE: return void_type_class;
1713 case INTEGER_TYPE: return integer_type_class;
1714 case ENUMERAL_TYPE: return enumeral_type_class;
1715 case BOOLEAN_TYPE: return boolean_type_class;
1716 case POINTER_TYPE: return pointer_type_class;
1717 case REFERENCE_TYPE: return reference_type_class;
1718 case OFFSET_TYPE: return offset_type_class;
1719 case REAL_TYPE: return real_type_class;
1720 case COMPLEX_TYPE: return complex_type_class;
1721 case FUNCTION_TYPE: return function_type_class;
1722 case METHOD_TYPE: return method_type_class;
1723 case RECORD_TYPE: return record_type_class;
1724 case UNION_TYPE:
1725 case QUAL_UNION_TYPE: return union_type_class;
1726 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1727 ? string_type_class : array_type_class);
1728 case LANG_TYPE: return lang_type_class;
1729 default: return no_type_class;
1733 /* Expand a call EXP to __builtin_classify_type. */
1735 static rtx
1736 expand_builtin_classify_type (tree exp)
1738 if (call_expr_nargs (exp))
1739 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1740 return GEN_INT (no_type_class);
1743 /* This helper macro, meant to be used in mathfn_built_in below,
1744 determines which among a set of three builtin math functions is
1745 appropriate for a given type mode. The `F' and `L' cases are
1746 automatically generated from the `double' case. */
1747 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1748 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1749 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1750 fcodel = BUILT_IN_MATHFN##L ; break;
1751 /* Similar to above, but appends _R after any F/L suffix. */
1752 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1753 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1754 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1755 fcodel = BUILT_IN_MATHFN##L_R ; break;
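/* Expansion sketch (illustrative, not part of the original source):
   CASE_MATHFN (BUILT_IN_SIN) expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so each CASE_MATHFN line in the switch below covers the double,
   float and long double variants of one function.  */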
1757 /* Return the mathematical function equivalent to FN but operating directly
1758 on TYPE, if available. If IMPLICIT is true, find the function in
1759 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1760 can't do the conversion, return zero. */
1762 static tree
1763 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1765 tree const *const fn_arr
1766 = implicit ? implicit_built_in_decls : built_in_decls;
1767 enum built_in_function fcode, fcodef, fcodel;
1769 switch (fn)
1771 CASE_MATHFN (BUILT_IN_ACOS)
1772 CASE_MATHFN (BUILT_IN_ACOSH)
1773 CASE_MATHFN (BUILT_IN_ASIN)
1774 CASE_MATHFN (BUILT_IN_ASINH)
1775 CASE_MATHFN (BUILT_IN_ATAN)
1776 CASE_MATHFN (BUILT_IN_ATAN2)
1777 CASE_MATHFN (BUILT_IN_ATANH)
1778 CASE_MATHFN (BUILT_IN_CBRT)
1779 CASE_MATHFN (BUILT_IN_CEIL)
1780 CASE_MATHFN (BUILT_IN_CEXPI)
1781 CASE_MATHFN (BUILT_IN_COPYSIGN)
1782 CASE_MATHFN (BUILT_IN_COS)
1783 CASE_MATHFN (BUILT_IN_COSH)
1784 CASE_MATHFN (BUILT_IN_DREM)
1785 CASE_MATHFN (BUILT_IN_ERF)
1786 CASE_MATHFN (BUILT_IN_ERFC)
1787 CASE_MATHFN (BUILT_IN_EXP)
1788 CASE_MATHFN (BUILT_IN_EXP10)
1789 CASE_MATHFN (BUILT_IN_EXP2)
1790 CASE_MATHFN (BUILT_IN_EXPM1)
1791 CASE_MATHFN (BUILT_IN_FABS)
1792 CASE_MATHFN (BUILT_IN_FDIM)
1793 CASE_MATHFN (BUILT_IN_FLOOR)
1794 CASE_MATHFN (BUILT_IN_FMA)
1795 CASE_MATHFN (BUILT_IN_FMAX)
1796 CASE_MATHFN (BUILT_IN_FMIN)
1797 CASE_MATHFN (BUILT_IN_FMOD)
1798 CASE_MATHFN (BUILT_IN_FREXP)
1799 CASE_MATHFN (BUILT_IN_GAMMA)
1800 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1801 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1802 CASE_MATHFN (BUILT_IN_HYPOT)
1803 CASE_MATHFN (BUILT_IN_ILOGB)
1804 CASE_MATHFN (BUILT_IN_INF)
1805 CASE_MATHFN (BUILT_IN_ISINF)
1806 CASE_MATHFN (BUILT_IN_J0)
1807 CASE_MATHFN (BUILT_IN_J1)
1808 CASE_MATHFN (BUILT_IN_JN)
1809 CASE_MATHFN (BUILT_IN_LCEIL)
1810 CASE_MATHFN (BUILT_IN_LDEXP)
1811 CASE_MATHFN (BUILT_IN_LFLOOR)
1812 CASE_MATHFN (BUILT_IN_LGAMMA)
1813 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1814 CASE_MATHFN (BUILT_IN_LLCEIL)
1815 CASE_MATHFN (BUILT_IN_LLFLOOR)
1816 CASE_MATHFN (BUILT_IN_LLRINT)
1817 CASE_MATHFN (BUILT_IN_LLROUND)
1818 CASE_MATHFN (BUILT_IN_LOG)
1819 CASE_MATHFN (BUILT_IN_LOG10)
1820 CASE_MATHFN (BUILT_IN_LOG1P)
1821 CASE_MATHFN (BUILT_IN_LOG2)
1822 CASE_MATHFN (BUILT_IN_LOGB)
1823 CASE_MATHFN (BUILT_IN_LRINT)
1824 CASE_MATHFN (BUILT_IN_LROUND)
1825 CASE_MATHFN (BUILT_IN_MODF)
1826 CASE_MATHFN (BUILT_IN_NAN)
1827 CASE_MATHFN (BUILT_IN_NANS)
1828 CASE_MATHFN (BUILT_IN_NEARBYINT)
1829 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1830 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1831 CASE_MATHFN (BUILT_IN_POW)
1832 CASE_MATHFN (BUILT_IN_POWI)
1833 CASE_MATHFN (BUILT_IN_POW10)
1834 CASE_MATHFN (BUILT_IN_REMAINDER)
1835 CASE_MATHFN (BUILT_IN_REMQUO)
1836 CASE_MATHFN (BUILT_IN_RINT)
1837 CASE_MATHFN (BUILT_IN_ROUND)
1838 CASE_MATHFN (BUILT_IN_SCALB)
1839 CASE_MATHFN (BUILT_IN_SCALBLN)
1840 CASE_MATHFN (BUILT_IN_SCALBN)
1841 CASE_MATHFN (BUILT_IN_SIGNBIT)
1842 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1843 CASE_MATHFN (BUILT_IN_SIN)
1844 CASE_MATHFN (BUILT_IN_SINCOS)
1845 CASE_MATHFN (BUILT_IN_SINH)
1846 CASE_MATHFN (BUILT_IN_SQRT)
1847 CASE_MATHFN (BUILT_IN_TAN)
1848 CASE_MATHFN (BUILT_IN_TANH)
1849 CASE_MATHFN (BUILT_IN_TGAMMA)
1850 CASE_MATHFN (BUILT_IN_TRUNC)
1851 CASE_MATHFN (BUILT_IN_Y0)
1852 CASE_MATHFN (BUILT_IN_Y1)
1853 CASE_MATHFN (BUILT_IN_YN)
1855 default:
1856 return NULL_TREE;
1859 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1860 return fn_arr[fcode];
1861 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1862 return fn_arr[fcodef];
1863 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1864 return fn_arr[fcodel];
1865 else
1866 return NULL_TREE;
1869 /* Like mathfn_built_in_1(), but always use the implicit array. */
1871 tree
1872 mathfn_built_in (tree type, enum built_in_function fn)
1874 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1877 /* If errno must be maintained, expand the RTL to check if the result,
1878 TARGET, of a built-in function call, EXP, is NaN, and if so set
1879 errno to EDOM. */
1881 static void
1882 expand_errno_check (tree exp, rtx target)
1884 rtx lab = gen_label_rtx ();
1886 /* Test the result; if it is NaN, set errno=EDOM because
1887 the argument was not in the domain. */
1888 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1889 NULL_RTX, NULL_RTX, lab);
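/* Clarifying note (not part of the original source): the EQ
   self-comparison above succeeds exactly when TARGET is not a NaN,
   so the jump to LAB skips the errno update for in-domain results.  */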
1891 #ifdef TARGET_EDOM
1892 /* If this built-in doesn't throw an exception, set errno directly. */
1893 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1895 #ifdef GEN_ERRNO_RTX
1896 rtx errno_rtx = GEN_ERRNO_RTX;
1897 #else
1898 rtx errno_rtx
1899 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1900 #endif
1901 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1902 emit_label (lab);
1903 return;
1905 #endif
1907 /* Make sure the library call isn't expanded as a tail call. */
1908 CALL_EXPR_TAILCALL (exp) = 0;
1910 /* We can't set errno=EDOM directly; let the library call do it.
1911 Pop the arguments right away in case the call gets deleted. */
1912 NO_DEFER_POP;
1913 expand_call (exp, target, 0);
1914 OK_DEFER_POP;
1915 emit_label (lab);
1918 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1919 Return NULL_RTX if a normal call should be emitted rather than expanding
1920 the function in-line. EXP is the expression that is a call to the builtin
1921 function; if convenient, the result should be placed in TARGET.
1922 SUBTARGET may be used as the target for computing one of EXP's operands. */
1924 static rtx
1925 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1927 optab builtin_optab;
1928 rtx op0, insns, before_call;
1929 tree fndecl = get_callee_fndecl (exp);
1930 enum machine_mode mode;
1931 bool errno_set = false;
1932 tree arg;
1934 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1935 return NULL_RTX;
1937 arg = CALL_EXPR_ARG (exp, 0);
1939 switch (DECL_FUNCTION_CODE (fndecl))
1941 CASE_FLT_FN (BUILT_IN_SQRT):
1942 errno_set = ! tree_expr_nonnegative_p (arg);
1943 builtin_optab = sqrt_optab;
1944 break;
1945 CASE_FLT_FN (BUILT_IN_EXP):
1946 errno_set = true; builtin_optab = exp_optab; break;
1947 CASE_FLT_FN (BUILT_IN_EXP10):
1948 CASE_FLT_FN (BUILT_IN_POW10):
1949 errno_set = true; builtin_optab = exp10_optab; break;
1950 CASE_FLT_FN (BUILT_IN_EXP2):
1951 errno_set = true; builtin_optab = exp2_optab; break;
1952 CASE_FLT_FN (BUILT_IN_EXPM1):
1953 errno_set = true; builtin_optab = expm1_optab; break;
1954 CASE_FLT_FN (BUILT_IN_LOGB):
1955 errno_set = true; builtin_optab = logb_optab; break;
1956 CASE_FLT_FN (BUILT_IN_LOG):
1957 errno_set = true; builtin_optab = log_optab; break;
1958 CASE_FLT_FN (BUILT_IN_LOG10):
1959 errno_set = true; builtin_optab = log10_optab; break;
1960 CASE_FLT_FN (BUILT_IN_LOG2):
1961 errno_set = true; builtin_optab = log2_optab; break;
1962 CASE_FLT_FN (BUILT_IN_LOG1P):
1963 errno_set = true; builtin_optab = log1p_optab; break;
1964 CASE_FLT_FN (BUILT_IN_ASIN):
1965 builtin_optab = asin_optab; break;
1966 CASE_FLT_FN (BUILT_IN_ACOS):
1967 builtin_optab = acos_optab; break;
1968 CASE_FLT_FN (BUILT_IN_TAN):
1969 builtin_optab = tan_optab; break;
1970 CASE_FLT_FN (BUILT_IN_ATAN):
1971 builtin_optab = atan_optab; break;
1972 CASE_FLT_FN (BUILT_IN_FLOOR):
1973 builtin_optab = floor_optab; break;
1974 CASE_FLT_FN (BUILT_IN_CEIL):
1975 builtin_optab = ceil_optab; break;
1976 CASE_FLT_FN (BUILT_IN_TRUNC):
1977 builtin_optab = btrunc_optab; break;
1978 CASE_FLT_FN (BUILT_IN_ROUND):
1979 builtin_optab = round_optab; break;
1980 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1981 builtin_optab = nearbyint_optab;
1982 if (flag_trapping_math)
1983 break;
1984 /* Else fall through and expand as rint. */
1985 CASE_FLT_FN (BUILT_IN_RINT):
1986 builtin_optab = rint_optab; break;
1987 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1988 builtin_optab = significand_optab; break;
1989 default:
1990 gcc_unreachable ();
1993 /* Make a suitable register to place result in. */
1994 mode = TYPE_MODE (TREE_TYPE (exp));
1996 if (! flag_errno_math || ! HONOR_NANS (mode))
1997 errno_set = false;
1999 /* Before working hard, check whether the instruction is available. */
2000 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2002 target = gen_reg_rtx (mode);
2004 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2005 need to expand the argument again. This way, we will not perform
2006 side-effects more than once. */
2007 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2009 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2011 start_sequence ();
2013 /* Compute into TARGET.
2014 Set TARGET to wherever the result comes back. */
2015 target = expand_unop (mode, builtin_optab, op0, target, 0);
2017 if (target != 0)
2019 if (errno_set)
2020 expand_errno_check (exp, target);
2022 /* Output the entire sequence. */
2023 insns = get_insns ();
2024 end_sequence ();
2025 emit_insn (insns);
2026 return target;
2029 /* If we were unable to expand via the builtin, stop the sequence
2030 (without outputting the insns) and call the library function
2031 with the stabilized argument list. */
2032 end_sequence ();
2035 before_call = get_last_insn ();
2037 return expand_call (exp, target, target == const0_rtx);
2040 /* Expand a call to the builtin binary math functions (pow and atan2).
2041 Return NULL_RTX if a normal call should be emitted rather than expanding the
2042 function in-line. EXP is the expression that is a call to the builtin
2043 function; if convenient, the result should be placed in TARGET.
2044 SUBTARGET may be used as the target for computing one of EXP's
2045 operands. */
2047 static rtx
2048 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2050 optab builtin_optab;
2051 rtx op0, op1, insns;
2052 int op1_type = REAL_TYPE;
2053 tree fndecl = get_callee_fndecl (exp);
2054 tree arg0, arg1;
2055 enum machine_mode mode;
2056 bool errno_set = true;
2058 switch (DECL_FUNCTION_CODE (fndecl))
2060 CASE_FLT_FN (BUILT_IN_SCALBN):
2061 CASE_FLT_FN (BUILT_IN_SCALBLN):
2062 CASE_FLT_FN (BUILT_IN_LDEXP):
2063 op1_type = INTEGER_TYPE;
2064 default:
2065 break;
2068 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2069 return NULL_RTX;
2071 arg0 = CALL_EXPR_ARG (exp, 0);
2072 arg1 = CALL_EXPR_ARG (exp, 1);
2074 switch (DECL_FUNCTION_CODE (fndecl))
2076 CASE_FLT_FN (BUILT_IN_POW):
2077 builtin_optab = pow_optab; break;
2078 CASE_FLT_FN (BUILT_IN_ATAN2):
2079 builtin_optab = atan2_optab; break;
2080 CASE_FLT_FN (BUILT_IN_SCALB):
2081 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2082 return 0;
2083 builtin_optab = scalb_optab; break;
2084 CASE_FLT_FN (BUILT_IN_SCALBN):
2085 CASE_FLT_FN (BUILT_IN_SCALBLN):
2086 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2087 return 0;
2088 /* Fall through... */
2089 CASE_FLT_FN (BUILT_IN_LDEXP):
2090 builtin_optab = ldexp_optab; break;
2091 CASE_FLT_FN (BUILT_IN_FMOD):
2092 builtin_optab = fmod_optab; break;
2093 CASE_FLT_FN (BUILT_IN_REMAINDER):
2094 CASE_FLT_FN (BUILT_IN_DREM):
2095 builtin_optab = remainder_optab; break;
2096 default:
2097 gcc_unreachable ();
2100 /* Make a suitable register to place result in. */
2101 mode = TYPE_MODE (TREE_TYPE (exp));
2103 /* Before working hard, check whether the instruction is available. */
2104 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2105 return NULL_RTX;
2107 target = gen_reg_rtx (mode);
2109 if (! flag_errno_math || ! HONOR_NANS (mode))
2110 errno_set = false;
2112 /* Always stabilize the argument list. */
2113 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2114 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2116 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2117 op1 = expand_normal (arg1);
2119 start_sequence ();
2121 /* Compute into TARGET.
2122 Set TARGET to wherever the result comes back. */
2123 target = expand_binop (mode, builtin_optab, op0, op1,
2124 target, 0, OPTAB_DIRECT);
2126 /* If we were unable to expand via the builtin, stop the sequence
2127 (without outputting the insns) and call the library function
2128 with the stabilized argument list. */
2129 if (target == 0)
2131 end_sequence ();
2132 return expand_call (exp, target, target == const0_rtx);
2135 if (errno_set)
2136 expand_errno_check (exp, target);
2138 /* Output the entire sequence. */
2139 insns = get_insns ();
2140 end_sequence ();
2141 emit_insn (insns);
2143 return target;
2146 /* Expand a call to the builtin sin and cos math functions.
2147 Return NULL_RTX if a normal call should be emitted rather than expanding the
2148 function in-line. EXP is the expression that is a call to the builtin
2149 function; if convenient, the result should be placed in TARGET.
2150 SUBTARGET may be used as the target for computing one of EXP's
2151 operands. */
2153 static rtx
2154 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2156 optab builtin_optab;
2157 rtx op0, insns;
2158 tree fndecl = get_callee_fndecl (exp);
2159 enum machine_mode mode;
2160 tree arg;
2162 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2163 return NULL_RTX;
2165 arg = CALL_EXPR_ARG (exp, 0);
2167 switch (DECL_FUNCTION_CODE (fndecl))
2169 CASE_FLT_FN (BUILT_IN_SIN):
2170 CASE_FLT_FN (BUILT_IN_COS):
2171 builtin_optab = sincos_optab; break;
2172 default:
2173 gcc_unreachable ();
2176 /* Make a suitable register to place result in. */
2177 mode = TYPE_MODE (TREE_TYPE (exp));
2179 /* Check if sincos insn is available, otherwise fall back
2180 to sin or cos insn. */
2181 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2182 switch (DECL_FUNCTION_CODE (fndecl))
2184 CASE_FLT_FN (BUILT_IN_SIN):
2185 builtin_optab = sin_optab; break;
2186 CASE_FLT_FN (BUILT_IN_COS):
2187 builtin_optab = cos_optab; break;
2188 default:
2189 gcc_unreachable ();
2192 /* Before working hard, check whether the instruction is available. */
2193 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2195 target = gen_reg_rtx (mode);
2197 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2198 need to expand the argument again. This way, we will not perform
2199 side-effects more than once. */
2200 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2202 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2204 start_sequence ();
2206 /* Compute into TARGET.
2207 Set TARGET to wherever the result comes back. */
2208 if (builtin_optab == sincos_optab)
2210 int result;
2212 switch (DECL_FUNCTION_CODE (fndecl))
2214 CASE_FLT_FN (BUILT_IN_SIN):
2215 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2216 break;
2217 CASE_FLT_FN (BUILT_IN_COS):
2218 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2219 break;
2220 default:
2221 gcc_unreachable ();
2223 gcc_assert (result);
2225 else
2227 target = expand_unop (mode, builtin_optab, op0, target, 0);
2230 if (target != 0)
2232 /* Output the entire sequence. */
2233 insns = get_insns ();
2234 end_sequence ();
2235 emit_insn (insns);
2236 return target;
2239 /* If we were unable to expand via the builtin, stop the sequence
2240 (without outputting the insns) and call the library function
2241 with the stabilized argument list. */
2242 end_sequence ();
2245 target = expand_call (exp, target, target == const0_rtx);
2247 return target;
2250 /* Expand a call to one of the builtin math functions that operate on
2251 a floating point argument and output an integer result (ilogb, isinf,
2252 isnan, etc).
2253 Return 0 if a normal call should be emitted rather than expanding the
2254 function in-line. EXP is the expression that is a call to the builtin
2255 function; if convenient, the result should be placed in TARGET.
2256 SUBTARGET may be used as the target for computing one of EXP's operands. */
2258 static rtx
2259 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2261 optab builtin_optab = 0;
2262 enum insn_code icode = CODE_FOR_nothing;
2263 rtx op0;
2264 tree fndecl = get_callee_fndecl (exp);
2265 enum machine_mode mode;
2266 bool errno_set = false;
2267 tree arg;
2268 location_t loc = EXPR_LOCATION (exp);
2270 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2271 return NULL_RTX;
2273 arg = CALL_EXPR_ARG (exp, 0);
2275 switch (DECL_FUNCTION_CODE (fndecl))
2277 CASE_FLT_FN (BUILT_IN_ILOGB):
2278 errno_set = true; builtin_optab = ilogb_optab; break;
2279 CASE_FLT_FN (BUILT_IN_ISINF):
2280 builtin_optab = isinf_optab; break;
2281 case BUILT_IN_ISNORMAL:
2282 case BUILT_IN_ISFINITE:
2283 CASE_FLT_FN (BUILT_IN_FINITE):
2284 /* These builtins have no optabs (yet). */
2285 break;
2286 default:
2287 gcc_unreachable ();
2290 /* There's no easy way to detect the case we need to set EDOM. */
2291 if (flag_errno_math && errno_set)
2292 return NULL_RTX;
2294 /* Optab mode depends on the mode of the input argument. */
2295 mode = TYPE_MODE (TREE_TYPE (arg));
2297 if (builtin_optab)
2298 icode = optab_handler (builtin_optab, mode)->insn_code;
2300 /* Before working hard, check whether the instruction is available. */
2301 if (icode != CODE_FOR_nothing)
2303 /* Make a suitable register to place result in. */
2304 if (!target
2305 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2306 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2308 gcc_assert (insn_data[icode].operand[0].predicate
2309 (target, GET_MODE (target)));
2311 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2312 need to expand the argument again. This way, we will not perform
2313 side-effects more than once. */
2314 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2316 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2318 if (mode != GET_MODE (op0))
2319 op0 = convert_to_mode (mode, op0, 0);
2321 /* Compute into TARGET.
2322 Set TARGET to wherever the result comes back. */
2323 emit_unop_insn (icode, target, op0, UNKNOWN);
2324 return target;
2327 /* If there is no optab, try generic code. */
2328 switch (DECL_FUNCTION_CODE (fndecl))
2330 tree result;
2332 CASE_FLT_FN (BUILT_IN_ISINF):
2334 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2335 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2336 tree const type = TREE_TYPE (arg);
2337 REAL_VALUE_TYPE r;
2338 char buf[128];
2340 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2341 real_from_string (&r, buf);
2342 result = build_call_expr (isgr_fn, 2,
2343 fold_build1_loc (loc, ABS_EXPR, type, arg),
2344 build_real (type, r));
2345 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2347 CASE_FLT_FN (BUILT_IN_FINITE):
2348 case BUILT_IN_ISFINITE:
2350 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2351 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2352 tree const type = TREE_TYPE (arg);
2353 REAL_VALUE_TYPE r;
2354 char buf[128];
2356 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2357 real_from_string (&r, buf);
2358 result = build_call_expr (isle_fn, 2,
2359 fold_build1_loc (loc, ABS_EXPR, type, arg),
2360 build_real (type, r));
2361 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2363 case BUILT_IN_ISNORMAL:
2365 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2366 islessequal(fabs(x),DBL_MAX). */
2367 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2368 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2369 tree const type = TREE_TYPE (arg);
2370 REAL_VALUE_TYPE rmax, rmin;
2371 char buf[128];
2373 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2374 real_from_string (&rmax, buf);
2375 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2376 real_from_string (&rmin, buf);
2377 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
2378 result = build_call_expr (isle_fn, 2, arg,
2379 build_real (type, rmax));
2380 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2381 build_call_expr (isge_fn, 2, arg,
2382 build_real (type, rmin)));
2383 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2385 default:
2386 break;
2389 target = expand_call (exp, target, target == const0_rtx);
2391 return target;
2394 /* Expand a call to the builtin sincos math function.
2395 Return NULL_RTX if a normal call should be emitted rather than expanding the
2396 function in-line. EXP is the expression that is a call to the builtin
2397 function. */
2399 static rtx
2400 expand_builtin_sincos (tree exp)
2402 rtx op0, op1, op2, target1, target2;
2403 enum machine_mode mode;
2404 tree arg, sinp, cosp;
2405 int result;
2406 location_t loc = EXPR_LOCATION (exp);
2408 if (!validate_arglist (exp, REAL_TYPE,
2409 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2410 return NULL_RTX;
2412 arg = CALL_EXPR_ARG (exp, 0);
2413 sinp = CALL_EXPR_ARG (exp, 1);
2414 cosp = CALL_EXPR_ARG (exp, 2);
2416 /* Make a suitable register to place result in. */
2417 mode = TYPE_MODE (TREE_TYPE (arg));
2419 /* Check if sincos insn is available, otherwise emit the call. */
2420 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2421 return NULL_RTX;
2423 target1 = gen_reg_rtx (mode);
2424 target2 = gen_reg_rtx (mode);
2426 op0 = expand_normal (arg);
2427 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2428 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2430 /* Compute into target1 and target2.
2431 Set TARGET to wherever the result comes back. */
2432 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2433 gcc_assert (result);
2435 /* Move target1 and target2 to the memory locations indicated
2436 by op1 and op2. */
2437 emit_move_insn (op1, target1);
2438 emit_move_insn (op2, target2);
2440 return const0_rtx;
2443 /* Expand a call to the internal cexpi builtin to the sincos math function.
2444 EXP is the expression that is a call to the builtin function; if convenient,
2445 the result should be placed in TARGET. SUBTARGET may be used as the target
2446 for computing one of EXP's operands. */
2448 static rtx
2449 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2451 tree fndecl = get_callee_fndecl (exp);
2452 tree arg, type;
2453 enum machine_mode mode;
2454 rtx op0, op1, op2;
2455 location_t loc = EXPR_LOCATION (exp);
2457 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2458 return NULL_RTX;
2460 arg = CALL_EXPR_ARG (exp, 0);
2461 type = TREE_TYPE (arg);
2462 mode = TYPE_MODE (TREE_TYPE (arg));
2464 /* Try expanding via a sincos optab, fall back to emitting a libcall
2465 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2466 is only generated from sincos or cexp, or if we have either of them. */
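/* Background note (illustrative, not part of the original source):
   cexpi (x) computes exp (i*x) == cos (x) + i*sin (x), which is why
   it can be lowered either to a sincos call filling in the two
   components or to cexp (0 + i*x).  */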
2467 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2469 op1 = gen_reg_rtx (mode);
2470 op2 = gen_reg_rtx (mode);
2472 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2474 /* Compute into op1 and op2. */
2475 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2477 else if (TARGET_HAS_SINCOS)
2479 tree call, fn = NULL_TREE;
2480 tree top1, top2;
2481 rtx op1a, op2a;
2483 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2484 fn = built_in_decls[BUILT_IN_SINCOSF];
2485 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2486 fn = built_in_decls[BUILT_IN_SINCOS];
2487 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2488 fn = built_in_decls[BUILT_IN_SINCOSL];
2489 else
2490 gcc_unreachable ();
2492 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2493 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2494 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2495 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2496 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2497 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2499 /* Make sure not to fold the sincos call again. */
2500 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2501 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2502 call, 3, arg, top1, top2));
2504 else
2506 tree call, fn = NULL_TREE, narg;
2507 tree ctype = build_complex_type (type);
2509 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2510 fn = built_in_decls[BUILT_IN_CEXPF];
2511 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2512 fn = built_in_decls[BUILT_IN_CEXP];
2513 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2514 fn = built_in_decls[BUILT_IN_CEXPL];
2515 else
2516 gcc_unreachable ();
2518 /* If we don't have a decl for cexp, create one. This is the
2519 friendliest fallback if the user calls __builtin_cexpi
2520 on a target without full C99 function support. */
2521 if (fn == NULL_TREE)
2523 tree fntype;
2524 const char *name = NULL;
2526 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2527 name = "cexpf";
2528 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2529 name = "cexp";
2530 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2531 name = "cexpl";
2533 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2534 fn = build_fn_decl (name, fntype);
2537 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2538 build_real (type, dconst0), arg);
2540 /* Make sure not to fold the cexp call again. */
2541 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2542 return expand_expr (build_call_nary (ctype, call, 1, narg),
2543 target, VOIDmode, EXPAND_NORMAL);
2546 /* Now build the proper return type. */
2547 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2548 make_tree (TREE_TYPE (arg), op2),
2549 make_tree (TREE_TYPE (arg), op1)),
2550 target, VOIDmode, EXPAND_NORMAL);
2553 /* Expand a call to one of the builtin rounding functions gcc defines
2554 as an extension (lfloor and lceil). As these are gcc extensions we
2555 do not need to worry about setting errno to EDOM.
2556 If expanding via optab fails, lower expression to (int)(floor(x)).
2557 EXP is the expression that is a call to the builtin function;
2558 if convenient, the result should be placed in TARGET. */
2560 static rtx
2561 expand_builtin_int_roundingfn (tree exp, rtx target)
2563 convert_optab builtin_optab;
2564 rtx op0, insns, tmp;
2565 tree fndecl = get_callee_fndecl (exp);
2566 enum built_in_function fallback_fn;
2567 tree fallback_fndecl;
2568 enum machine_mode mode;
2569 tree arg;
2571 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2572 gcc_unreachable ();
2574 arg = CALL_EXPR_ARG (exp, 0);
2576 switch (DECL_FUNCTION_CODE (fndecl))
2578 CASE_FLT_FN (BUILT_IN_LCEIL):
2579 CASE_FLT_FN (BUILT_IN_LLCEIL):
2580 builtin_optab = lceil_optab;
2581 fallback_fn = BUILT_IN_CEIL;
2582 break;
2584 CASE_FLT_FN (BUILT_IN_LFLOOR):
2585 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2586 builtin_optab = lfloor_optab;
2587 fallback_fn = BUILT_IN_FLOOR;
2588 break;
2590 default:
2591 gcc_unreachable ();
2594 /* Make a suitable register to place result in. */
2595 mode = TYPE_MODE (TREE_TYPE (exp));
2597 target = gen_reg_rtx (mode);
2599 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2600 need to expand the argument again. This way, we will not perform
2601 side-effects more than once. */
2602 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2604 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2606 start_sequence ();
2608 /* Compute into TARGET. */
2609 if (expand_sfix_optab (target, op0, builtin_optab))
2611 /* Output the entire sequence. */
2612 insns = get_insns ();
2613 end_sequence ();
2614 emit_insn (insns);
2615 return target;
2618 /* If we were unable to expand via the builtin, stop the sequence
2619 (without outputting the insns). */
2620 end_sequence ();
2622 /* Fall back to floating point rounding optab. */
2623 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2625 /* For non-C99 targets we may end up without a fallback fndecl here
2626 if the user called __builtin_lfloor directly. In this case emit
2627 a call to the floor/ceil variants nevertheless. This should result
2628 in the best user experience on targets without full C99 support. */
2629 if (fallback_fndecl == NULL_TREE)
2631 tree fntype;
2632 const char *name = NULL;
2634 switch (DECL_FUNCTION_CODE (fndecl))
2636 case BUILT_IN_LCEIL:
2637 case BUILT_IN_LLCEIL:
2638 name = "ceil";
2639 break;
2640 case BUILT_IN_LCEILF:
2641 case BUILT_IN_LLCEILF:
2642 name = "ceilf";
2643 break;
2644 case BUILT_IN_LCEILL:
2645 case BUILT_IN_LLCEILL:
2646 name = "ceill";
2647 break;
2648 case BUILT_IN_LFLOOR:
2649 case BUILT_IN_LLFLOOR:
2650 name = "floor";
2651 break;
2652 case BUILT_IN_LFLOORF:
2653 case BUILT_IN_LLFLOORF:
2654 name = "floorf";
2655 break;
2656 case BUILT_IN_LFLOORL:
2657 case BUILT_IN_LLFLOORL:
2658 name = "floorl";
2659 break;
2660 default:
2661 gcc_unreachable ();
2664 fntype = build_function_type_list (TREE_TYPE (arg),
2665 TREE_TYPE (arg), NULL_TREE);
2666 fallback_fndecl = build_fn_decl (name, fntype);
2669 exp = build_call_expr (fallback_fndecl, 1, arg);
2671 tmp = expand_normal (exp);
2673 /* Truncate the result of floating point optab to integer
2674 via expand_fix (). */
2675 target = gen_reg_rtx (mode);
2676 expand_fix (target, tmp, 0);
2678 return target;
2681 /* Expand a call to one of the builtin math functions doing integer
2682 conversion (lrint).
2683 Return 0 if a normal call should be emitted rather than expanding the
2684 function in-line. EXP is the expression that is a call to the builtin
2685 function; if convenient, the result should be placed in TARGET. */
2687 static rtx
2688 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2690 convert_optab builtin_optab;
2691 rtx op0, insns;
2692 tree fndecl = get_callee_fndecl (exp);
2693 tree arg;
2694 enum machine_mode mode;
2696 /* There's no easy way to detect the case we need to set EDOM. */
2697 if (flag_errno_math)
2698 return NULL_RTX;
2700 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2701 gcc_unreachable ();
2703 arg = CALL_EXPR_ARG (exp, 0);
2705 switch (DECL_FUNCTION_CODE (fndecl))
2707 CASE_FLT_FN (BUILT_IN_LRINT):
2708 CASE_FLT_FN (BUILT_IN_LLRINT):
2709 builtin_optab = lrint_optab; break;
2710 CASE_FLT_FN (BUILT_IN_LROUND):
2711 CASE_FLT_FN (BUILT_IN_LLROUND):
2712 builtin_optab = lround_optab; break;
2713 default:
2714 gcc_unreachable ();
2717 /* Make a suitable register to place result in. */
2718 mode = TYPE_MODE (TREE_TYPE (exp));
2720 target = gen_reg_rtx (mode);
2722 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2723 need to expand the argument again. This way, we will not perform
2724 side-effects more than once. */
2725 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2727 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2729 start_sequence ();
2731 if (expand_sfix_optab (target, op0, builtin_optab))
2733 /* Output the entire sequence. */
2734 insns = get_insns ();
2735 end_sequence ();
2736 emit_insn (insns);
2737 return target;
2740 /* If we were unable to expand via the builtin, stop the sequence
2741 (without outputting the insns) and call the library function
2742 with the stabilized argument list. */
2743 end_sequence ();
2745 target = expand_call (exp, target, target == const0_rtx);
2747 return target;
2750 /* To evaluate powi(x,n), the floating point value x raised to the
2751 constant integer exponent n, we use a hybrid algorithm that
2752 combines the "window method" with look-up tables. For an
2753 introduction to exponentiation algorithms and "addition chains",
2754 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2755 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2756 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2757 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2759 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2760 multiplications to inline before calling the system library's pow
2761 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2762 so this default never requires calling pow, powf or powl. */
2764 #ifndef POWI_MAX_MULTS
2765 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2766 #endif
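/* Arithmetic note (illustrative, not part of the original source):
   on a host with a 64-bit HOST_WIDE_INT the default above evaluates
   to 2*64 - 2 == 126 multiplications.  */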
2768 /* The size of the "optimal power tree" lookup table. All
2769 exponents less than this value are simply looked up in the
2770 powi_table below. This threshold is also used to size the
2771 cache of pseudo registers that hold intermediate results. */
2772 #define POWI_TABLE_SIZE 256
2774 /* The size, in bits of the window, used in the "window method"
2775 exponentiation algorithm. This is equivalent to a radix of
2776 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2777 #define POWI_WINDOW_SIZE 3
2779 /* The following table is an efficient representation of an
2780 "optimal power tree". For each value, i, the corresponding
2781 value, j, in the table states that an optimal evaluation
2782 sequence for calculating pow(x,i) can be found by evaluating
2783 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2784 100 integers is given in Knuth's "Seminumerical Algorithms". */
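/* Worked example (illustrative, not part of the original source):
   powi_table[5] is 3, so x**5 is computed as x**3 * x**2; x**3 uses
   powi_table[3] == 2 (x**2 * x) and x**2 is x * x, for three
   multiplications in total once intermediate results are cached.
   Exponents of POWI_TABLE_SIZE or more are first reduced by the
   window method in expand_powi_1 below: 259 is odd, so its low
   POWI_WINDOW_SIZE bits (the digit 3) are split off, giving
   x**259 = x**256 * x**3 with x**256 = (x**128)**2, and 128 is
   again small enough for the table.  */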
2786 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2788 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2789 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2790 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2791 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2792 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2793 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2794 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2795 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2796 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2797 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2798 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2799 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2800 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2801 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2802 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2803 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2804 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2805 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2806 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2807 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2808 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2809 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2810 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2811 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2812 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2813 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2814 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2815 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2816 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2817 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2818 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2819 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2823 /* Return the number of multiplications required to calculate
2824 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2825 subroutine of powi_cost. CACHE is an array indicating
2826 which exponents have already been calculated. */
2828 static int
2829 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2831 /* If we've already calculated this exponent, then this evaluation
2832 doesn't require any additional multiplications. */
2833 if (cache[n])
2834 return 0;
2836 cache[n] = true;
2837 return powi_lookup_cost (n - powi_table[n], cache)
2838 + powi_lookup_cost (powi_table[n], cache) + 1;
2841 /* Return the number of multiplications required to calculate
2842 powi(x,n) for an arbitrary x, given the exponent N. This
2843 function needs to be kept in sync with expand_powi below. */
2845 static int
2846 powi_cost (HOST_WIDE_INT n)
2848 bool cache[POWI_TABLE_SIZE];
2849 unsigned HOST_WIDE_INT digit;
2850 unsigned HOST_WIDE_INT val;
2851 int result;
2853 if (n == 0)
2854 return 0;
2856 /* Ignore the reciprocal when calculating the cost. */
2857 val = (n < 0) ? -n : n;
2859 /* Initialize the exponent cache. */
2860 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2861 cache[1] = true;
2863 result = 0;
2865 while (val >= POWI_TABLE_SIZE)
2867 if (val & 1)
2869 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2870 result += powi_lookup_cost (digit, cache)
2871 + POWI_WINDOW_SIZE + 1;
2872 val >>= POWI_WINDOW_SIZE;
2874 else
2876 val >>= 1;
2877 result++;
2881 return result + powi_lookup_cost (val, cache);
2884 /* Recursive subroutine of expand_powi. This function takes the array,
2885 CACHE, of already calculated exponents and an exponent N and returns
2886 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2888 static rtx
2889 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2891 unsigned HOST_WIDE_INT digit;
2892 rtx target, result;
2893 rtx op0, op1;
2895 if (n < POWI_TABLE_SIZE)
2897 if (cache[n])
2898 return cache[n];
2900 target = gen_reg_rtx (mode);
2901 cache[n] = target;
2903 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2904 op1 = expand_powi_1 (mode, powi_table[n], cache);
2906 else if (n & 1)
2908 target = gen_reg_rtx (mode);
2909 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2910 op0 = expand_powi_1 (mode, n - digit, cache);
2911 op1 = expand_powi_1 (mode, digit, cache);
2913 else
2915 target = gen_reg_rtx (mode);
2916 op0 = expand_powi_1 (mode, n >> 1, cache);
2917 op1 = op0;
2920 result = expand_mult (mode, op0, op1, target, 0);
2921 if (result != target)
2922 emit_move_insn (target, result);
2923 return target;
2926 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2927 floating point operand in mode MODE, and N is the exponent. This
2928 function needs to be kept in sync with powi_cost above. */
2930 static rtx
2931 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2933 unsigned HOST_WIDE_INT val;
2934 rtx cache[POWI_TABLE_SIZE];
2935 rtx result;
2937 if (n == 0)
2938 return CONST1_RTX (mode);
2940 val = (n < 0) ? -n : n;
2942 memset (cache, 0, sizeof (cache));
2943 cache[1] = x;
2945 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2947 /* If the original exponent was negative, reciprocate the result. */
2948 if (n < 0)
2949 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2950 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2952 return result;
2955 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2956 a normal call should be emitted rather than expanding the function
2957 in-line. EXP is the expression that is a call to the builtin
2958 function; if convenient, the result should be placed in TARGET. */
2960 static rtx
2961 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2963 tree arg0, arg1;
2964 tree fn, narg0;
2965 tree type = TREE_TYPE (exp);
2966 REAL_VALUE_TYPE cint, c, c2;
2967 HOST_WIDE_INT n;
2968 rtx op, op2;
2969 enum machine_mode mode = TYPE_MODE (type);
2971 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2972 return NULL_RTX;
2974 arg0 = CALL_EXPR_ARG (exp, 0);
2975 arg1 = CALL_EXPR_ARG (exp, 1);
2977 if (TREE_CODE (arg1) != REAL_CST
2978 || TREE_OVERFLOW (arg1))
2979 return expand_builtin_mathfn_2 (exp, target, subtarget);
2981 /* Handle constant exponents. */
2983 /* For integer valued exponents we can expand to an optimal multiplication
2984 sequence using expand_powi. */
2985 c = TREE_REAL_CST (arg1);
2986 n = real_to_integer (&c);
2987 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2988 if (real_identical (&c, &cint)
2989 && ((n >= -1 && n <= 2)
2990 || (flag_unsafe_math_optimizations
2991 && optimize_insn_for_speed_p ()
2992 && powi_cost (n) <= POWI_MAX_MULTS)))
2994 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2995 if (n != 1)
2997 op = force_reg (mode, op);
2998 op = expand_powi (op, mode, n);
3000 return op;
3003 narg0 = builtin_save_expr (arg0);
3005 /* If the exponent is not integer valued, check if it is half of an integer.
3006 In this case we can expand to sqrt (x) * x**(n/2). */
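/* Worked example (illustrative, not part of the original source):
   for pow (x, 2.5) the code below finds n == 5, so the call expands
   to sqrt (x) * x**2 == x**2.5; a negative exponent such as -2.5 is
   handled by reciprocating that result.  */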
3007 fn = mathfn_built_in (type, BUILT_IN_SQRT);
3008 if (fn != NULL_TREE)
3010 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
3011 n = real_to_integer (&c2);
3012 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3013 if (real_identical (&c2, &cint)
3014 && ((flag_unsafe_math_optimizations
3015 && optimize_insn_for_speed_p ()
3016 && powi_cost (n/2) <= POWI_MAX_MULTS)
3017 || n == 1))
3019 tree call_expr = build_call_expr (fn, 1, narg0);
3020 /* Use expand_expr in case the newly built call expression
3021 was folded to a non-call. */
3022 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3023 if (n != 1)
3025 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3026 op2 = force_reg (mode, op2);
3027 op2 = expand_powi (op2, mode, abs (n / 2));
3028 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3029 0, OPTAB_LIB_WIDEN);
3030 /* If the original exponent was negative, reciprocate the
3031 result. */
3032 if (n < 0)
3033 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3034 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3036 return op;
3040 /* Check whether the exponent is a third of an integer. In this case
3041 we can expand to x**(n/3) * cbrt(x)**(n%3). Since cbrt (x) differs
3042 from pow (x, 1./3.) due to rounding and its behavior for negative x,
3043 we need to constrain this transformation to unsafe math combined
3044 with either positive x or finite math. */
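/* Worked example (illustrative, not part of the original source):
   for pow (x, 5./3.) the code below finds n == 5; since n % 3 == 2
   the cbrt (x) value is squared once, and n/3 == 1 multiplies in one
   plain factor of x, giving x * cbrt (x)**2 == x**(5/3).  */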
3045 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3046 if (fn != NULL_TREE
3047 && flag_unsafe_math_optimizations
3048 && (tree_expr_nonnegative_p (arg0)
3049 || !HONOR_NANS (mode)))
3051 REAL_VALUE_TYPE dconst3;
3052 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3053 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3054 real_round (&c2, mode, &c2);
3055 n = real_to_integer (&c2);
3056 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3057 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3058 real_convert (&c2, mode, &c2);
3059 if (real_identical (&c2, &c)
3060 && ((optimize_insn_for_speed_p ()
3061 && powi_cost (n/3) <= POWI_MAX_MULTS)
3062 || n == 1))
3064 tree call_expr = build_call_expr (fn, 1, narg0);
3065 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3066 if (abs (n) % 3 == 2)
3067 op = expand_simple_binop (mode, MULT, op, op, op,
3068 0, OPTAB_LIB_WIDEN);
3069 if (n != 1)
3071 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3072 op2 = force_reg (mode, op2);
3073 op2 = expand_powi (op2, mode, abs (n / 3));
3074 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3075 0, OPTAB_LIB_WIDEN);
3076 /* If the original exponent was negative, reciprocate the
3077 result. */
3078 if (n < 0)
3079 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3080 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3082 return op;
3086 /* Fall back to optab expansion. */
3087 return expand_builtin_mathfn_2 (exp, target, subtarget);
3090 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3091 a normal call should be emitted rather than expanding the function
3092 in-line. EXP is the expression that is a call to the builtin
3093 function; if convenient, the result should be placed in TARGET. */
3095 static rtx
3096 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3098 tree arg0, arg1;
3099 rtx op0, op1;
3100 enum machine_mode mode;
3101 enum machine_mode mode2;
3103 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3104 return NULL_RTX;
3106 arg0 = CALL_EXPR_ARG (exp, 0);
3107 arg1 = CALL_EXPR_ARG (exp, 1);
3108 mode = TYPE_MODE (TREE_TYPE (exp));
3110 /* Handle constant power. */
3112 if (TREE_CODE (arg1) == INTEGER_CST
3113 && !TREE_OVERFLOW (arg1))
3115 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3117 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3118 Otherwise, check the number of multiplications required. */
3119 if ((TREE_INT_CST_HIGH (arg1) == 0
3120 || TREE_INT_CST_HIGH (arg1) == -1)
3121 && ((n >= -1 && n <= 2)
3122 || (optimize_insn_for_speed_p ()
3123 && powi_cost (n) <= POWI_MAX_MULTS)))
3125 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3126 op0 = force_reg (mode, op0);
3127 return expand_powi (op0, mode, n);
3131 /* Emit a libcall to libgcc. */
3133 /* Mode of the 2nd argument must match that of an int. */
3134 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3136 if (target == NULL_RTX)
3137 target = gen_reg_rtx (mode);
3139 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3140 if (GET_MODE (op0) != mode)
3141 op0 = convert_to_mode (mode, op0, 0);
3142 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3143 if (GET_MODE (op1) != mode2)
3144 op1 = convert_to_mode (mode2, op1, 0);
3146 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3147 target, LCT_CONST, mode, 2,
3148 op0, mode, op1, mode2);
3150 return target;
3153 /* Expand expression EXP which is a call to the strlen builtin. Return
3154 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3155 try to get the result in TARGET, if convenient. */
3157 static rtx
3158 expand_builtin_strlen (tree exp, rtx target,
3159 enum machine_mode target_mode)
3161 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3162 return NULL_RTX;
3163 else
3165 rtx pat;
3166 tree len;
3167 tree src = CALL_EXPR_ARG (exp, 0);
3168 rtx result, src_reg, char_rtx, before_strlen;
3169 enum machine_mode insn_mode = target_mode, char_mode;
3170 enum insn_code icode = CODE_FOR_nothing;
3171 int align;
3173 /* If the length can be computed at compile-time, return it. */
3174 len = c_strlen (src, 0);
3175 if (len)
3176 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3178 /* If the length can be computed at compile-time and is a constant
3179 integer, but there are side-effects in src, evaluate
3180 src for side-effects, then return len.
3181 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3182 can be optimized into: i++; x = 3; */
3183 len = c_strlen (src, 1);
3184 if (len && TREE_CODE (len) == INTEGER_CST)
3186 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3187 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3190 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3192 /* If SRC is not a pointer type, don't do this operation inline. */
3193 if (align == 0)
3194 return NULL_RTX;
3196 /* Bail out if we can't compute strlen in the right mode. */
3197 while (insn_mode != VOIDmode)
3199 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3200 if (icode != CODE_FOR_nothing)
3201 break;
3203 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3205 if (insn_mode == VOIDmode)
3206 return NULL_RTX;
3208 /* Make a place to write the result of the instruction. */
3209 result = target;
3210 if (! (result != 0
3211 && REG_P (result)
3212 && GET_MODE (result) == insn_mode
3213 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3214 result = gen_reg_rtx (insn_mode);
3216 /* Make a place to hold the source address. We will not expand
3217 the actual source until we are sure that the expansion will
3218 not fail -- there are trees that cannot be expanded twice. */
3219 src_reg = gen_reg_rtx (Pmode);
3221 /* Mark the beginning of the strlen sequence so we can emit the
3222 source operand later. */
3223 before_strlen = get_last_insn ();
3225 char_rtx = const0_rtx;
3226 char_mode = insn_data[(int) icode].operand[2].mode;
3227 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3228 char_mode))
3229 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3231 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3232 char_rtx, GEN_INT (align));
3233 if (! pat)
3234 return NULL_RTX;
3235 emit_insn (pat);
3237 /* Now that we are assured of success, expand the source. */
3238 start_sequence ();
3239 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3240 if (pat != src_reg)
3241 emit_move_insn (src_reg, pat);
3242 pat = get_insns ();
3243 end_sequence ();
3245 if (before_strlen)
3246 emit_insn_after (pat, before_strlen);
3247 else
3248 emit_insn_before (pat, get_insns ());
3250 /* Return the value in the proper mode for this function. */
3251 if (GET_MODE (result) == target_mode)
3252 target = result;
3253 else if (target != 0)
3254 convert_move (target, result, 0);
3255 else
3256 target = convert_to_mode (target_mode, result, 0);
3258 return target;
3262 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed; the
3263 caller should emit a normal call, otherwise try to get the result
3264 in TARGET, if convenient (and in mode MODE if that's convenient). */
3266 static rtx
3267 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3269 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3271 tree type = TREE_TYPE (exp);
3272 tree result = fold_builtin_strstr (EXPR_LOCATION (exp),
3273 CALL_EXPR_ARG (exp, 0),
3274 CALL_EXPR_ARG (exp, 1), type);
3275 if (result)
3276 return expand_expr (result, target, mode, EXPAND_NORMAL);
3278 return NULL_RTX;
3281 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed; the
3282 caller should emit a normal call, otherwise try to get the result
3283 in TARGET, if convenient (and in mode MODE if that's convenient). */
3285 static rtx
3286 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3288 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3290 tree type = TREE_TYPE (exp);
3291 tree result = fold_builtin_strchr (EXPR_LOCATION (exp),
3292 CALL_EXPR_ARG (exp, 0),
3293 CALL_EXPR_ARG (exp, 1), type);
3294 if (result)
3295 return expand_expr (result, target, mode, EXPAND_NORMAL);
3297 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3299 return NULL_RTX;
3302 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed; the
3303 caller should emit a normal call, otherwise try to get the result
3304 in TARGET, if convenient (and in mode MODE if that's convenient). */
3306 static rtx
3307 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3309 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3311 tree type = TREE_TYPE (exp);
3312 tree result = fold_builtin_strrchr (EXPR_LOCATION (exp),
3313 CALL_EXPR_ARG (exp, 0),
3314 CALL_EXPR_ARG (exp, 1), type);
3315 if (result)
3316 return expand_expr (result, target, mode, EXPAND_NORMAL);
3318 return NULL_RTX;
3321 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed; the
3322 caller should emit a normal call, otherwise try to get the result
3323 in TARGET, if convenient (and in mode MODE if that's convenient). */
3325 static rtx
3326 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3328 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3330 tree type = TREE_TYPE (exp);
3331 tree result = fold_builtin_strpbrk (EXPR_LOCATION (exp),
3332 CALL_EXPR_ARG (exp, 0),
3333 CALL_EXPR_ARG (exp, 1), type);
3334 if (result)
3335 return expand_expr (result, target, mode, EXPAND_NORMAL);
3337 return NULL_RTX;
3340 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3341 bits from constant string DATA + OFFSET and return it as a target
3342 constant. */
3344 static rtx
3345 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3346 enum machine_mode mode)
3348 const char *str = (const char *) data;
3350 gcc_assert (offset >= 0
3351 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3352 <= strlen (str) + 1));
3354 return c_readstr (str + offset, mode);
3357 /* Expand a call EXP to the memcpy builtin.
3358 Return NULL_RTX if we failed; the caller should emit a normal call,
3359 otherwise try to get the result in TARGET, if convenient (and in
3360 mode MODE if that's convenient). */
3362 static rtx
3363 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3365 tree fndecl = get_callee_fndecl (exp);
3367 if (!validate_arglist (exp,
3368 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3369 return NULL_RTX;
3370 else
3372 tree dest = CALL_EXPR_ARG (exp, 0);
3373 tree src = CALL_EXPR_ARG (exp, 1);
3374 tree len = CALL_EXPR_ARG (exp, 2);
3375 const char *src_str;
3376 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3377 unsigned int dest_align
3378 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3379 rtx dest_mem, src_mem, dest_addr, len_rtx;
3380 tree result = fold_builtin_memory_op (EXPR_LOCATION (exp),
3381 dest, src, len,
3382 TREE_TYPE (TREE_TYPE (fndecl)),
3383 false, /*endp=*/0);
3384 HOST_WIDE_INT expected_size = -1;
3385 unsigned int expected_align = 0;
3386 tree_ann_common_t ann;
3388 if (result)
3390 while (TREE_CODE (result) == COMPOUND_EXPR)
3392 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3393 EXPAND_NORMAL);
3394 result = TREE_OPERAND (result, 1);
3396 return expand_expr (result, target, mode, EXPAND_NORMAL);
3399 /* If DEST is not a pointer type, call the normal function. */
3400 if (dest_align == 0)
3401 return NULL_RTX;
3403 /* If SRC is not a pointer type, don't do this
3404 operation in-line. */
3405 if (src_align == 0)
3406 return NULL_RTX;
3408 ann = tree_common_ann (exp);
3409 if (ann)
3410 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3412 if (expected_align < dest_align)
3413 expected_align = dest_align;
3414 dest_mem = get_memory_rtx (dest, len);
3415 set_mem_align (dest_mem, dest_align);
3416 len_rtx = expand_normal (len);
3417 src_str = c_getstr (src);
3419 /* If SRC is a string constant and block move would be done
3420 by pieces, we can avoid loading the string from memory
3421 and only store the computed constants. */
3422 if (src_str
3423 && CONST_INT_P (len_rtx)
3424 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3425 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3426 CONST_CAST (char *, src_str),
3427 dest_align, false))
3429 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3430 builtin_memcpy_read_str,
3431 CONST_CAST (char *, src_str),
3432 dest_align, false, 0);
3433 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3434 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3435 return dest_mem;
3438 src_mem = get_memory_rtx (src, len);
3439 set_mem_align (src_mem, src_align);
3441 /* Copy word part most expediently. */
3442 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3443 CALL_EXPR_TAILCALL (exp)
3444 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3445 expected_align, expected_size);
3447 if (dest_addr == 0)
3449 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3450 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3452 return dest_addr;
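/* Illustrative sketch, not part of builtins.c: the source-level shape that
   the store-by-pieces path above recognizes.  When SRC is a string literal
   and LEN is a small compile-time constant, the whole copy can be emitted
   as a few immediate stores instead of a call.  The helper name and the
   byte values in the trailing comment are hypothetical and assume a
   little-endian target.  */
#include <string.h>

void
init_greeting (char *buf)
{
  /* len_rtx is CONST_INT 6 and c_getstr () sees "hello", so this fits the
     can_store_by_pieces () test on most targets.  */
  memcpy (buf, "hello", 6);   /* e.g. one 4-byte store of 0x6c6c6568 and
                                 one 2-byte store of 0x006f */
}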
3456 /* Expand a call EXP to the mempcpy builtin.
3457 Return NULL_RTX if we failed; the caller should emit a normal call,
3458 otherwise try to get the result in TARGET, if convenient (and in
3459 mode MODE if that's convenient). If ENDP is 0 return the
3460 destination pointer, if ENDP is 1 return the end pointer ala
3461 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3462 stpcpy. */
3464 static rtx
3465 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3467 if (!validate_arglist (exp,
3468 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3469 return NULL_RTX;
3470 else
3472 tree dest = CALL_EXPR_ARG (exp, 0);
3473 tree src = CALL_EXPR_ARG (exp, 1);
3474 tree len = CALL_EXPR_ARG (exp, 2);
3475 return expand_builtin_mempcpy_args (dest, src, len,
3476 TREE_TYPE (exp),
3477 target, mode, /*endp=*/ 1);
3481 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3482 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3483 so that this can also be called without constructing an actual CALL_EXPR.
3484 TYPE is the return type of the call. The other arguments and return value
3485 are the same as for expand_builtin_mempcpy. */
3487 static rtx
3488 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3489 rtx target, enum machine_mode mode, int endp)
3491 /* If return value is ignored, transform mempcpy into memcpy. */
3492 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3494 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3495 tree result = build_call_expr (fn, 3, dest, src, len);
3497 while (TREE_CODE (result) == COMPOUND_EXPR)
3499 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3500 EXPAND_NORMAL);
3501 result = TREE_OPERAND (result, 1);
3503 return expand_expr (result, target, mode, EXPAND_NORMAL);
3505 else
3507 const char *src_str;
3508 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3509 unsigned int dest_align
3510 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3511 rtx dest_mem, src_mem, len_rtx;
3512 tree result = fold_builtin_memory_op (UNKNOWN_LOCATION,
3513 dest, src, len, type, false, endp);
3515 if (result)
3517 while (TREE_CODE (result) == COMPOUND_EXPR)
3519 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3520 EXPAND_NORMAL);
3521 result = TREE_OPERAND (result, 1);
3523 return expand_expr (result, target, mode, EXPAND_NORMAL);
3526 /* If either SRC or DEST is not a pointer type, don't do this
3527 operation in-line. */
3528 if (dest_align == 0 || src_align == 0)
3529 return NULL_RTX;
3531 /* If LEN is not constant, call the normal function. */
3532 if (! host_integerp (len, 1))
3533 return NULL_RTX;
3535 len_rtx = expand_normal (len);
3536 src_str = c_getstr (src);
3538 /* If SRC is a string constant and block move would be done
3539 by pieces, we can avoid loading the string from memory
3540 and only store the computed constants. */
3541 if (src_str
3542 && CONST_INT_P (len_rtx)
3543 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3544 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3545 CONST_CAST (char *, src_str),
3546 dest_align, false))
3548 dest_mem = get_memory_rtx (dest, len);
3549 set_mem_align (dest_mem, dest_align);
3550 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3551 builtin_memcpy_read_str,
3552 CONST_CAST (char *, src_str),
3553 dest_align, false, endp);
3554 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3555 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3556 return dest_mem;
3559 if (CONST_INT_P (len_rtx)
3560 && can_move_by_pieces (INTVAL (len_rtx),
3561 MIN (dest_align, src_align)))
3563 dest_mem = get_memory_rtx (dest, len);
3564 set_mem_align (dest_mem, dest_align);
3565 src_mem = get_memory_rtx (src, len);
3566 set_mem_align (src_mem, src_align);
3567 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3568 MIN (dest_align, src_align), endp);
3569 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3570 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3571 return dest_mem;
3574 return NULL_RTX;
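/* Illustrative sketch, not part of builtins.c: what the ENDP values mean at
   the source level.  memcpy-style callers want the destination back
   (ENDP == 0), mempcpy wants the end pointer (ENDP == 1), and stpcpy-style
   callers want the end pointer minus one, i.e. the address of the copied
   NUL (ENDP == 2).  The helper name below is hypothetical; mempcpy is a
   GNU extension.  */
#define _GNU_SOURCE
#include <string.h>

char *
append_bytes (char *dst, const char *src, size_t n)
{
  /* Equivalent to memcpy (dst, src, n) followed by returning dst + n;
     this is the ENDP == 1 behaviour.  */
  return (char *) mempcpy (dst, src, n);
}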
3578 /* Expand expression EXP, which is a call to the memmove builtin. Return
3579 NULL_RTX if we failed; the caller should emit a normal call. */
3581 static rtx
3582 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3584 if (!validate_arglist (exp,
3585 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3586 return NULL_RTX;
3587 else
3589 tree dest = CALL_EXPR_ARG (exp, 0);
3590 tree src = CALL_EXPR_ARG (exp, 1);
3591 tree len = CALL_EXPR_ARG (exp, 2);
3592 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3593 target, mode, ignore);
3597 /* Helper function to do the actual work for expand_builtin_memmove. The
3598 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3599 so that this can also be called without constructing an actual CALL_EXPR.
3600 TYPE is the return type of the call. The other arguments and return value
3601 are the same as for expand_builtin_memmove. */
3603 static rtx
3604 expand_builtin_memmove_args (tree dest, tree src, tree len,
3605 tree type, rtx target, enum machine_mode mode,
3606 int ignore)
3608 tree result = fold_builtin_memory_op (UNKNOWN_LOCATION,
3609 dest, src, len, type, ignore, /*endp=*/3);
3611 if (result)
3613 STRIP_TYPE_NOPS (result);
3614 while (TREE_CODE (result) == COMPOUND_EXPR)
3616 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3617 EXPAND_NORMAL);
3618 result = TREE_OPERAND (result, 1);
3620 return expand_expr (result, target, mode, EXPAND_NORMAL);
3623 /* Otherwise, call the normal function. */
3624 return NULL_RTX;
3627 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3628 NULL_RTX if we failed; the caller should emit a normal call. */
3630 static rtx
3631 expand_builtin_bcopy (tree exp, int ignore)
3633 tree type = TREE_TYPE (exp);
3634 tree src, dest, size;
3635 location_t loc = EXPR_LOCATION (exp);
3637 if (!validate_arglist (exp,
3638 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3639 return NULL_RTX;
3641 src = CALL_EXPR_ARG (exp, 0);
3642 dest = CALL_EXPR_ARG (exp, 1);
3643 size = CALL_EXPR_ARG (exp, 2);
3645 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3646 This is done this way so that if it isn't expanded inline, we fall
3647 back to calling bcopy instead of memmove. */
3648 return expand_builtin_memmove_args (dest, src,
3649 fold_convert_loc (loc, sizetype, size),
3650 type, const0_rtx, VOIDmode,
3651 ignore);
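/* Illustrative sketch, not part of builtins.c: the argument swap performed
   by the bcopy transformation above.  bcopy takes (src, dst, len) while
   memmove takes (dst, src, len), so the first two arguments trade places
   and the length is converted to size_t.  The helper name is
   hypothetical.  */
#include <string.h>

void
copy_overlapping (void *src, void *dst, int len)
{
  /* bcopy (src, dst, len) is treated as the memmove call below.  */
  memmove (dst, src, (size_t) len);
}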
3654 #ifndef HAVE_movstr
3655 # define HAVE_movstr 0
3656 # define CODE_FOR_movstr CODE_FOR_nothing
3657 #endif
3659 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3660 we failed; the caller should emit a normal call, otherwise try to
3661 get the result in TARGET, if convenient. If ENDP is 0 return the
3662 destination pointer, if ENDP is 1 return the end pointer ala
3663 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3664 stpcpy. */
3666 static rtx
3667 expand_movstr (tree dest, tree src, rtx target, int endp)
3669 rtx end;
3670 rtx dest_mem;
3671 rtx src_mem;
3672 rtx insn;
3673 const struct insn_data * data;
3675 if (!HAVE_movstr)
3676 return NULL_RTX;
3678 dest_mem = get_memory_rtx (dest, NULL);
3679 src_mem = get_memory_rtx (src, NULL);
3680 if (!endp)
3682 target = force_reg (Pmode, XEXP (dest_mem, 0));
3683 dest_mem = replace_equiv_address (dest_mem, target);
3684 end = gen_reg_rtx (Pmode);
3686 else
3688 if (target == 0 || target == const0_rtx)
3690 end = gen_reg_rtx (Pmode);
3691 if (target == 0)
3692 target = end;
3694 else
3695 end = target;
3698 data = insn_data + CODE_FOR_movstr;
3700 if (data->operand[0].mode != VOIDmode)
3701 end = gen_lowpart (data->operand[0].mode, end);
3703 insn = data->genfun (end, dest_mem, src_mem);
3705 gcc_assert (insn);
3707 emit_insn (insn);
3709 /* movstr is supposed to set end to the address of the NUL
3710 terminator. If the caller requested a mempcpy-like return value,
3711 adjust it. */
3712 if (endp == 1 && target != const0_rtx)
3714 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3715 emit_move_insn (target, force_operand (tem, NULL_RTX));
3718 return target;
3721 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3722 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3723 try to get the result in TARGET, if convenient (and in mode MODE if that's
3724 convenient). */
3726 static rtx
3727 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3729 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3731 tree dest = CALL_EXPR_ARG (exp, 0);
3732 tree src = CALL_EXPR_ARG (exp, 1);
3733 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3735 return NULL_RTX;
3738 /* Helper function to do the actual work for expand_builtin_strcpy. The
3739 arguments to the builtin_strcpy call DEST and SRC are broken out
3740 so that this can also be called without constructing an actual CALL_EXPR.
3741 The other arguments and return value are the same as for
3742 expand_builtin_strcpy. */
3744 static rtx
3745 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3746 rtx target, enum machine_mode mode)
3748 tree result = fold_builtin_strcpy (UNKNOWN_LOCATION,
3749 fndecl, dest, src, 0);
3750 if (result)
3751 return expand_expr (result, target, mode, EXPAND_NORMAL);
3752 return expand_movstr (dest, src, target, /*endp=*/0);
3756 /* Expand a call EXP to the stpcpy builtin.
3757 Return NULL_RTX if we failed; the caller should emit a normal call,
3758 otherwise try to get the result in TARGET, if convenient (and in
3759 mode MODE if that's convenient). */
3761 static rtx
3762 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3764 tree dst, src;
3765 location_t loc = EXPR_LOCATION (exp);
3767 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3768 return NULL_RTX;
3770 dst = CALL_EXPR_ARG (exp, 0);
3771 src = CALL_EXPR_ARG (exp, 1);
3773 /* If return value is ignored, transform stpcpy into strcpy. */
3774 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3776 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3777 tree result = build_call_expr (fn, 2, dst, src);
3779 STRIP_NOPS (result);
3780 while (TREE_CODE (result) == COMPOUND_EXPR)
3782 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3783 EXPAND_NORMAL);
3784 result = TREE_OPERAND (result, 1);
3786 return expand_expr (result, target, mode, EXPAND_NORMAL);
3788 else
3790 tree len, lenp1;
3791 rtx ret;
3793 /* Ensure we get an actual string whose length can be evaluated at
3794 compile-time, not an expression containing a string. This is
3795 because the latter will potentially produce pessimized code
3796 when used to produce the return value. */
3797 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3798 return expand_movstr (dst, src, target, /*endp=*/2);
3800 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3801 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3802 target, mode, /*endp=*/2);
3804 if (ret)
3805 return ret;
3807 if (TREE_CODE (len) == INTEGER_CST)
3809 rtx len_rtx = expand_normal (len);
3811 if (CONST_INT_P (len_rtx))
3813 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3814 dst, src, target, mode);
3816 if (ret)
3818 if (! target)
3820 if (mode != VOIDmode)
3821 target = gen_reg_rtx (mode);
3822 else
3823 target = gen_reg_rtx (GET_MODE (ret));
3825 if (GET_MODE (target) != GET_MODE (ret))
3826 ret = gen_lowpart (GET_MODE (target), ret);
3828 ret = plus_constant (ret, INTVAL (len_rtx));
3829 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3830 gcc_assert (ret);
3832 return target;
3837 return expand_movstr (dst, src, target, /*endp=*/2);
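/* Illustrative sketch, not part of builtins.c: the identity the stpcpy
   expansion above relies on when the source length is known.  stpcpy
   returns a pointer to the copied terminating NUL, which is the mempcpy
   end pointer minus one -- exactly what endp == 2 asks of
   expand_builtin_mempcpy_args.  The helper name is hypothetical; mempcpy
   is a GNU extension.  */
#define _GNU_SOURCE
#include <string.h>

char *
copy_returning_nul (char *dst, const char *src)
{
  /* For src = "abc" this copies 4 bytes and returns dst + 3, which is
     what stpcpy (dst, src) would return.  */
  return (char *) mempcpy (dst, src, strlen (src) + 1) - 1;
}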
3841 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3842 bytes from constant string DATA + OFFSET and return it as target
3843 constant. */
3845 static rtx
3846 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3847 enum machine_mode mode)
3849 const char *str = (const char *) data;
3851 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3852 return const0_rtx;
3854 return c_readstr (str + offset, mode);
3857 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3858 NULL_RTX if we failed; the caller should emit a normal call. */
3860 static rtx
3861 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3863 tree fndecl = get_callee_fndecl (exp);
3864 location_t loc = EXPR_LOCATION (exp);
3866 if (validate_arglist (exp,
3867 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3869 tree dest = CALL_EXPR_ARG (exp, 0);
3870 tree src = CALL_EXPR_ARG (exp, 1);
3871 tree len = CALL_EXPR_ARG (exp, 2);
3872 tree slen = c_strlen (src, 1);
3873 tree result = fold_builtin_strncpy (EXPR_LOCATION (exp),
3874 fndecl, dest, src, len, slen);
3876 if (result)
3878 while (TREE_CODE (result) == COMPOUND_EXPR)
3880 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3881 EXPAND_NORMAL);
3882 result = TREE_OPERAND (result, 1);
3884 return expand_expr (result, target, mode, EXPAND_NORMAL);
3887 /* We must be passed a constant len and src parameter. */
3888 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3889 return NULL_RTX;
3891 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3893 /* We're required to pad with trailing zeros if the requested
3894 len is greater than strlen(s2)+1. In that case try to
3895 use store_by_pieces; if it fails, punt. */
3896 if (tree_int_cst_lt (slen, len))
3898 unsigned int dest_align
3899 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3900 const char *p = c_getstr (src);
3901 rtx dest_mem;
3903 if (!p || dest_align == 0 || !host_integerp (len, 1)
3904 || !can_store_by_pieces (tree_low_cst (len, 1),
3905 builtin_strncpy_read_str,
3906 CONST_CAST (char *, p),
3907 dest_align, false))
3908 return NULL_RTX;
3910 dest_mem = get_memory_rtx (dest, len);
3911 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3912 builtin_strncpy_read_str,
3913 CONST_CAST (char *, p), dest_align, false, 0);
3914 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3915 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3916 return dest_mem;
3919 return NULL_RTX;
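/* Illustrative sketch, not part of builtins.c: the padding case handled
   above.  When the requested length exceeds strlen (src) + 1, strncpy must
   fill the rest of the destination with NUL bytes, which is why
   builtin_strncpy_read_str returns const0_rtx for offsets past the end of
   the source string.  The helper name is hypothetical and assumes the
   destination has room for 8 bytes.  */
#include <string.h>

void
fill_field (char *field)
{
  /* Stores 'h', 'i', '\0' and five more NUL padding bytes; with a constant
     length the whole thing can be emitted by store_by_pieces.  */
  strncpy (field, "hi", 8);
}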
3922 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3923 bytes from constant string DATA + OFFSET and return it as target
3924 constant. */
3926 static rtx
3927 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3928 enum machine_mode mode)
3930 const char *c = (const char *) data;
3931 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3933 memset (p, *c, GET_MODE_SIZE (mode));
3935 return c_readstr (p, mode);
3938 /* Callback routine for store_by_pieces. Return the RTL of a register
3939 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3940 char value given in the RTL register data. For example, if mode is
3941 4 bytes wide, return the RTL for 0x01010101*data. */
3943 static rtx
3944 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3945 enum machine_mode mode)
3947 rtx target, coeff;
3948 size_t size;
3949 char *p;
3951 size = GET_MODE_SIZE (mode);
3952 if (size == 1)
3953 return (rtx) data;
3955 p = XALLOCAVEC (char, size);
3956 memset (p, 1, size);
3957 coeff = c_readstr (p, mode);
3959 target = convert_to_mode (mode, (rtx) data, 1);
3960 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3961 return force_reg (mode, target);
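/* Illustrative sketch, not part of builtins.c: the replication trick that
   builtin_memset_gen_str performs in RTL.  Multiplying the zero-extended
   fill byte by a constant whose bytes are all 0x01 copies that byte into
   every byte position of the wider mode.  The helper name is
   hypothetical.  */
#include <stdint.h>

static uint32_t
replicate_byte (uint8_t c)
{
  /* 0xab * 0x01010101 == 0xabababab: one multiply fills all four bytes
     with the memset value.  */
  return (uint32_t) c * 0x01010101u;
}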
3964 /* Expand expression EXP, which is a call to the memset builtin. Return
3965 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3966 try to get the result in TARGET, if convenient (and in mode MODE if that's
3967 convenient). */
3969 static rtx
3970 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3972 if (!validate_arglist (exp,
3973 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3974 return NULL_RTX;
3975 else
3977 tree dest = CALL_EXPR_ARG (exp, 0);
3978 tree val = CALL_EXPR_ARG (exp, 1);
3979 tree len = CALL_EXPR_ARG (exp, 2);
3980 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3984 /* Helper function to do the actual work for expand_builtin_memset. The
3985 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3986 so that this can also be called without constructing an actual CALL_EXPR.
3987 The other arguments and return value are the same as for
3988 expand_builtin_memset. */
3990 static rtx
3991 expand_builtin_memset_args (tree dest, tree val, tree len,
3992 rtx target, enum machine_mode mode, tree orig_exp)
3994 tree fndecl, fn;
3995 enum built_in_function fcode;
3996 char c;
3997 unsigned int dest_align;
3998 rtx dest_mem, dest_addr, len_rtx;
3999 HOST_WIDE_INT expected_size = -1;
4000 unsigned int expected_align = 0;
4001 tree_ann_common_t ann;
4003 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
4005 /* If DEST is not a pointer type, don't do this operation in-line. */
4006 if (dest_align == 0)
4007 return NULL_RTX;
4009 ann = tree_common_ann (orig_exp);
4010 if (ann)
4011 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
4013 if (expected_align < dest_align)
4014 expected_align = dest_align;
4016 /* If the LEN parameter is zero, return DEST. */
4017 if (integer_zerop (len))
4019 /* Evaluate and ignore VAL in case it has side-effects. */
4020 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4021 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4024 /* Stabilize the arguments in case we fail. */
4025 dest = builtin_save_expr (dest);
4026 val = builtin_save_expr (val);
4027 len = builtin_save_expr (len);
4029 len_rtx = expand_normal (len);
4030 dest_mem = get_memory_rtx (dest, len);
4032 if (TREE_CODE (val) != INTEGER_CST)
4034 rtx val_rtx;
4036 val_rtx = expand_normal (val);
4037 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
4038 val_rtx, 0);
4040 /* Assume that we can memset by pieces if we can store
4041 * the coefficients by pieces (in the required modes).
4042 * We can't pass builtin_memset_gen_str as that emits RTL. */
4043 c = 1;
4044 if (host_integerp (len, 1)
4045 && can_store_by_pieces (tree_low_cst (len, 1),
4046 builtin_memset_read_str, &c, dest_align,
4047 true))
4049 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
4050 val_rtx);
4051 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4052 builtin_memset_gen_str, val_rtx, dest_align,
4053 true, 0);
4055 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4056 dest_align, expected_align,
4057 expected_size))
4058 goto do_libcall;
4060 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4061 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4062 return dest_mem;
4065 if (target_char_cast (val, &c))
4066 goto do_libcall;
4068 if (c)
4070 if (host_integerp (len, 1)
4071 && can_store_by_pieces (tree_low_cst (len, 1),
4072 builtin_memset_read_str, &c, dest_align,
4073 true))
4074 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4075 builtin_memset_read_str, &c, dest_align, true, 0);
4076 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4077 dest_align, expected_align,
4078 expected_size))
4079 goto do_libcall;
4081 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4082 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4083 return dest_mem;
4086 set_mem_align (dest_mem, dest_align);
4087 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4088 CALL_EXPR_TAILCALL (orig_exp)
4089 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4090 expected_align, expected_size);
4092 if (dest_addr == 0)
4094 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4095 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4098 return dest_addr;
4100 do_libcall:
4101 fndecl = get_callee_fndecl (orig_exp);
4102 fcode = DECL_FUNCTION_CODE (fndecl);
4103 if (fcode == BUILT_IN_MEMSET)
4104 fn = build_call_expr (fndecl, 3, dest, val, len);
4105 else if (fcode == BUILT_IN_BZERO)
4106 fn = build_call_expr (fndecl, 2, dest, len);
4107 else
4108 gcc_unreachable ();
4109 if (TREE_CODE (fn) == CALL_EXPR)
4110 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4111 return expand_call (fn, target, target == const0_rtx);
4114 /* Expand expression EXP, which is a call to the bzero builtin. Return
4115 NULL_RTX if we failed; the caller should emit a normal call. */
4117 static rtx
4118 expand_builtin_bzero (tree exp)
4120 tree dest, size;
4121 location_t loc = EXPR_LOCATION (exp);
4123 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4124 return NULL_RTX;
4126 dest = CALL_EXPR_ARG (exp, 0);
4127 size = CALL_EXPR_ARG (exp, 1);
4129 /* New argument list transforming bzero(ptr x, int y) to
4130 memset(ptr x, int 0, size_t y). This is done this way
4131 so that if it isn't expanded inline, we fall back to
4132 calling bzero instead of memset. */
4134 return expand_builtin_memset_args (dest, integer_zero_node,
4135 fold_convert_loc (loc, sizetype, size),
4136 const0_rtx, VOIDmode, exp);
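/* Illustrative sketch, not part of builtins.c: the rewrite performed
   above.  bzero (p, n) is expanded exactly as the memset call below would
   be; only if the inline expansion fails is a call to bzero itself
   emitted.  The helper name is hypothetical.  */
#include <string.h>

void
clear_buffer (void *p, unsigned int n)
{
  memset (p, 0, (size_t) n);
}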
4139 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed; the
4140 caller should emit a normal call, otherwise try to get the result
4141 in TARGET, if convenient (and in mode MODE if that's convenient). */
4143 static rtx
4144 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4146 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4147 INTEGER_TYPE, VOID_TYPE))
4149 tree type = TREE_TYPE (exp);
4150 tree result = fold_builtin_memchr (EXPR_LOCATION (exp),
4151 CALL_EXPR_ARG (exp, 0),
4152 CALL_EXPR_ARG (exp, 1),
4153 CALL_EXPR_ARG (exp, 2), type);
4154 if (result)
4155 return expand_expr (result, target, mode, EXPAND_NORMAL);
4157 return NULL_RTX;
4160 /* Expand expression EXP, which is a call to the memcmp built-in function.
4161 Return NULL_RTX if we failed and the
4162 caller should emit a normal call, otherwise try to get the result in
4163 TARGET, if convenient (and in mode MODE, if that's convenient). */
4165 static rtx
4166 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4168 location_t loc = EXPR_LOCATION (exp);
4170 if (!validate_arglist (exp,
4171 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4172 return NULL_RTX;
4173 else
4175 tree result = fold_builtin_memcmp (loc,
4176 CALL_EXPR_ARG (exp, 0),
4177 CALL_EXPR_ARG (exp, 1),
4178 CALL_EXPR_ARG (exp, 2));
4179 if (result)
4180 return expand_expr (result, target, mode, EXPAND_NORMAL);
4183 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4185 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4186 rtx result;
4187 rtx insn;
4188 tree arg1 = CALL_EXPR_ARG (exp, 0);
4189 tree arg2 = CALL_EXPR_ARG (exp, 1);
4190 tree len = CALL_EXPR_ARG (exp, 2);
4192 int arg1_align
4193 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4194 int arg2_align
4195 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4196 enum machine_mode insn_mode;
4198 #ifdef HAVE_cmpmemsi
4199 if (HAVE_cmpmemsi)
4200 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4201 else
4202 #endif
4203 #ifdef HAVE_cmpstrnsi
4204 if (HAVE_cmpstrnsi)
4205 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4206 else
4207 #endif
4208 return NULL_RTX;
4210 /* If we don't have POINTER_TYPE, call the function. */
4211 if (arg1_align == 0 || arg2_align == 0)
4212 return NULL_RTX;
4214 /* Make a place to write the result of the instruction. */
4215 result = target;
4216 if (! (result != 0
4217 && REG_P (result) && GET_MODE (result) == insn_mode
4218 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4219 result = gen_reg_rtx (insn_mode);
4221 arg1_rtx = get_memory_rtx (arg1, len);
4222 arg2_rtx = get_memory_rtx (arg2, len);
4223 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4225 /* Set MEM_SIZE as appropriate. */
4226 if (CONST_INT_P (arg3_rtx))
4228 set_mem_size (arg1_rtx, arg3_rtx);
4229 set_mem_size (arg2_rtx, arg3_rtx);
4232 #ifdef HAVE_cmpmemsi
4233 if (HAVE_cmpmemsi)
4234 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4235 GEN_INT (MIN (arg1_align, arg2_align)));
4236 else
4237 #endif
4238 #ifdef HAVE_cmpstrnsi
4239 if (HAVE_cmpstrnsi)
4240 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4241 GEN_INT (MIN (arg1_align, arg2_align)));
4242 else
4243 #endif
4244 gcc_unreachable ();
4246 if (insn)
4247 emit_insn (insn);
4248 else
4249 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4250 TYPE_MODE (integer_type_node), 3,
4251 XEXP (arg1_rtx, 0), Pmode,
4252 XEXP (arg2_rtx, 0), Pmode,
4253 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4254 TYPE_UNSIGNED (sizetype)),
4255 TYPE_MODE (sizetype));
4257 /* Return the value in the proper mode for this function. */
4258 mode = TYPE_MODE (TREE_TYPE (exp));
4259 if (GET_MODE (result) == mode)
4260 return result;
4261 else if (target != 0)
4263 convert_move (target, result, 0);
4264 return target;
4266 else
4267 return convert_to_mode (mode, result, 0);
4269 #endif
4271 return NULL_RTX;
4274 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4275 if we failed; the caller should emit a normal call, otherwise try to get
4276 the result in TARGET, if convenient. */
4278 static rtx
4279 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4281 location_t loc = EXPR_LOCATION (exp);
4283 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4284 return NULL_RTX;
4285 else
4287 tree result = fold_builtin_strcmp (loc,
4288 CALL_EXPR_ARG (exp, 0),
4289 CALL_EXPR_ARG (exp, 1));
4290 if (result)
4291 return expand_expr (result, target, mode, EXPAND_NORMAL);
4294 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4295 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4296 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4298 rtx arg1_rtx, arg2_rtx;
4299 rtx result, insn = NULL_RTX;
4300 tree fndecl, fn;
4301 tree arg1 = CALL_EXPR_ARG (exp, 0);
4302 tree arg2 = CALL_EXPR_ARG (exp, 1);
4304 int arg1_align
4305 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4306 int arg2_align
4307 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4309 /* If we don't have POINTER_TYPE, call the function. */
4310 if (arg1_align == 0 || arg2_align == 0)
4311 return NULL_RTX;
4313 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4314 arg1 = builtin_save_expr (arg1);
4315 arg2 = builtin_save_expr (arg2);
4317 arg1_rtx = get_memory_rtx (arg1, NULL);
4318 arg2_rtx = get_memory_rtx (arg2, NULL);
4320 #ifdef HAVE_cmpstrsi
4321 /* Try to call cmpstrsi. */
4322 if (HAVE_cmpstrsi)
4324 enum machine_mode insn_mode
4325 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4327 /* Make a place to write the result of the instruction. */
4328 result = target;
4329 if (! (result != 0
4330 && REG_P (result) && GET_MODE (result) == insn_mode
4331 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4332 result = gen_reg_rtx (insn_mode);
4334 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4335 GEN_INT (MIN (arg1_align, arg2_align)));
4337 #endif
4338 #ifdef HAVE_cmpstrnsi
4339 /* Try to determine at least one length and call cmpstrnsi. */
4340 if (!insn && HAVE_cmpstrnsi)
4342 tree len;
4343 rtx arg3_rtx;
4345 enum machine_mode insn_mode
4346 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4347 tree len1 = c_strlen (arg1, 1);
4348 tree len2 = c_strlen (arg2, 1);
4350 if (len1)
4351 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4352 if (len2)
4353 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4355 /* If we don't have a constant length for the first, use the length
4356 of the second, if we know it. We don't require a constant for
4357 this case; some cost analysis could be done if both are available
4358 but neither is constant. For now, assume they're equally cheap,
4359 unless one has side effects. If both strings have constant lengths,
4360 use the smaller. */
4362 if (!len1)
4363 len = len2;
4364 else if (!len2)
4365 len = len1;
4366 else if (TREE_SIDE_EFFECTS (len1))
4367 len = len2;
4368 else if (TREE_SIDE_EFFECTS (len2))
4369 len = len1;
4370 else if (TREE_CODE (len1) != INTEGER_CST)
4371 len = len2;
4372 else if (TREE_CODE (len2) != INTEGER_CST)
4373 len = len1;
4374 else if (tree_int_cst_lt (len1, len2))
4375 len = len1;
4376 else
4377 len = len2;
4379 /* If both arguments have side effects, we cannot optimize. */
4380 if (!len || TREE_SIDE_EFFECTS (len))
4381 goto do_libcall;
4383 arg3_rtx = expand_normal (len);
4385 /* Make a place to write the result of the instruction. */
4386 result = target;
4387 if (! (result != 0
4388 && REG_P (result) && GET_MODE (result) == insn_mode
4389 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4390 result = gen_reg_rtx (insn_mode);
4392 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4393 GEN_INT (MIN (arg1_align, arg2_align)));
4395 #endif
4397 if (insn)
4399 emit_insn (insn);
4401 /* Return the value in the proper mode for this function. */
4402 mode = TYPE_MODE (TREE_TYPE (exp));
4403 if (GET_MODE (result) == mode)
4404 return result;
4405 if (target == 0)
4406 return convert_to_mode (mode, result, 0);
4407 convert_move (target, result, 0);
4408 return target;
4411 /* Expand the library call ourselves using a stabilized argument
4412 list to avoid re-evaluating the function's arguments twice. */
4413 #ifdef HAVE_cmpstrnsi
4414 do_libcall:
4415 #endif
4416 fndecl = get_callee_fndecl (exp);
4417 fn = build_call_expr (fndecl, 2, arg1, arg2);
4418 if (TREE_CODE (fn) == CALL_EXPR)
4419 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4420 return expand_call (fn, target, target == const0_rtx);
4422 #endif
4423 return NULL_RTX;
4426 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4427 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4428 the result in TARGET, if convenient. */
4430 static rtx
4431 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4433 location_t loc = EXPR_LOCATION (exp);
4435 if (!validate_arglist (exp,
4436 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4437 return NULL_RTX;
4438 else
4440 tree result = fold_builtin_strncmp (loc,
4441 CALL_EXPR_ARG (exp, 0),
4442 CALL_EXPR_ARG (exp, 1),
4443 CALL_EXPR_ARG (exp, 2));
4444 if (result)
4445 return expand_expr (result, target, mode, EXPAND_NORMAL);
4448 /* If c_strlen can determine an expression for one of the string
4449 lengths, and it doesn't have side effects, then emit cmpstrnsi
4450 using length MIN(strlen(string)+1, arg3). */
4451 #ifdef HAVE_cmpstrnsi
4452 if (HAVE_cmpstrnsi)
4454 tree len, len1, len2;
4455 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4456 rtx result, insn;
4457 tree fndecl, fn;
4458 tree arg1 = CALL_EXPR_ARG (exp, 0);
4459 tree arg2 = CALL_EXPR_ARG (exp, 1);
4460 tree arg3 = CALL_EXPR_ARG (exp, 2);
4462 int arg1_align
4463 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4464 int arg2_align
4465 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4466 enum machine_mode insn_mode
4467 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4469 len1 = c_strlen (arg1, 1);
4470 len2 = c_strlen (arg2, 1);
4472 if (len1)
4473 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4474 if (len2)
4475 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4477 /* If we don't have a constant length for the first, use the length
4478 of the second, if we know it. We don't require a constant for
4479 this case; some cost analysis could be done if both are available
4480 but neither is constant. For now, assume they're equally cheap,
4481 unless one has side effects. If both strings have constant lengths,
4482 use the smaller. */
4484 if (!len1)
4485 len = len2;
4486 else if (!len2)
4487 len = len1;
4488 else if (TREE_SIDE_EFFECTS (len1))
4489 len = len2;
4490 else if (TREE_SIDE_EFFECTS (len2))
4491 len = len1;
4492 else if (TREE_CODE (len1) != INTEGER_CST)
4493 len = len2;
4494 else if (TREE_CODE (len2) != INTEGER_CST)
4495 len = len1;
4496 else if (tree_int_cst_lt (len1, len2))
4497 len = len1;
4498 else
4499 len = len2;
4501 /* If both arguments have side effects, we cannot optimize. */
4502 if (!len || TREE_SIDE_EFFECTS (len))
4503 return NULL_RTX;
4505 /* The actual new length parameter is MIN(len,arg3). */
4506 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4507 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4509 /* If we don't have POINTER_TYPE, call the function. */
4510 if (arg1_align == 0 || arg2_align == 0)
4511 return NULL_RTX;
4513 /* Make a place to write the result of the instruction. */
4514 result = target;
4515 if (! (result != 0
4516 && REG_P (result) && GET_MODE (result) == insn_mode
4517 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4518 result = gen_reg_rtx (insn_mode);
4520 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4521 arg1 = builtin_save_expr (arg1);
4522 arg2 = builtin_save_expr (arg2);
4523 len = builtin_save_expr (len);
4525 arg1_rtx = get_memory_rtx (arg1, len);
4526 arg2_rtx = get_memory_rtx (arg2, len);
4527 arg3_rtx = expand_normal (len);
4528 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4529 GEN_INT (MIN (arg1_align, arg2_align)));
4530 if (insn)
4532 emit_insn (insn);
4534 /* Return the value in the proper mode for this function. */
4535 mode = TYPE_MODE (TREE_TYPE (exp));
4536 if (GET_MODE (result) == mode)
4537 return result;
4538 if (target == 0)
4539 return convert_to_mode (mode, result, 0);
4540 convert_move (target, result, 0);
4541 return target;
4544 /* Expand the library call ourselves using a stabilized argument
4545 list to avoid re-evaluating the function's arguments twice. */
4546 fndecl = get_callee_fndecl (exp);
4547 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4548 if (TREE_CODE (fn) == CALL_EXPR)
4549 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4550 return expand_call (fn, target, target == const0_rtx);
4552 #endif
4553 return NULL_RTX;
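/* Illustrative sketch, not part of builtins.c: why MIN (strlen (s) + 1, n)
   is a safe length for the cmpstrnsi expansion above.  strncmp stops at
   the first NUL or after n bytes, so when one argument is a literal of
   known length the comparison never needs to read past its terminating
   NUL.  The helper name is hypothetical.  */
#include <string.h>

int
compare_to_foo (const char *s, size_t n)
{
  /* len1 = strlen ("foo") + 1 = 4, so the insn compares at most
     MIN (4, n) bytes, however large n is.  */
  return strncmp (s, "foo", n);
}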
4556 /* Expand expression EXP, which is a call to the strcat builtin.
4557 Return NULL_RTX if we failed; the caller should emit a normal call,
4558 otherwise try to get the result in TARGET, if convenient. */
4560 static rtx
4561 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4563 location_t loc = EXPR_LOCATION (exp);
4565 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4566 return NULL_RTX;
4567 else
4569 tree dst = CALL_EXPR_ARG (exp, 0);
4570 tree src = CALL_EXPR_ARG (exp, 1);
4571 const char *p = c_getstr (src);
4573 /* If the string length is zero, return the dst parameter. */
4574 if (p && *p == '\0')
4575 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4577 if (optimize_insn_for_speed_p ())
4579 /* See if we can store by pieces into (dst + strlen(dst)). */
4580 tree newsrc, newdst,
4581 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4582 rtx insns;
4584 /* Stabilize the argument list. */
4585 newsrc = builtin_save_expr (src);
4586 dst = builtin_save_expr (dst);
4588 start_sequence ();
4590 /* Create strlen (dst). */
4591 newdst = build_call_expr (strlen_fn, 1, dst);
4592 /* Create (dst p+ strlen (dst)). */
4594 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
4595 TREE_TYPE (dst), dst, newdst);
4596 newdst = builtin_save_expr (newdst);
4598 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4600 end_sequence (); /* Stop sequence. */
4601 return NULL_RTX;
4604 /* Output the entire sequence. */
4605 insns = get_insns ();
4606 end_sequence ();
4607 emit_insn (insns);
4609 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4612 return NULL_RTX;
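/* Illustrative sketch, not part of builtins.c: the decomposition the
   speed-optimized strcat path above emits as an insn sequence: compute
   strlen (dst), form dst p+ strlen (dst), strcpy the source there, and
   return the original dst.  The helper name is hypothetical.  */
#include <string.h>

char *
append_suffix (char *dst)
{
  /* strcat (dst, ".txt") is expanded, in effect, as: */
  strcpy (dst + strlen (dst), ".txt");
  return dst;
}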
4616 /* Expand expression EXP, which is a call to the strncat builtin.
4617 Return NULL_RTX if we failed; the caller should emit a normal call,
4618 otherwise try to get the result in TARGET, if convenient. */
4620 static rtx
4621 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4623 if (validate_arglist (exp,
4624 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4626 tree result = fold_builtin_strncat (EXPR_LOCATION (exp),
4627 CALL_EXPR_ARG (exp, 0),
4628 CALL_EXPR_ARG (exp, 1),
4629 CALL_EXPR_ARG (exp, 2));
4630 if (result)
4631 return expand_expr (result, target, mode, EXPAND_NORMAL);
4633 return NULL_RTX;
4636 /* Expand expression EXP, which is a call to the strspn builtin.
4637 Return NULL_RTX if we failed; the caller should emit a normal call,
4638 otherwise try to get the result in TARGET, if convenient. */
4640 static rtx
4641 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4643 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4645 tree result = fold_builtin_strspn (EXPR_LOCATION (exp),
4646 CALL_EXPR_ARG (exp, 0),
4647 CALL_EXPR_ARG (exp, 1));
4648 if (result)
4649 return expand_expr (result, target, mode, EXPAND_NORMAL);
4651 return NULL_RTX;
4654 /* Expand expression EXP, which is a call to the strcspn builtin.
4655 Return NULL_RTX if we failed; the caller should emit a normal call,
4656 otherwise try to get the result in TARGET, if convenient. */
4658 static rtx
4659 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4661 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4663 tree result = fold_builtin_strcspn (EXPR_LOCATION (exp),
4664 CALL_EXPR_ARG (exp, 0),
4665 CALL_EXPR_ARG (exp, 1));
4666 if (result)
4667 return expand_expr (result, target, mode, EXPAND_NORMAL);
4669 return NULL_RTX;
4672 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4673 if that's convenient. */
4675 rtx
4676 expand_builtin_saveregs (void)
4678 rtx val, seq;
4680 /* Don't do __builtin_saveregs more than once in a function.
4681 Save the result of the first call and reuse it. */
4682 if (saveregs_value != 0)
4683 return saveregs_value;
4685 /* When this function is called, it means that registers must be
4686 saved on entry to this function. So we migrate the call to the
4687 first insn of this function. */
4689 start_sequence ();
4691 /* Do whatever the machine needs done in this case. */
4692 val = targetm.calls.expand_builtin_saveregs ();
4694 seq = get_insns ();
4695 end_sequence ();
4697 saveregs_value = val;
4699 /* Put the insns after the NOTE that starts the function. If this
4700 is inside a start_sequence, make the outer-level insn chain current, so
4701 the code is placed at the start of the function. */
4702 push_topmost_sequence ();
4703 emit_insn_after (seq, entry_of_function ());
4704 pop_topmost_sequence ();
4706 return val;
4709 /* __builtin_args_info (N) returns word N of the arg space info
4710 for the current function. The number and meanings of words
4711 are controlled by the definition of CUMULATIVE_ARGS. */
4713 static rtx
4714 expand_builtin_args_info (tree exp)
4716 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4717 int *word_ptr = (int *) &crtl->args.info;
4719 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4721 if (call_expr_nargs (exp) != 0)
4723 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4724 error ("argument of %<__builtin_args_info%> must be constant");
4725 else
4727 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4729 if (wordnum < 0 || wordnum >= nwords)
4730 error ("argument of %<__builtin_args_info%> out of range");
4731 else
4732 return GEN_INT (word_ptr[wordnum]);
4735 else
4736 error ("missing argument in %<__builtin_args_info%>");
4738 return const0_rtx;
4741 /* Expand a call to __builtin_next_arg. */
4743 static rtx
4744 expand_builtin_next_arg (void)
4746 /* Checking the arguments is already done in fold_builtin_next_arg,
4747 which must be called before this function. */
4748 return expand_binop (ptr_mode, add_optab,
4749 crtl->args.internal_arg_pointer,
4750 crtl->args.arg_offset_rtx,
4751 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4754 /* Make it easier for the backends by protecting the valist argument
4755 from multiple evaluations. */
4757 static tree
4758 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4760 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4762 gcc_assert (vatype != NULL_TREE);
4764 if (TREE_CODE (vatype) == ARRAY_TYPE)
4766 if (TREE_SIDE_EFFECTS (valist))
4767 valist = save_expr (valist);
4769 /* For this case, the backends will be expecting a pointer to
4770 vatype, but it's possible we've actually been given an array
4771 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4772 So fix it. */
4773 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4775 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4776 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4779 else
4781 tree pt;
4783 if (! needs_lvalue)
4785 if (! TREE_SIDE_EFFECTS (valist))
4786 return valist;
4788 pt = build_pointer_type (vatype);
4789 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4790 TREE_SIDE_EFFECTS (valist) = 1;
4793 if (TREE_SIDE_EFFECTS (valist))
4794 valist = save_expr (valist);
4795 valist = build_fold_indirect_ref_loc (loc, valist);
4798 return valist;
4801 /* The "standard" definition of va_list is void*. */
4803 tree
4804 std_build_builtin_va_list (void)
4806 return ptr_type_node;
4809 /* The "standard" abi va_list is va_list_type_node. */
4811 tree
4812 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4814 return va_list_type_node;
4817 /* The "standard" type of va_list is va_list_type_node. */
4819 tree
4820 std_canonical_va_list_type (tree type)
4822 tree wtype, htype;
4824 if (INDIRECT_REF_P (type))
4825 type = TREE_TYPE (type);
4826 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4827 type = TREE_TYPE (type);
4828 wtype = va_list_type_node;
4829 htype = type;
4830 /* Handle structure va_list types. */
4831 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4832 htype = TREE_TYPE (htype);
4833 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4835 /* If va_list is an array type, the argument may have decayed
4836 to a pointer type, e.g. by being passed to another function.
4837 In that case, unwrap both types so that we can compare the
4838 underlying records. */
4839 if (TREE_CODE (htype) == ARRAY_TYPE
4840 || POINTER_TYPE_P (htype))
4842 wtype = TREE_TYPE (wtype);
4843 htype = TREE_TYPE (htype);
4846 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4847 return va_list_type_node;
4849 return NULL_TREE;
4852 /* The "standard" implementation of va_start: just assign `nextarg' to
4853 the variable. */
4855 void
4856 std_expand_builtin_va_start (tree valist, rtx nextarg)
4858 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4859 convert_move (va_r, nextarg, 0);
4862 /* Expand EXP, a call to __builtin_va_start. */
4864 static rtx
4865 expand_builtin_va_start (tree exp)
4867 rtx nextarg;
4868 tree valist;
4869 location_t loc = EXPR_LOCATION (exp);
4871 if (call_expr_nargs (exp) < 2)
4873 error_at (loc, "too few arguments to function %<va_start%>");
4874 return const0_rtx;
4877 if (fold_builtin_next_arg (exp, true))
4878 return const0_rtx;
4880 nextarg = expand_builtin_next_arg ();
4881 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4883 if (targetm.expand_builtin_va_start)
4884 targetm.expand_builtin_va_start (valist, nextarg);
4885 else
4886 std_expand_builtin_va_start (valist, nextarg);
4888 return const0_rtx;
4891 /* The "standard" implementation of va_arg: read the value from the
4892 current (padded) address and increment by the (padded) size. */
4894 tree
4895 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4896 gimple_seq *post_p)
4898 tree addr, t, type_size, rounded_size, valist_tmp;
4899 unsigned HOST_WIDE_INT align, boundary;
4900 bool indirect;
4902 #ifdef ARGS_GROW_DOWNWARD
4903 /* All of the alignment and movement below is for args-grow-up machines.
4904 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4905 implement their own specialized gimplify_va_arg_expr routines. */
4906 gcc_unreachable ();
4907 #endif
4909 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4910 if (indirect)
4911 type = build_pointer_type (type);
4913 align = PARM_BOUNDARY / BITS_PER_UNIT;
4914 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4916 /* When we align a parameter on the stack for the caller, if the
4917 parameter's alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it
4918 will be aligned at MAX_SUPPORTED_STACK_ALIGNMENT. Match the
4919 callee here with the caller. */
4920 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4921 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4923 boundary /= BITS_PER_UNIT;
4925 /* Hoist the valist value into a temporary for the moment. */
4926 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4928 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4929 requires greater alignment, we must perform dynamic alignment. */
4930 if (boundary > align
4931 && !integer_zerop (TYPE_SIZE (type)))
4933 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4934 fold_build2 (POINTER_PLUS_EXPR,
4935 TREE_TYPE (valist),
4936 valist_tmp, size_int (boundary - 1)));
4937 gimplify_and_add (t, pre_p);
4939 t = fold_convert (sizetype, valist_tmp);
4940 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4941 fold_convert (TREE_TYPE (valist),
4942 fold_build2 (BIT_AND_EXPR, sizetype, t,
4943 size_int (-boundary))));
4944 gimplify_and_add (t, pre_p);
4946 else
4947 boundary = align;
4949 /* If the actual alignment is less than the alignment of the type,
4950 adjust the type accordingly so that we don't assume strict alignment
4951 when dereferencing the pointer. */
4952 boundary *= BITS_PER_UNIT;
4953 if (boundary < TYPE_ALIGN (type))
4955 type = build_variant_type_copy (type);
4956 TYPE_ALIGN (type) = boundary;
4959 /* Compute the rounded size of the type. */
4960 type_size = size_in_bytes (type);
4961 rounded_size = round_up (type_size, align);
4963 /* Reduce rounded_size so it's sharable with the postqueue. */
4964 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4966 /* Get AP. */
4967 addr = valist_tmp;
4968 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4970 /* Small args are padded downward. */
4971 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4972 rounded_size, size_int (align));
4973 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4974 size_binop (MINUS_EXPR, rounded_size, type_size));
4975 addr = fold_build2 (POINTER_PLUS_EXPR,
4976 TREE_TYPE (addr), addr, t);
4979 /* Compute new value for AP. */
4980 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4981 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4982 gimplify_and_add (t, pre_p);
4984 addr = fold_convert (build_pointer_type (type), addr);
4986 if (indirect)
4987 addr = build_va_arg_indirect_ref (addr);
4989 return build_va_arg_indirect_ref (addr);
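/* Illustrative sketch, not part of builtins.c, with types simplified: the
   pointer arithmetic the standard va_arg gimplification above emits for an
   args-grow-up target whose va_list is a plain pointer.  Over-alignment
   beyond PARM_BOUNDARY is applied dynamically, the value is read from the
   resulting address, and the pointer advances by the rounded size.  The
   helper name is hypothetical; ALIGN and BOUNDARY are assumed to be powers
   of two, and the PAD_VARARGS_DOWN adjustment is omitted.  */
#include <stddef.h>
#include <stdint.h>

static void *
std_va_arg_step (char **ap, size_t size, size_t align, size_t boundary)
{
  char *p = *ap;
  size_t rounded;
  if (boundary > align)
    p = (char *) (((uintptr_t) p + boundary - 1) & ~(uintptr_t) (boundary - 1));
  rounded = (size + align - 1) & ~(align - 1);   /* round_up (size, align) */
  *ap = p + rounded;                             /* new value for AP */
  return p;                                      /* address the value is read from */
}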
4992 /* Build an indirect-ref expression over the given TREE, which represents a
4993 piece of a va_arg() expansion. */
4994 tree
4995 build_va_arg_indirect_ref (tree addr)
4997 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4999 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
5000 mf_mark (addr);
5002 return addr;
5005 /* Return a dummy expression of type TYPE in order to keep going after an
5006 error. */
5008 static tree
5009 dummy_object (tree type)
5011 tree t = build_int_cst (build_pointer_type (type), 0);
5012 return build1 (INDIRECT_REF, type, t);
5015 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
5016 builtin function, but a very special sort of operator. */
5018 enum gimplify_status
5019 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5021 tree promoted_type, have_va_type;
5022 tree valist = TREE_OPERAND (*expr_p, 0);
5023 tree type = TREE_TYPE (*expr_p);
5024 tree t;
5025 location_t loc = EXPR_LOCATION (*expr_p);
5027 /* Verify that valist is of the proper type. */
5028 have_va_type = TREE_TYPE (valist);
5029 if (have_va_type == error_mark_node)
5030 return GS_ERROR;
5031 have_va_type = targetm.canonical_va_list_type (have_va_type);
5033 if (have_va_type == NULL_TREE)
5035 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
5036 return GS_ERROR;
5039 /* Generate a diagnostic for requesting data of a type that cannot
5040 be passed through `...' due to type promotion at the call site. */
5041 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
5042 != type)
5044 static bool gave_help;
5045 bool warned;
5047 /* Unfortunately, this is merely undefined, rather than a constraint
5048 violation, so we cannot make this an error. If this call is never
5049 executed, the program is still strictly conforming. */
5050 warned = warning_at (loc, 0,
5051 "%qT is promoted to %qT when passed through %<...%>",
5052 type, promoted_type);
5053 if (!gave_help && warned)
5055 gave_help = true;
5056 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
5057 promoted_type, type);
5060 /* We can, however, treat "undefined" any way we please.
5061 Call abort to encourage the user to fix the program. */
5062 if (warned)
5063 inform (loc, "if this code is reached, the program will abort");
5064 /* Before the abort, allow the evaluation of the va_list
5065 expression to exit or longjmp. */
5066 gimplify_and_add (valist, pre_p);
5067 t = build_call_expr_loc (loc,
5068 implicit_built_in_decls[BUILT_IN_TRAP], 0);
5069 gimplify_and_add (t, pre_p);
5071 /* This is dead code, but go ahead and finish so that the
5072 mode of the result comes out right. */
5073 *expr_p = dummy_object (type);
5074 return GS_ALL_DONE;
5076 else
5078 /* Make it easier for the backends by protecting the valist argument
5079 from multiple evaluations. */
5080 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
5082 /* For this case, the backends will be expecting a pointer to
5083 TREE_TYPE (abi), but it's possible we've
5084 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
5085 So fix it. */
5086 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5088 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
5089 valist = fold_convert_loc (loc, p1,
5090 build_fold_addr_expr_loc (loc, valist));
5093 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
5095 else
5096 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
5098 if (!targetm.gimplify_va_arg_expr)
5099 /* FIXME: Once most targets are converted we should merely
5100 assert this is non-null. */
5101 return GS_ALL_DONE;
5103 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
5104 return GS_OK;
5108 /* Expand EXP, a call to __builtin_va_end. */
5110 static rtx
5111 expand_builtin_va_end (tree exp)
5113 tree valist = CALL_EXPR_ARG (exp, 0);
5115 /* Evaluate for side effects, if needed. I hate macros that don't
5116 do that. */
5117 if (TREE_SIDE_EFFECTS (valist))
5118 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5120 return const0_rtx;
5123 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5124 builtin rather than just as an assignment in stdarg.h because of the
5125 nastiness of array-type va_list types. */
5127 static rtx
5128 expand_builtin_va_copy (tree exp)
5130 tree dst, src, t;
5131 location_t loc = EXPR_LOCATION (exp);
5133 dst = CALL_EXPR_ARG (exp, 0);
5134 src = CALL_EXPR_ARG (exp, 1);
5136 dst = stabilize_va_list_loc (loc, dst, 1);
5137 src = stabilize_va_list_loc (loc, src, 0);
5139 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5141 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5143 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5144 TREE_SIDE_EFFECTS (t) = 1;
5145 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5147 else
5149 rtx dstb, srcb, size;
5151 /* Evaluate to pointers. */
5152 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5153 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5154 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5155 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5157 dstb = convert_memory_address (Pmode, dstb);
5158 srcb = convert_memory_address (Pmode, srcb);
5160 /* "Dereference" to BLKmode memories. */
5161 dstb = gen_rtx_MEM (BLKmode, dstb);
5162 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5163 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5164 srcb = gen_rtx_MEM (BLKmode, srcb);
5165 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5166 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5168 /* Copy. */
5169 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5172 return const0_rtx;
5175 /* Expand a call to one of the builtin functions __builtin_frame_address or
5176 __builtin_return_address. */
5178 static rtx
5179 expand_builtin_frame_address (tree fndecl, tree exp)
5181 /* The argument must be a nonnegative integer constant.
5182 It counts the number of frames to scan up the stack.
5183 The value is the return address saved in that frame. */
5184 if (call_expr_nargs (exp) == 0)
5185 /* Warning about missing arg was already issued. */
5186 return const0_rtx;
5187 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5189 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5190 error ("invalid argument to %<__builtin_frame_address%>");
5191 else
5192 error ("invalid argument to %<__builtin_return_address%>");
5193 return const0_rtx;
5195 else
5197 rtx tem
5198 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5199 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5201 /* Some ports cannot access arbitrary stack frames. */
5202 if (tem == NULL)
5204 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5205 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5206 else
5207 warning (0, "unsupported argument to %<__builtin_return_address%>");
5208 return const0_rtx;
5211 /* For __builtin_frame_address, return what we've got. */
5212 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5213 return tem;
5215 if (!REG_P (tem)
5216 && ! CONSTANT_P (tem))
5217 tem = copy_to_mode_reg (Pmode, tem);
5218 return tem;
5222 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5223 we failed and the caller should emit a normal call, otherwise try to get
5224 the result in TARGET, if convenient. */
5226 static rtx
5227 expand_builtin_alloca (tree exp, rtx target)
5229 rtx op0;
5230 rtx result;
5232 /* Emit normal call if marked not-inlineable. */
5233 if (CALL_CANNOT_INLINE_P (exp))
5234 return NULL_RTX;
5236 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5237 return NULL_RTX;
5239 /* Compute the argument. */
5240 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5242 /* Allocate the desired space. */
5243 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5244 result = convert_memory_address (ptr_mode, result);
5246 return result;
5249 /* Expand a call to a bswap builtin with argument ARG0. MODE
5250 is the mode to expand with. */
5252 static rtx
5253 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5255 enum machine_mode mode;
5256 tree arg;
5257 rtx op0;
5259 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5260 return NULL_RTX;
5262 arg = CALL_EXPR_ARG (exp, 0);
5263 mode = TYPE_MODE (TREE_TYPE (arg));
5264 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5266 target = expand_unop (mode, bswap_optab, op0, target, 1);
5268 gcc_assert (target);
5270 return convert_to_mode (mode, target, 0);
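/* Worked example (illustrative only): __builtin_bswap32 reverses the byte
   order of its operand, so

       __builtin_bswap32 (0x11223344U)   yields   0x44332211U

   The expansion above simply applies bswap_optab in the argument's mode.  */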
5273 /* Expand a call to a unary builtin in EXP.
5274 Return NULL_RTX if a normal call should be emitted rather than expanding the
5275 function in-line. If convenient, the result should be placed in TARGET.
5276 SUBTARGET may be used as the target for computing one of EXP's operands. */
5278 static rtx
5279 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5280 rtx subtarget, optab op_optab)
5282 rtx op0;
5284 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5285 return NULL_RTX;
5287 /* Compute the argument. */
5288 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5289 VOIDmode, EXPAND_NORMAL);
5290 /* Compute op, into TARGET if possible.
5291 Set TARGET to wherever the result comes back. */
5292 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5293 op_optab, op0, target, 1);
5294 gcc_assert (target);
5296 return convert_to_mode (target_mode, target, 0);
5299 /* If the string passed to fputs is a constant and is one character
5300 long, we attempt to transform this call into __builtin_fputc(). */
5302 static rtx
5303 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5305 /* Verify the arguments in the original call. */
5306 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5308 tree result = fold_builtin_fputs (EXPR_LOCATION (exp),
5309 CALL_EXPR_ARG (exp, 0),
5310 CALL_EXPR_ARG (exp, 1),
5311 (target == const0_rtx),
5312 unlocked, NULL_TREE);
5313 if (result)
5314 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5316 return NULL_RTX;
5319 /* Expand a call to __builtin_expect. We just return our first argument,
5320 since the __builtin_expect semantics should already have been handled by
5321 the tree-level branch prediction pass. */
5323 static rtx
5324 expand_builtin_expect (tree exp, rtx target)
5326 tree arg, c;
5328 if (call_expr_nargs (exp) < 2)
5329 return const0_rtx;
5330 arg = CALL_EXPR_ARG (exp, 0);
5331 c = CALL_EXPR_ARG (exp, 1);
5333 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5334 /* When branch guessing was done, the hints should already have been stripped away. */
5335 gcc_assert (!flag_guess_branch_prob
5336 || optimize == 0 || errorcount || sorrycount);
5337 return target;
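/* Illustrative usage sketch (hypothetical caller and function names):

       if (__builtin_expect (ptr == NULL, 0))
         handle_rare_error ();

   By this point the hint has already been consumed by the tree-level
   branch predictor, so expansion just yields the value of the first
   argument.  */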
5340 void
5341 expand_builtin_trap (void)
5343 #ifdef HAVE_trap
5344 if (HAVE_trap)
5345 emit_insn (gen_trap ());
5346 else
5347 #endif
5348 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5349 emit_barrier ();
5352 /* Expand a call to __builtin_unreachable. We do nothing except emit
5353 a barrier saying that control flow will not pass here.
5355 It is the responsibility of the program being compiled to ensure
5356 that control flow never reaches __builtin_unreachable. */
5357 static void
5358 expand_builtin_unreachable (void)
5360 emit_barrier ();
5363 /* Expand EXP, a call to fabs, fabsf or fabsl.
5364 Return NULL_RTX if a normal call should be emitted rather than expanding
5365 the function inline. If convenient, the result should be placed
5366 in TARGET. SUBTARGET may be used as the target for computing
5367 the operand. */
5369 static rtx
5370 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5372 enum machine_mode mode;
5373 tree arg;
5374 rtx op0;
5376 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5377 return NULL_RTX;
5379 arg = CALL_EXPR_ARG (exp, 0);
5380 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5381 mode = TYPE_MODE (TREE_TYPE (arg));
5382 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5383 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5386 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5387 Return NULL_RTX if a normal call should be emitted rather than expanding the
5388 function inline. If convenient, the result should be placed in TARGET.
5389 SUBTARGET may be used as the target for computing the operand. */
5391 static rtx
5392 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5394 rtx op0, op1;
5395 tree arg;
5397 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5398 return NULL_RTX;
5400 arg = CALL_EXPR_ARG (exp, 0);
5401 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5403 arg = CALL_EXPR_ARG (exp, 1);
5404 op1 = expand_normal (arg);
5406 return expand_copysign (op0, op1, target);
5409 /* Create a new constant string literal and return a char* pointer to it.
5410 The STRING_CST value is the LEN characters at STR. */
5411 tree
5412 build_string_literal (int len, const char *str)
5414 tree t, elem, index, type;
5416 t = build_string (len, str);
5417 elem = build_type_variant (char_type_node, 1, 0);
5418 index = build_index_type (size_int (len - 1));
5419 type = build_array_type (elem, index);
5420 TREE_TYPE (t) = type;
5421 TREE_CONSTANT (t) = 1;
5422 TREE_READONLY (t) = 1;
5423 TREE_STATIC (t) = 1;
5425 type = build_pointer_type (elem);
5426 t = build1 (ADDR_EXPR, type,
5427 build4 (ARRAY_REF, elem,
5428 t, integer_zero_node, NULL_TREE, NULL_TREE));
5429 return t;
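/* Sketch of what the tree built above denotes, in C-like notation
   (illustrative only; "lit" is a made-up name):

       build_string_literal (4, "abc")

   behaves like taking the address of element 0 of

       static const char lit[4] = "abc";

   i.e. a static, read-only array holding the LEN bytes at STR.  */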
5432 /* Expand EXP, a call to printf or printf_unlocked.
5433 Return NULL_RTX if a normal call should be emitted rather than transforming
5434 the function inline. If convenient, the result should be placed in
5435 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5436 call. */
5437 static rtx
5438 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5439 bool unlocked)
5441 /* If we're using an unlocked function, assume the other unlocked
5442 functions exist explicitly. */
5443 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5444 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5445 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5446 : implicit_built_in_decls[BUILT_IN_PUTS];
5447 const char *fmt_str;
5448 tree fn = 0;
5449 tree fmt, arg;
5450 int nargs = call_expr_nargs (exp);
5452 /* If the return value is used, don't do the transformation. */
5453 if (target != const0_rtx)
5454 return NULL_RTX;
5456 /* Verify the required arguments in the original call. */
5457 if (nargs == 0)
5458 return NULL_RTX;
5459 fmt = CALL_EXPR_ARG (exp, 0);
5460 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5461 return NULL_RTX;
5463 /* Check whether the format is a literal string constant. */
5464 fmt_str = c_getstr (fmt);
5465 if (fmt_str == NULL)
5466 return NULL_RTX;
5468 if (!init_target_chars ())
5469 return NULL_RTX;
5471 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5472 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5474 if ((nargs != 2)
5475 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5476 return NULL_RTX;
5477 if (fn_puts)
5478 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5480 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5481 else if (strcmp (fmt_str, target_percent_c) == 0)
5483 if ((nargs != 2)
5484 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5485 return NULL_RTX;
5486 if (fn_putchar)
5487 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5489 else
5491 /* We can't handle anything else with % args or %% ... yet. */
5492 if (strchr (fmt_str, target_percent))
5493 return NULL_RTX;
5495 if (nargs > 1)
5496 return NULL_RTX;
5498 /* If the format specifier was "", printf does nothing. */
5499 if (fmt_str[0] == '\0')
5500 return const0_rtx;
5501 /* If the format specifier has length of 1, call putchar. */
5502 if (fmt_str[1] == '\0')
5504 /* Given printf("c"), where "c" is any single-character string,
5505 convert "c"[0] to an int and pass that to the replacement
5506 function. */
5507 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5508 if (fn_putchar)
5509 fn = build_call_expr (fn_putchar, 1, arg);
5511 else
5513 /* If the format specifier was "string\n", call puts("string"). */
5514 size_t len = strlen (fmt_str);
5515 if ((unsigned char)fmt_str[len - 1] == target_newline)
5517 /* Create a NUL-terminated string that's one char shorter
5518 than the original, stripping off the trailing '\n'. */
5519 char *newstr = XALLOCAVEC (char, len);
5520 memcpy (newstr, fmt_str, len - 1);
5521 newstr[len - 1] = 0;
5522 arg = build_string_literal (len, newstr);
5523 if (fn_puts)
5524 fn = build_call_expr (fn_puts, 1, arg);
5526 else
5527 /* We'd like to arrange to call fputs(string,stdout) here,
5528 but we need stdout and don't have a way to get it yet. */
5529 return NULL_RTX;
5533 if (!fn)
5534 return NULL_RTX;
5535 if (TREE_CODE (fn) == CALL_EXPR)
5536 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5537 return expand_expr (fn, target, mode, EXPAND_NORMAL);
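/* Summary of the rewrites performed above, shown as illustrative
   source-level transformations (they apply only when the printf return
   value is unused; the variable names are hypothetical):

       printf ("%s\n", s);    becomes   puts (s);
       printf ("%c", c);      becomes   putchar (c);
       printf ("");           becomes   nothing at all
       printf ("x");          becomes   putchar ('x');
       printf ("hello\n");    becomes   puts ("hello");

   Any other format containing '%' is left as a normal printf call.  */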
5540 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5541 Return NULL_RTX if a normal call should be emitted rather than transforming
5542 the function inline. If convenient, the result should be placed in
5543 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5544 call. */
5545 static rtx
5546 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5547 bool unlocked)
5549 /* If we're using an unlocked function, assume the other unlocked
5550 functions exist explicitly. */
5551 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5552 : implicit_built_in_decls[BUILT_IN_FPUTC];
5553 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5554 : implicit_built_in_decls[BUILT_IN_FPUTS];
5555 const char *fmt_str;
5556 tree fn = 0;
5557 tree fmt, fp, arg;
5558 int nargs = call_expr_nargs (exp);
5560 /* If the return value is used, don't do the transformation. */
5561 if (target != const0_rtx)
5562 return NULL_RTX;
5564 /* Verify the required arguments in the original call. */
5565 if (nargs < 2)
5566 return NULL_RTX;
5567 fp = CALL_EXPR_ARG (exp, 0);
5568 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5569 return NULL_RTX;
5570 fmt = CALL_EXPR_ARG (exp, 1);
5571 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5572 return NULL_RTX;
5574 /* Check whether the format is a literal string constant. */
5575 fmt_str = c_getstr (fmt);
5576 if (fmt_str == NULL)
5577 return NULL_RTX;
5579 if (!init_target_chars ())
5580 return NULL_RTX;
5582 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5583 if (strcmp (fmt_str, target_percent_s) == 0)
5585 if ((nargs != 3)
5586 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5587 return NULL_RTX;
5588 arg = CALL_EXPR_ARG (exp, 2);
5589 if (fn_fputs)
5590 fn = build_call_expr (fn_fputs, 2, arg, fp);
5592 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5593 else if (strcmp (fmt_str, target_percent_c) == 0)
5595 if ((nargs != 3)
5596 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5597 return NULL_RTX;
5598 arg = CALL_EXPR_ARG (exp, 2);
5599 if (fn_fputc)
5600 fn = build_call_expr (fn_fputc, 2, arg, fp);
5602 else
5604 /* We can't handle anything else with % args or %% ... yet. */
5605 if (strchr (fmt_str, target_percent))
5606 return NULL_RTX;
5608 if (nargs > 2)
5609 return NULL_RTX;
5611 /* If the format specifier was "", fprintf does nothing. */
5612 if (fmt_str[0] == '\0')
5614 /* Evaluate and ignore FILE* argument for side-effects. */
5615 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5616 return const0_rtx;
5619 /* When "string" doesn't contain %, replace all cases of
5620 fprintf(stream,string) with fputs(string,stream). The fputs
5621 builtin will take care of special cases like length == 1. */
5622 if (fn_fputs)
5623 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5626 if (!fn)
5627 return NULL_RTX;
5628 if (TREE_CODE (fn) == CALL_EXPR)
5629 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5630 return expand_expr (fn, target, mode, EXPAND_NORMAL);
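/* Summary of the rewrites performed above, shown as illustrative
   source-level transformations (they apply only when the fprintf return
   value is unused; the variable names are hypothetical):

       fprintf (fp, "%s", s);     becomes   fputs (s, fp);
       fprintf (fp, "%c", c);     becomes   fputc (c, fp);
       fprintf (fp, "");          becomes   evaluating FP for side effects
       fprintf (fp, "message");   becomes   fputs ("message", fp);

   Formats containing any other '%' directive are left as normal calls.  */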
5633 /* Expand a call EXP to sprintf. Return NULL_RTX if
5634 a normal call should be emitted rather than expanding the function
5635 inline. If convenient, the result should be placed in TARGET with
5636 mode MODE. */
5638 static rtx
5639 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5641 tree dest, fmt;
5642 const char *fmt_str;
5643 int nargs = call_expr_nargs (exp);
5645 /* Verify the required arguments in the original call. */
5646 if (nargs < 2)
5647 return NULL_RTX;
5648 dest = CALL_EXPR_ARG (exp, 0);
5649 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5650 return NULL_RTX;
5651 fmt = CALL_EXPR_ARG (exp, 1);
5652 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5653 return NULL_RTX;
5655 /* Check whether the format is a literal string constant. */
5656 fmt_str = c_getstr (fmt);
5657 if (fmt_str == NULL)
5658 return NULL_RTX;
5660 if (!init_target_chars ())
5661 return NULL_RTX;
5663 /* If the format doesn't contain % args or %%, use strcpy. */
5664 if (strchr (fmt_str, target_percent) == 0)
5666 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5667 tree exp;
5669 if ((nargs > 2) || ! fn)
5670 return NULL_RTX;
5671 expand_expr (build_call_expr (fn, 2, dest, fmt),
5672 const0_rtx, VOIDmode, EXPAND_NORMAL);
5673 if (target == const0_rtx)
5674 return const0_rtx;
5675 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5676 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5678 /* If the format is "%s", use strcpy if the result isn't used. */
5679 else if (strcmp (fmt_str, target_percent_s) == 0)
5681 tree fn, arg, len;
5682 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5684 if (! fn)
5685 return NULL_RTX;
5686 if (nargs != 3)
5687 return NULL_RTX;
5688 arg = CALL_EXPR_ARG (exp, 2);
5689 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5690 return NULL_RTX;
5692 if (target != const0_rtx)
5694 len = c_strlen (arg, 1);
5695 if (! len || TREE_CODE (len) != INTEGER_CST)
5696 return NULL_RTX;
5698 else
5699 len = NULL_TREE;
5701 expand_expr (build_call_expr (fn, 2, dest, arg),
5702 const0_rtx, VOIDmode, EXPAND_NORMAL);
5704 if (target == const0_rtx)
5705 return const0_rtx;
5706 return expand_expr (len, target, mode, EXPAND_NORMAL);
5709 return NULL_RTX;
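/* Summary of the rewrites performed above (illustrative; "buf" and "s" are
   hypothetical names):

       sprintf (buf, "hello");    becomes   strcpy (buf, "hello"),
                                            with 5 as the result value
       sprintf (buf, "%s", s);    becomes   strcpy (buf, s), provided the
                                            result is unused or strlen (s)
                                            is a compile-time constant

   Formats with any other '%' directive fall back to a normal call.  */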
5712 /* Expand a call to either the entry or exit function profiler. */
5714 static rtx
5715 expand_builtin_profile_func (bool exitp)
5717 rtx this_rtx, which;
5719 this_rtx = DECL_RTL (current_function_decl);
5720 gcc_assert (MEM_P (this_rtx));
5721 this_rtx = XEXP (this_rtx, 0);
5723 if (exitp)
5724 which = profile_function_exit_libfunc;
5725 else
5726 which = profile_function_entry_libfunc;
5728 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5729 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5730 0),
5731 Pmode);
5733 return const0_rtx;
5736 /* Expand a call to __builtin___clear_cache. */
5738 static rtx
5739 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5741 #ifndef HAVE_clear_cache
5742 #ifdef CLEAR_INSN_CACHE
5743 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5744 does something. Just do the default expansion to a call to
5745 __clear_cache(). */
5746 return NULL_RTX;
5747 #else
5748 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5749 does nothing. There is no need to call it. Do nothing. */
5750 return const0_rtx;
5751 #endif /* CLEAR_INSN_CACHE */
5752 #else
5753 /* We have a "clear_cache" insn, and it will handle everything. */
5754 tree begin, end;
5755 rtx begin_rtx, end_rtx;
5756 enum insn_code icode;
5758 /* We must not expand to a library call. If we did, any
5759 fallback library function in libgcc that might contain a call to
5760 __builtin___clear_cache() would recurse infinitely. */
5761 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5763 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5764 return const0_rtx;
5767 if (HAVE_clear_cache)
5769 icode = CODE_FOR_clear_cache;
5771 begin = CALL_EXPR_ARG (exp, 0);
5772 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5773 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5774 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5775 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5777 end = CALL_EXPR_ARG (exp, 1);
5778 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5779 end_rtx = convert_memory_address (Pmode, end_rtx);
5780 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5781 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5783 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5785 return const0_rtx;
5786 #endif /* HAVE_clear_cache */
5789 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5791 static rtx
5792 round_trampoline_addr (rtx tramp)
5794 rtx temp, addend, mask;
5796 /* If we don't need too much alignment, we'll have been guaranteed
5797 proper alignment by get_trampoline_type. */
5798 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5799 return tramp;
5801 /* Round address up to desired boundary. */
5802 temp = gen_reg_rtx (Pmode);
5803 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5804 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5806 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5807 temp, 0, OPTAB_LIB_WIDEN);
5808 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5809 temp, 0, OPTAB_LIB_WIDEN);
5811 return tramp;
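/* The computation above is the usual round-up-to-alignment idiom.  Worked
   example (assuming TRAMPOLINE_ALIGNMENT of 64 bits, i.e. 8 bytes):

       tramp = 0x1003
       temp  = 0x1003 + (8 - 1) = 0x100a
       tramp = 0x100a & -8      = 0x1008

   so the address is rounded up to the next multiple of the alignment.  */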
5814 static rtx
5815 expand_builtin_init_trampoline (tree exp)
5817 tree t_tramp, t_func, t_chain;
5818 rtx r_tramp, r_func, r_chain;
5819 #ifdef TRAMPOLINE_TEMPLATE
5820 rtx blktramp;
5821 #endif
5823 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5824 POINTER_TYPE, VOID_TYPE))
5825 return NULL_RTX;
5827 t_tramp = CALL_EXPR_ARG (exp, 0);
5828 t_func = CALL_EXPR_ARG (exp, 1);
5829 t_chain = CALL_EXPR_ARG (exp, 2);
5831 r_tramp = expand_normal (t_tramp);
5832 r_func = expand_normal (t_func);
5833 r_chain = expand_normal (t_chain);
5835 /* Generate insns to initialize the trampoline. */
5836 r_tramp = round_trampoline_addr (r_tramp);
5837 #ifdef TRAMPOLINE_TEMPLATE
5838 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5839 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5840 emit_block_move (blktramp, assemble_trampoline_template (),
5841 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5842 #endif
5843 trampolines_created = 1;
5844 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5846 return const0_rtx;
5849 static rtx
5850 expand_builtin_adjust_trampoline (tree exp)
5852 rtx tramp;
5854 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5855 return NULL_RTX;
5857 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5858 tramp = round_trampoline_addr (tramp);
5859 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5860 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5861 #endif
5863 return tramp;
5866 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5867 function. The function first checks whether the back end provides
5868 an insn to implement signbit for the respective mode. If not, it
5869 checks whether the floating point format of the value is such that
5870 the sign bit can be extracted. If that is not the case, the
5871 function returns NULL_RTX to indicate that a normal call should be
5872 emitted rather than expanding the function in-line. EXP is the
5873 expression that is a call to the builtin function; if convenient,
5874 the result should be placed in TARGET. */
5875 static rtx
5876 expand_builtin_signbit (tree exp, rtx target)
5878 const struct real_format *fmt;
5879 enum machine_mode fmode, imode, rmode;
5880 HOST_WIDE_INT hi, lo;
5881 tree arg;
5882 int word, bitpos;
5883 enum insn_code icode;
5884 rtx temp;
5885 location_t loc = EXPR_LOCATION (exp);
5887 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5888 return NULL_RTX;
5890 arg = CALL_EXPR_ARG (exp, 0);
5891 fmode = TYPE_MODE (TREE_TYPE (arg));
5892 rmode = TYPE_MODE (TREE_TYPE (exp));
5893 fmt = REAL_MODE_FORMAT (fmode);
5895 arg = builtin_save_expr (arg);
5897 /* Expand the argument yielding a RTX expression. */
5898 temp = expand_normal (arg);
5900 /* Check if the back end provides an insn that handles signbit for the
5901 argument's mode. */
5902 icode = signbit_optab->handlers [(int) fmode].insn_code;
5903 if (icode != CODE_FOR_nothing)
5905 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5906 emit_unop_insn (icode, target, temp, UNKNOWN);
5907 return target;
5910 /* For floating point formats without a sign bit, implement signbit
5911 as "ARG < 0.0". */
5912 bitpos = fmt->signbit_ro;
5913 if (bitpos < 0)
5915 /* But we can't do this if the format supports signed zero. */
5916 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5917 return NULL_RTX;
5919 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5920 build_real (TREE_TYPE (arg), dconst0));
5921 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5924 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5926 imode = int_mode_for_mode (fmode);
5927 if (imode == BLKmode)
5928 return NULL_RTX;
5929 temp = gen_lowpart (imode, temp);
5931 else
5933 imode = word_mode;
5934 /* Handle targets with different FP word orders. */
5935 if (FLOAT_WORDS_BIG_ENDIAN)
5936 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5937 else
5938 word = bitpos / BITS_PER_WORD;
5939 temp = operand_subword_force (temp, word, fmode);
5940 bitpos = bitpos % BITS_PER_WORD;
5943 /* Force the intermediate word_mode (or narrower) result into a
5944 register. This avoids attempting to create paradoxical SUBREGs
5945 of floating point modes below. */
5946 temp = force_reg (imode, temp);
5948 /* If the bitpos is within the "result mode" lowpart, the operation
5949 can be implemented with a single bitwise AND. Otherwise, we need
5950 a right shift and an AND. */
5952 if (bitpos < GET_MODE_BITSIZE (rmode))
5954 if (bitpos < HOST_BITS_PER_WIDE_INT)
5956 hi = 0;
5957 lo = (HOST_WIDE_INT) 1 << bitpos;
5959 else
5961 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5962 lo = 0;
5965 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5966 temp = gen_lowpart (rmode, temp);
5967 temp = expand_binop (rmode, and_optab, temp,
5968 immed_double_const (lo, hi, rmode),
5969 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5971 else
5973 /* Perform a logical right shift to place the signbit in the least
5974 significant bit, then truncate the result to the desired mode
5975 and mask just this bit. */
5976 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5977 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5978 temp = gen_lowpart (rmode, temp);
5979 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5980 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5983 return temp;
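/* Worked example (assuming IEEE single precision, sign bit at position 31,
   and a 32-bit result mode): after the float has been forced into an
   integer register, the code above reduces to

       result = value_bits & 0x80000000

   If the sign bit does not fit in the result mode's lowpart, a logical
   right shift by BITPOS is emitted first, followed by an AND with 1.  */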
5986 /* Expand fork or exec calls. TARGET is the desired target of the
5987 call. EXP is the call. FN is the
5988 declaration of the actual function. IGNORE is nonzero if the
5989 value is to be ignored. */
5991 static rtx
5992 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5994 tree id, decl;
5995 tree call;
5997 /* If we are not profiling, just call the function. */
5998 if (!profile_arc_flag)
5999 return NULL_RTX;
6001 /* Otherwise call the wrapper. This should be equivalent for the rest of the
6002 compiler, so the generated code does not diverge, and the wrapper can run
6003 whatever code is needed to keep the profiling data sane. */
6005 switch (DECL_FUNCTION_CODE (fn))
6007 case BUILT_IN_FORK:
6008 id = get_identifier ("__gcov_fork");
6009 break;
6011 case BUILT_IN_EXECL:
6012 id = get_identifier ("__gcov_execl");
6013 break;
6015 case BUILT_IN_EXECV:
6016 id = get_identifier ("__gcov_execv");
6017 break;
6019 case BUILT_IN_EXECLP:
6020 id = get_identifier ("__gcov_execlp");
6021 break;
6023 case BUILT_IN_EXECLE:
6024 id = get_identifier ("__gcov_execle");
6025 break;
6027 case BUILT_IN_EXECVP:
6028 id = get_identifier ("__gcov_execvp");
6029 break;
6031 case BUILT_IN_EXECVE:
6032 id = get_identifier ("__gcov_execve");
6033 break;
6035 default:
6036 gcc_unreachable ();
6039 decl = build_decl (DECL_SOURCE_LOCATION (fn),
6040 FUNCTION_DECL, id, TREE_TYPE (fn));
6041 DECL_EXTERNAL (decl) = 1;
6042 TREE_PUBLIC (decl) = 1;
6043 DECL_ARTIFICIAL (decl) = 1;
6044 TREE_NOTHROW (decl) = 1;
6045 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6046 DECL_VISIBILITY_SPECIFIED (decl) = 1;
6047 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
6048 return expand_call (call, target, ignore);
6053 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6054 the pointer in these functions is void*, the tree optimizers may remove
6055 casts. The mode computed in expand_builtin isn't reliable either, due
6056 to __sync_bool_compare_and_swap.
6058 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6059 group of builtins. This gives us log2 of the mode size. */
6061 static inline enum machine_mode
6062 get_builtin_sync_mode (int fcode_diff)
6064 /* The size is not negotiable, so ask not to get BLKmode in return
6065 if the target indicates that a smaller size would be better. */
6066 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
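/* Worked example (illustrative, assuming the _1/_2/_4/_8/_16 builtins are
   numbered consecutively): for __sync_fetch_and_add_4 the caller passes
   fcode - BUILT_IN_FETCH_AND_ADD_1, i.e. 2, so the size requested is
   BITS_PER_UNIT << 2 == 32 bits, which is SImode on a target with
   8-bit units.  */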
6069 /* Expand the memory expression LOC and return the appropriate memory operand
6070 for the builtin_sync operations. */
6072 static rtx
6073 get_builtin_sync_mem (tree loc, enum machine_mode mode)
6075 rtx addr, mem;
6077 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
6079 /* Note that we explicitly do not want any alias information for this
6080 memory, so that we kill all other live memories. Otherwise we don't
6081 satisfy the full barrier semantics of the intrinsic. */
6082 mem = validize_mem (gen_rtx_MEM (mode, addr));
6084 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
6085 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6086 MEM_VOLATILE_P (mem) = 1;
6088 return mem;
6091 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6092 EXP is the CALL_EXPR. CODE is the rtx code
6093 that corresponds to the arithmetic or logical operation from the name;
6094 an exception here is that NOT actually means NAND. TARGET is an optional
6095 place for us to store the results; AFTER is true for the xxx_and_fetch
6096 forms, which return the value after the operation, and false for the
6097 fetch_and_xxx forms. IGNORE is true if we don't care about the result. */
6099 static rtx
6100 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
6101 enum rtx_code code, bool after,
6102 rtx target, bool ignore)
6104 rtx val, mem;
6105 enum machine_mode old_mode;
6106 location_t loc = EXPR_LOCATION (exp);
6108 if (code == NOT && warn_sync_nand)
6110 tree fndecl = get_callee_fndecl (exp);
6111 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6113 static bool warned_f_a_n, warned_n_a_f;
6115 switch (fcode)
6117 case BUILT_IN_FETCH_AND_NAND_1:
6118 case BUILT_IN_FETCH_AND_NAND_2:
6119 case BUILT_IN_FETCH_AND_NAND_4:
6120 case BUILT_IN_FETCH_AND_NAND_8:
6121 case BUILT_IN_FETCH_AND_NAND_16:
6123 if (warned_f_a_n)
6124 break;
6126 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
6127 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6128 warned_f_a_n = true;
6129 break;
6131 case BUILT_IN_NAND_AND_FETCH_1:
6132 case BUILT_IN_NAND_AND_FETCH_2:
6133 case BUILT_IN_NAND_AND_FETCH_4:
6134 case BUILT_IN_NAND_AND_FETCH_8:
6135 case BUILT_IN_NAND_AND_FETCH_16:
6137 if (warned_n_a_f)
6138 break;
6140 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
6141 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6142 warned_n_a_f = true;
6143 break;
6145 default:
6146 gcc_unreachable ();
6150 /* Expand the operands. */
6151 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6153 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6154 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6155 of CONST_INTs, where we know the old_mode only from the call argument. */
6156 old_mode = GET_MODE (val);
6157 if (old_mode == VOIDmode)
6158 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6159 val = convert_modes (mode, old_mode, val, 1);
6161 if (ignore)
6162 return expand_sync_operation (mem, val, code);
6163 else
6164 return expand_sync_fetch_operation (mem, val, code, after, target);
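/* Illustrative source-level forms handled here (hypothetical caller;
   "counter" is a made-up variable):

       old_val = __sync_fetch_and_add (&counter, 1);   -- AFTER is false
       new_val = __sync_add_and_fetch (&counter, 1);   -- AFTER is true

   CODE == NOT selects the NAND variants, whose semantics changed in
   GCC 4.4 as warned about above.  */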
6167 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6168 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6169 true if this is the boolean form. TARGET is a place for us to store the
6170 results; this is NOT optional if IS_BOOL is true. */
6172 static rtx
6173 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
6174 bool is_bool, rtx target)
6176 rtx old_val, new_val, mem;
6177 enum machine_mode old_mode;
6179 /* Expand the operands. */
6180 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6183 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6184 mode, EXPAND_NORMAL);
6185 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6186 of CONST_INTs, where we know the old_mode only from the call argument. */
6187 old_mode = GET_MODE (old_val);
6188 if (old_mode == VOIDmode)
6189 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6190 old_val = convert_modes (mode, old_mode, old_val, 1);
6192 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6193 mode, EXPAND_NORMAL);
6194 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6195 of CONST_INTs, where we know the old_mode only from the call argument. */
6196 old_mode = GET_MODE (new_val);
6197 if (old_mode == VOIDmode)
6198 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6199 new_val = convert_modes (mode, old_mode, new_val, 1);
6201 if (is_bool)
6202 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6203 else
6204 return expand_val_compare_and_swap (mem, old_val, new_val, target);
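/* Illustrative source-level forms handled here (hypothetical names):

       int word, expected, desired, old, ok;
       ok  = __sync_bool_compare_and_swap (&word, expected, desired);
       old = __sync_val_compare_and_swap  (&word, expected, desired);

   The boolean form requires TARGET as a register to receive the comparison
   result; the value form returns the previous contents of the memory.  */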
6207 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6208 general form is actually an atomic exchange, and some targets only
6209 support a reduced form with the second argument being a constant 1.
6210 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6211 the results. */
6213 static rtx
6214 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6215 rtx target)
6217 rtx val, mem;
6218 enum machine_mode old_mode;
6220 /* Expand the operands. */
6221 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6222 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6223 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6224 of CONST_INTs, where we know the old_mode only from the call argument. */
6225 old_mode = GET_MODE (val);
6226 if (old_mode == VOIDmode)
6227 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6228 val = convert_modes (mode, old_mode, val, 1);
6230 return expand_sync_lock_test_and_set (mem, val, target);
6233 /* Expand the __sync_synchronize intrinsic. */
6235 static void
6236 expand_builtin_synchronize (void)
6238 tree x;
6240 #ifdef HAVE_memory_barrier
6241 if (HAVE_memory_barrier)
6243 emit_insn (gen_memory_barrier ());
6244 return;
6246 #endif
6248 if (synchronize_libfunc != NULL_RTX)
6250 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6251 return;
6254 /* If no explicit memory barrier instruction is available, create an
6255 empty asm stmt with a memory clobber. */
6256 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6257 tree_cons (NULL, build_string (6, "memory"), NULL));
6258 ASM_VOLATILE_P (x) = 1;
6259 expand_asm_expr (x);
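/* The fallback above is equivalent to the illustrative source form

       __asm__ __volatile__ ("" : : : "memory");

   i.e. a volatile asm with no instructions and a "memory" clobber, which
   acts as a compiler-level barrier when neither a memory_barrier pattern
   nor a synchronize library function is available.  */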
6262 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6264 static void
6265 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6267 enum insn_code icode;
6268 rtx mem, insn;
6269 rtx val = const0_rtx;
6271 /* Expand the operands. */
6272 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6274 /* If there is an explicit operation in the md file, use it. */
6275 icode = sync_lock_release[mode];
6276 if (icode != CODE_FOR_nothing)
6278 if (!insn_data[icode].operand[1].predicate (val, mode))
6279 val = force_reg (mode, val);
6281 insn = GEN_FCN (icode) (mem, val);
6282 if (insn)
6284 emit_insn (insn);
6285 return;
6289 /* Otherwise we can implement this operation by emitting a barrier
6290 followed by a store of zero. */
6291 expand_builtin_synchronize ();
6292 emit_move_insn (mem, val);
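/* Illustrative source-level form (hypothetical "lock" variable):

       __sync_lock_release (&lock);

   When the target provides no sync_lock_release pattern, the fallback
   above is exactly a full barrier followed by a store of zero.  */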
6295 /* Expand an expression EXP that calls a built-in function,
6296 with result going to TARGET if that's convenient
6297 (and in mode MODE if that's convenient).
6298 SUBTARGET may be used as the target for computing one of EXP's operands.
6299 IGNORE is nonzero if the value is to be ignored. */
6301 rtx
6302 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6303 int ignore)
6305 tree fndecl = get_callee_fndecl (exp);
6306 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6307 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6309 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6310 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6312 /* When not optimizing, generate calls to library functions for a certain
6313 set of builtins. */
6314 if (!optimize
6315 && !called_as_built_in (fndecl)
6316 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6317 && fcode != BUILT_IN_ALLOCA
6318 && fcode != BUILT_IN_FREE)
6319 return expand_call (exp, target, ignore);
6321 /* The built-in function expanders test for target == const0_rtx
6322 to determine whether the function's result will be ignored. */
6323 if (ignore)
6324 target = const0_rtx;
6326 /* If the result of a pure or const built-in function is ignored, and
6327 none of its arguments are volatile, we can avoid expanding the
6328 built-in call and just evaluate the arguments for side-effects. */
6329 if (target == const0_rtx
6330 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6332 bool volatilep = false;
6333 tree arg;
6334 call_expr_arg_iterator iter;
6336 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6337 if (TREE_THIS_VOLATILE (arg))
6339 volatilep = true;
6340 break;
6343 if (! volatilep)
6345 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6346 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6347 return const0_rtx;
6351 switch (fcode)
6353 CASE_FLT_FN (BUILT_IN_FABS):
6354 target = expand_builtin_fabs (exp, target, subtarget);
6355 if (target)
6356 return target;
6357 break;
6359 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6360 target = expand_builtin_copysign (exp, target, subtarget);
6361 if (target)
6362 return target;
6363 break;
6365 /* Just do a normal library call if we were unable to fold
6366 the values. */
6367 CASE_FLT_FN (BUILT_IN_CABS):
6368 break;
6370 CASE_FLT_FN (BUILT_IN_EXP):
6371 CASE_FLT_FN (BUILT_IN_EXP10):
6372 CASE_FLT_FN (BUILT_IN_POW10):
6373 CASE_FLT_FN (BUILT_IN_EXP2):
6374 CASE_FLT_FN (BUILT_IN_EXPM1):
6375 CASE_FLT_FN (BUILT_IN_LOGB):
6376 CASE_FLT_FN (BUILT_IN_LOG):
6377 CASE_FLT_FN (BUILT_IN_LOG10):
6378 CASE_FLT_FN (BUILT_IN_LOG2):
6379 CASE_FLT_FN (BUILT_IN_LOG1P):
6380 CASE_FLT_FN (BUILT_IN_TAN):
6381 CASE_FLT_FN (BUILT_IN_ASIN):
6382 CASE_FLT_FN (BUILT_IN_ACOS):
6383 CASE_FLT_FN (BUILT_IN_ATAN):
6384 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6385 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6386 because of possible accuracy problems. */
6387 if (! flag_unsafe_math_optimizations)
6388 break;
6389 CASE_FLT_FN (BUILT_IN_SQRT):
6390 CASE_FLT_FN (BUILT_IN_FLOOR):
6391 CASE_FLT_FN (BUILT_IN_CEIL):
6392 CASE_FLT_FN (BUILT_IN_TRUNC):
6393 CASE_FLT_FN (BUILT_IN_ROUND):
6394 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6395 CASE_FLT_FN (BUILT_IN_RINT):
6396 target = expand_builtin_mathfn (exp, target, subtarget);
6397 if (target)
6398 return target;
6399 break;
6401 CASE_FLT_FN (BUILT_IN_ILOGB):
6402 if (! flag_unsafe_math_optimizations)
6403 break;
6404 CASE_FLT_FN (BUILT_IN_ISINF):
6405 CASE_FLT_FN (BUILT_IN_FINITE):
6406 case BUILT_IN_ISFINITE:
6407 case BUILT_IN_ISNORMAL:
6408 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6409 if (target)
6410 return target;
6411 break;
6413 CASE_FLT_FN (BUILT_IN_LCEIL):
6414 CASE_FLT_FN (BUILT_IN_LLCEIL):
6415 CASE_FLT_FN (BUILT_IN_LFLOOR):
6416 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6417 target = expand_builtin_int_roundingfn (exp, target);
6418 if (target)
6419 return target;
6420 break;
6422 CASE_FLT_FN (BUILT_IN_LRINT):
6423 CASE_FLT_FN (BUILT_IN_LLRINT):
6424 CASE_FLT_FN (BUILT_IN_LROUND):
6425 CASE_FLT_FN (BUILT_IN_LLROUND):
6426 target = expand_builtin_int_roundingfn_2 (exp, target);
6427 if (target)
6428 return target;
6429 break;
6431 CASE_FLT_FN (BUILT_IN_POW):
6432 target = expand_builtin_pow (exp, target, subtarget);
6433 if (target)
6434 return target;
6435 break;
6437 CASE_FLT_FN (BUILT_IN_POWI):
6438 target = expand_builtin_powi (exp, target, subtarget);
6439 if (target)
6440 return target;
6441 break;
6443 CASE_FLT_FN (BUILT_IN_ATAN2):
6444 CASE_FLT_FN (BUILT_IN_LDEXP):
6445 CASE_FLT_FN (BUILT_IN_SCALB):
6446 CASE_FLT_FN (BUILT_IN_SCALBN):
6447 CASE_FLT_FN (BUILT_IN_SCALBLN):
6448 if (! flag_unsafe_math_optimizations)
6449 break;
6451 CASE_FLT_FN (BUILT_IN_FMOD):
6452 CASE_FLT_FN (BUILT_IN_REMAINDER):
6453 CASE_FLT_FN (BUILT_IN_DREM):
6454 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6455 if (target)
6456 return target;
6457 break;
6459 CASE_FLT_FN (BUILT_IN_CEXPI):
6460 target = expand_builtin_cexpi (exp, target, subtarget);
6461 gcc_assert (target);
6462 return target;
6464 CASE_FLT_FN (BUILT_IN_SIN):
6465 CASE_FLT_FN (BUILT_IN_COS):
6466 if (! flag_unsafe_math_optimizations)
6467 break;
6468 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6469 if (target)
6470 return target;
6471 break;
6473 CASE_FLT_FN (BUILT_IN_SINCOS):
6474 if (! flag_unsafe_math_optimizations)
6475 break;
6476 target = expand_builtin_sincos (exp);
6477 if (target)
6478 return target;
6479 break;
6481 case BUILT_IN_APPLY_ARGS:
6482 return expand_builtin_apply_args ();
6484 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6485 FUNCTION with a copy of the parameters described by
6486 ARGUMENTS, and ARGSIZE. It returns a block of memory
6487 allocated on the stack into which is stored all the registers
6488 that might possibly be used for returning the result of a
6489 function. ARGUMENTS is the value returned by
6490 __builtin_apply_args. ARGSIZE is the number of bytes of
6491 arguments that must be copied. ??? How should this value be
6492 computed? We'll also need a safe worst case value for varargs
6493 functions. */
6494 case BUILT_IN_APPLY:
6495 if (!validate_arglist (exp, POINTER_TYPE,
6496 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6497 && !validate_arglist (exp, REFERENCE_TYPE,
6498 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6499 return const0_rtx;
6500 else
6502 rtx ops[3];
6504 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6505 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6506 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6508 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6511 /* __builtin_return (RESULT) causes the function to return the
6512 value described by RESULT. RESULT is address of the block of
6513 memory returned by __builtin_apply. */
6514 case BUILT_IN_RETURN:
6515 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6516 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6517 return const0_rtx;
6519 case BUILT_IN_SAVEREGS:
6520 return expand_builtin_saveregs ();
6522 case BUILT_IN_ARGS_INFO:
6523 return expand_builtin_args_info (exp);
6525 case BUILT_IN_VA_ARG_PACK:
6526 /* All valid uses of __builtin_va_arg_pack () are removed during
6527 inlining. */
6528 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6529 return const0_rtx;
6531 case BUILT_IN_VA_ARG_PACK_LEN:
6532 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6533 inlining. */
6534 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6535 return const0_rtx;
6537 /* Return the address of the first anonymous stack arg. */
6538 case BUILT_IN_NEXT_ARG:
6539 if (fold_builtin_next_arg (exp, false))
6540 return const0_rtx;
6541 return expand_builtin_next_arg ();
6543 case BUILT_IN_CLEAR_CACHE:
6544 target = expand_builtin___clear_cache (exp);
6545 if (target)
6546 return target;
6547 break;
6549 case BUILT_IN_CLASSIFY_TYPE:
6550 return expand_builtin_classify_type (exp);
6552 case BUILT_IN_CONSTANT_P:
6553 return const0_rtx;
6555 case BUILT_IN_FRAME_ADDRESS:
6556 case BUILT_IN_RETURN_ADDRESS:
6557 return expand_builtin_frame_address (fndecl, exp);
6559 /* Returns the address of the area where the structure is returned.
6560 0 otherwise. */
6561 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6562 if (call_expr_nargs (exp) != 0
6563 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6564 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6565 return const0_rtx;
6566 else
6567 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6569 case BUILT_IN_ALLOCA:
6570 target = expand_builtin_alloca (exp, target);
6571 if (target)
6572 return target;
6573 break;
6575 case BUILT_IN_STACK_SAVE:
6576 return expand_stack_save ();
6578 case BUILT_IN_STACK_RESTORE:
6579 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6580 return const0_rtx;
6582 case BUILT_IN_BSWAP32:
6583 case BUILT_IN_BSWAP64:
6584 target = expand_builtin_bswap (exp, target, subtarget);
6586 if (target)
6587 return target;
6588 break;
6590 CASE_INT_FN (BUILT_IN_FFS):
6591 case BUILT_IN_FFSIMAX:
6592 target = expand_builtin_unop (target_mode, exp, target,
6593 subtarget, ffs_optab);
6594 if (target)
6595 return target;
6596 break;
6598 CASE_INT_FN (BUILT_IN_CLZ):
6599 case BUILT_IN_CLZIMAX:
6600 target = expand_builtin_unop (target_mode, exp, target,
6601 subtarget, clz_optab);
6602 if (target)
6603 return target;
6604 break;
6606 CASE_INT_FN (BUILT_IN_CTZ):
6607 case BUILT_IN_CTZIMAX:
6608 target = expand_builtin_unop (target_mode, exp, target,
6609 subtarget, ctz_optab);
6610 if (target)
6611 return target;
6612 break;
6614 CASE_INT_FN (BUILT_IN_POPCOUNT):
6615 case BUILT_IN_POPCOUNTIMAX:
6616 target = expand_builtin_unop (target_mode, exp, target,
6617 subtarget, popcount_optab);
6618 if (target)
6619 return target;
6620 break;
6622 CASE_INT_FN (BUILT_IN_PARITY):
6623 case BUILT_IN_PARITYIMAX:
6624 target = expand_builtin_unop (target_mode, exp, target,
6625 subtarget, parity_optab);
6626 if (target)
6627 return target;
6628 break;
6630 case BUILT_IN_STRLEN:
6631 target = expand_builtin_strlen (exp, target, target_mode);
6632 if (target)
6633 return target;
6634 break;
6636 case BUILT_IN_STRCPY:
6637 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6638 if (target)
6639 return target;
6640 break;
6642 case BUILT_IN_STRNCPY:
6643 target = expand_builtin_strncpy (exp, target, mode);
6644 if (target)
6645 return target;
6646 break;
6648 case BUILT_IN_STPCPY:
6649 target = expand_builtin_stpcpy (exp, target, mode);
6650 if (target)
6651 return target;
6652 break;
6654 case BUILT_IN_STRCAT:
6655 target = expand_builtin_strcat (fndecl, exp, target, mode);
6656 if (target)
6657 return target;
6658 break;
6660 case BUILT_IN_STRNCAT:
6661 target = expand_builtin_strncat (exp, target, mode);
6662 if (target)
6663 return target;
6664 break;
6666 case BUILT_IN_STRSPN:
6667 target = expand_builtin_strspn (exp, target, mode);
6668 if (target)
6669 return target;
6670 break;
6672 case BUILT_IN_STRCSPN:
6673 target = expand_builtin_strcspn (exp, target, mode);
6674 if (target)
6675 return target;
6676 break;
6678 case BUILT_IN_STRSTR:
6679 target = expand_builtin_strstr (exp, target, mode);
6680 if (target)
6681 return target;
6682 break;
6684 case BUILT_IN_STRPBRK:
6685 target = expand_builtin_strpbrk (exp, target, mode);
6686 if (target)
6687 return target;
6688 break;
6690 case BUILT_IN_INDEX:
6691 case BUILT_IN_STRCHR:
6692 target = expand_builtin_strchr (exp, target, mode);
6693 if (target)
6694 return target;
6695 break;
6697 case BUILT_IN_RINDEX:
6698 case BUILT_IN_STRRCHR:
6699 target = expand_builtin_strrchr (exp, target, mode);
6700 if (target)
6701 return target;
6702 break;
6704 case BUILT_IN_MEMCPY:
6705 target = expand_builtin_memcpy (exp, target, mode);
6706 if (target)
6707 return target;
6708 break;
6710 case BUILT_IN_MEMPCPY:
6711 target = expand_builtin_mempcpy (exp, target, mode);
6712 if (target)
6713 return target;
6714 break;
6716 case BUILT_IN_MEMMOVE:
6717 target = expand_builtin_memmove (exp, target, mode, ignore);
6718 if (target)
6719 return target;
6720 break;
6722 case BUILT_IN_BCOPY:
6723 target = expand_builtin_bcopy (exp, ignore);
6724 if (target)
6725 return target;
6726 break;
6728 case BUILT_IN_MEMSET:
6729 target = expand_builtin_memset (exp, target, mode);
6730 if (target)
6731 return target;
6732 break;
6734 case BUILT_IN_BZERO:
6735 target = expand_builtin_bzero (exp);
6736 if (target)
6737 return target;
6738 break;
6740 case BUILT_IN_STRCMP:
6741 target = expand_builtin_strcmp (exp, target, mode);
6742 if (target)
6743 return target;
6744 break;
6746 case BUILT_IN_STRNCMP:
6747 target = expand_builtin_strncmp (exp, target, mode);
6748 if (target)
6749 return target;
6750 break;
6752 case BUILT_IN_MEMCHR:
6753 target = expand_builtin_memchr (exp, target, mode);
6754 if (target)
6755 return target;
6756 break;
6758 case BUILT_IN_BCMP:
6759 case BUILT_IN_MEMCMP:
6760 target = expand_builtin_memcmp (exp, target, mode);
6761 if (target)
6762 return target;
6763 break;
6765 case BUILT_IN_SETJMP:
6766 /* This should have been lowered to the builtins below. */
6767 gcc_unreachable ();
6769 case BUILT_IN_SETJMP_SETUP:
6770 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6771 and the receiver label. */
6772 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6774 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6775 VOIDmode, EXPAND_NORMAL);
6776 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6777 rtx label_r = label_rtx (label);
6779 /* This is copied from the handling of non-local gotos. */
6780 expand_builtin_setjmp_setup (buf_addr, label_r);
6781 nonlocal_goto_handler_labels
6782 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6783 nonlocal_goto_handler_labels);
6784 /* ??? Do not let expand_label treat us as such since we would
6785 not want to be both on the list of non-local labels and on
6786 the list of forced labels. */
6787 FORCED_LABEL (label) = 0;
6788 return const0_rtx;
6790 break;
6792 case BUILT_IN_SETJMP_DISPATCHER:
6793 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6794 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6796 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6797 rtx label_r = label_rtx (label);
6799 /* Remove the dispatcher label from the list of non-local labels
6800 since the receiver labels have been added to it above. */
6801 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6802 return const0_rtx;
6804 break;
6806 case BUILT_IN_SETJMP_RECEIVER:
6807 /* __builtin_setjmp_receiver is passed the receiver label. */
6808 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6810 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6811 rtx label_r = label_rtx (label);
6813 expand_builtin_setjmp_receiver (label_r);
6814 return const0_rtx;
6816 break;
6818 /* __builtin_longjmp is passed a pointer to an array of five words.
6819 It's similar to the C library longjmp function but works with
6820 __builtin_setjmp above. */
6821 case BUILT_IN_LONGJMP:
6822 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6824 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6825 VOIDmode, EXPAND_NORMAL);
6826 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6828 if (value != const1_rtx)
6830 error ("%<__builtin_longjmp%> second argument must be 1");
6831 return const0_rtx;
6834 expand_builtin_longjmp (buf_addr, value);
6835 return const0_rtx;
6837 break;
6839 case BUILT_IN_NONLOCAL_GOTO:
6840 target = expand_builtin_nonlocal_goto (exp);
6841 if (target)
6842 return target;
6843 break;
6845 /* This updates the setjmp buffer that is its argument with the value
6846 of the current stack pointer. */
6847 case BUILT_IN_UPDATE_SETJMP_BUF:
6848 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6850 rtx buf_addr
6851 = expand_normal (CALL_EXPR_ARG (exp, 0));
6853 expand_builtin_update_setjmp_buf (buf_addr);
6854 return const0_rtx;
6856 break;
6858 case BUILT_IN_TRAP:
6859 expand_builtin_trap ();
6860 return const0_rtx;
6862 case BUILT_IN_UNREACHABLE:
6863 expand_builtin_unreachable ();
6864 return const0_rtx;
6866 case BUILT_IN_PRINTF:
6867 target = expand_builtin_printf (exp, target, mode, false);
6868 if (target)
6869 return target;
6870 break;
6872 case BUILT_IN_PRINTF_UNLOCKED:
6873 target = expand_builtin_printf (exp, target, mode, true);
6874 if (target)
6875 return target;
6876 break;
6878 case BUILT_IN_FPUTS:
6879 target = expand_builtin_fputs (exp, target, false);
6880 if (target)
6881 return target;
6882 break;
6883 case BUILT_IN_FPUTS_UNLOCKED:
6884 target = expand_builtin_fputs (exp, target, true);
6885 if (target)
6886 return target;
6887 break;
6889 case BUILT_IN_FPRINTF:
6890 target = expand_builtin_fprintf (exp, target, mode, false);
6891 if (target)
6892 return target;
6893 break;
6895 case BUILT_IN_FPRINTF_UNLOCKED:
6896 target = expand_builtin_fprintf (exp, target, mode, true);
6897 if (target)
6898 return target;
6899 break;
6901 case BUILT_IN_SPRINTF:
6902 target = expand_builtin_sprintf (exp, target, mode);
6903 if (target)
6904 return target;
6905 break;
6907 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6908 case BUILT_IN_SIGNBITD32:
6909 case BUILT_IN_SIGNBITD64:
6910 case BUILT_IN_SIGNBITD128:
6911 target = expand_builtin_signbit (exp, target);
6912 if (target)
6913 return target;
6914 break;
6916 /* Various hooks for the DWARF 2 __throw routine. */
6917 case BUILT_IN_UNWIND_INIT:
6918 expand_builtin_unwind_init ();
6919 return const0_rtx;
6920 case BUILT_IN_DWARF_CFA:
6921 return virtual_cfa_rtx;
6922 #ifdef DWARF2_UNWIND_INFO
6923 case BUILT_IN_DWARF_SP_COLUMN:
6924 return expand_builtin_dwarf_sp_column ();
6925 case BUILT_IN_INIT_DWARF_REG_SIZES:
6926 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6927 return const0_rtx;
6928 #endif
6929 case BUILT_IN_FROB_RETURN_ADDR:
6930 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6931 case BUILT_IN_EXTRACT_RETURN_ADDR:
6932 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6933 case BUILT_IN_EH_RETURN:
6934 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6935 CALL_EXPR_ARG (exp, 1));
6936 return const0_rtx;
6937 #ifdef EH_RETURN_DATA_REGNO
6938 case BUILT_IN_EH_RETURN_DATA_REGNO:
6939 return expand_builtin_eh_return_data_regno (exp);
6940 #endif
6941 case BUILT_IN_EXTEND_POINTER:
6942 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6944 case BUILT_IN_VA_START:
6945 return expand_builtin_va_start (exp);
6946 case BUILT_IN_VA_END:
6947 return expand_builtin_va_end (exp);
6948 case BUILT_IN_VA_COPY:
6949 return expand_builtin_va_copy (exp);
6950 case BUILT_IN_EXPECT:
6951 return expand_builtin_expect (exp, target);
6952 case BUILT_IN_PREFETCH:
6953 expand_builtin_prefetch (exp);
6954 return const0_rtx;
6956 case BUILT_IN_PROFILE_FUNC_ENTER:
6957 return expand_builtin_profile_func (false);
6958 case BUILT_IN_PROFILE_FUNC_EXIT:
6959 return expand_builtin_profile_func (true);
6961 case BUILT_IN_INIT_TRAMPOLINE:
6962 return expand_builtin_init_trampoline (exp);
6963 case BUILT_IN_ADJUST_TRAMPOLINE:
6964 return expand_builtin_adjust_trampoline (exp);
6966 case BUILT_IN_FORK:
6967 case BUILT_IN_EXECL:
6968 case BUILT_IN_EXECV:
6969 case BUILT_IN_EXECLP:
6970 case BUILT_IN_EXECLE:
6971 case BUILT_IN_EXECVP:
6972 case BUILT_IN_EXECVE:
6973 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6974 if (target)
6975 return target;
6976 break;
6978 case BUILT_IN_FETCH_AND_ADD_1:
6979 case BUILT_IN_FETCH_AND_ADD_2:
6980 case BUILT_IN_FETCH_AND_ADD_4:
6981 case BUILT_IN_FETCH_AND_ADD_8:
6982 case BUILT_IN_FETCH_AND_ADD_16:
6983 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6984 target = expand_builtin_sync_operation (mode, exp, PLUS,
6985 false, target, ignore);
6986 if (target)
6987 return target;
6988 break;
6990 case BUILT_IN_FETCH_AND_SUB_1:
6991 case BUILT_IN_FETCH_AND_SUB_2:
6992 case BUILT_IN_FETCH_AND_SUB_4:
6993 case BUILT_IN_FETCH_AND_SUB_8:
6994 case BUILT_IN_FETCH_AND_SUB_16:
6995 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6996 target = expand_builtin_sync_operation (mode, exp, MINUS,
6997 false, target, ignore);
6998 if (target)
6999 return target;
7000 break;
7002 case BUILT_IN_FETCH_AND_OR_1:
7003 case BUILT_IN_FETCH_AND_OR_2:
7004 case BUILT_IN_FETCH_AND_OR_4:
7005 case BUILT_IN_FETCH_AND_OR_8:
7006 case BUILT_IN_FETCH_AND_OR_16:
7007 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
7008 target = expand_builtin_sync_operation (mode, exp, IOR,
7009 false, target, ignore);
7010 if (target)
7011 return target;
7012 break;
7014 case BUILT_IN_FETCH_AND_AND_1:
7015 case BUILT_IN_FETCH_AND_AND_2:
7016 case BUILT_IN_FETCH_AND_AND_4:
7017 case BUILT_IN_FETCH_AND_AND_8:
7018 case BUILT_IN_FETCH_AND_AND_16:
7019 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
7020 target = expand_builtin_sync_operation (mode, exp, AND,
7021 false, target, ignore);
7022 if (target)
7023 return target;
7024 break;
7026 case BUILT_IN_FETCH_AND_XOR_1:
7027 case BUILT_IN_FETCH_AND_XOR_2:
7028 case BUILT_IN_FETCH_AND_XOR_4:
7029 case BUILT_IN_FETCH_AND_XOR_8:
7030 case BUILT_IN_FETCH_AND_XOR_16:
7031 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
7032 target = expand_builtin_sync_operation (mode, exp, XOR,
7033 false, target, ignore);
7034 if (target)
7035 return target;
7036 break;
7038 case BUILT_IN_FETCH_AND_NAND_1:
7039 case BUILT_IN_FETCH_AND_NAND_2:
7040 case BUILT_IN_FETCH_AND_NAND_4:
7041 case BUILT_IN_FETCH_AND_NAND_8:
7042 case BUILT_IN_FETCH_AND_NAND_16:
7043 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
7044 target = expand_builtin_sync_operation (mode, exp, NOT,
7045 false, target, ignore);
7046 if (target)
7047 return target;
7048 break;
7050 case BUILT_IN_ADD_AND_FETCH_1:
7051 case BUILT_IN_ADD_AND_FETCH_2:
7052 case BUILT_IN_ADD_AND_FETCH_4:
7053 case BUILT_IN_ADD_AND_FETCH_8:
7054 case BUILT_IN_ADD_AND_FETCH_16:
7055 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
7056 target = expand_builtin_sync_operation (mode, exp, PLUS,
7057 true, target, ignore);
7058 if (target)
7059 return target;
7060 break;
7062 case BUILT_IN_SUB_AND_FETCH_1:
7063 case BUILT_IN_SUB_AND_FETCH_2:
7064 case BUILT_IN_SUB_AND_FETCH_4:
7065 case BUILT_IN_SUB_AND_FETCH_8:
7066 case BUILT_IN_SUB_AND_FETCH_16:
7067 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
7068 target = expand_builtin_sync_operation (mode, exp, MINUS,
7069 true, target, ignore);
7070 if (target)
7071 return target;
7072 break;
7074 case BUILT_IN_OR_AND_FETCH_1:
7075 case BUILT_IN_OR_AND_FETCH_2:
7076 case BUILT_IN_OR_AND_FETCH_4:
7077 case BUILT_IN_OR_AND_FETCH_8:
7078 case BUILT_IN_OR_AND_FETCH_16:
7079 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
7080 target = expand_builtin_sync_operation (mode, exp, IOR,
7081 true, target, ignore);
7082 if (target)
7083 return target;
7084 break;
7086 case BUILT_IN_AND_AND_FETCH_1:
7087 case BUILT_IN_AND_AND_FETCH_2:
7088 case BUILT_IN_AND_AND_FETCH_4:
7089 case BUILT_IN_AND_AND_FETCH_8:
7090 case BUILT_IN_AND_AND_FETCH_16:
7091 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
7092 target = expand_builtin_sync_operation (mode, exp, AND,
7093 true, target, ignore);
7094 if (target)
7095 return target;
7096 break;
7098 case BUILT_IN_XOR_AND_FETCH_1:
7099 case BUILT_IN_XOR_AND_FETCH_2:
7100 case BUILT_IN_XOR_AND_FETCH_4:
7101 case BUILT_IN_XOR_AND_FETCH_8:
7102 case BUILT_IN_XOR_AND_FETCH_16:
7103 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
7104 target = expand_builtin_sync_operation (mode, exp, XOR,
7105 true, target, ignore);
7106 if (target)
7107 return target;
7108 break;
7110 case BUILT_IN_NAND_AND_FETCH_1:
7111 case BUILT_IN_NAND_AND_FETCH_2:
7112 case BUILT_IN_NAND_AND_FETCH_4:
7113 case BUILT_IN_NAND_AND_FETCH_8:
7114 case BUILT_IN_NAND_AND_FETCH_16:
7115 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
7116 target = expand_builtin_sync_operation (mode, exp, NOT,
7117 true, target, ignore);
7118 if (target)
7119 return target;
7120 break;
7122 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
7123 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
7124 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
7125 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
7126 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
7127 if (mode == VOIDmode)
7128 mode = TYPE_MODE (boolean_type_node);
7129 if (!target || !register_operand (target, mode))
7130 target = gen_reg_rtx (mode);
7132 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
7133 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7134 if (target)
7135 return target;
7136 break;
7138 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
7139 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
7140 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
7141 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
7142 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
7143 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
7144 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7145 if (target)
7146 return target;
7147 break;
7149 case BUILT_IN_LOCK_TEST_AND_SET_1:
7150 case BUILT_IN_LOCK_TEST_AND_SET_2:
7151 case BUILT_IN_LOCK_TEST_AND_SET_4:
7152 case BUILT_IN_LOCK_TEST_AND_SET_8:
7153 case BUILT_IN_LOCK_TEST_AND_SET_16:
7154 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
7155 target = expand_builtin_lock_test_and_set (mode, exp, target);
7156 if (target)
7157 return target;
7158 break;
7160 case BUILT_IN_LOCK_RELEASE_1:
7161 case BUILT_IN_LOCK_RELEASE_2:
7162 case BUILT_IN_LOCK_RELEASE_4:
7163 case BUILT_IN_LOCK_RELEASE_8:
7164 case BUILT_IN_LOCK_RELEASE_16:
7165 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
7166 expand_builtin_lock_release (mode, exp);
7167 return const0_rtx;
7169 case BUILT_IN_SYNCHRONIZE:
7170 expand_builtin_synchronize ();
7171 return const0_rtx;
7173 case BUILT_IN_OBJECT_SIZE:
7174 return expand_builtin_object_size (exp);
7176 case BUILT_IN_MEMCPY_CHK:
7177 case BUILT_IN_MEMPCPY_CHK:
7178 case BUILT_IN_MEMMOVE_CHK:
7179 case BUILT_IN_MEMSET_CHK:
7180 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7181 if (target)
7182 return target;
7183 break;
7185 case BUILT_IN_STRCPY_CHK:
7186 case BUILT_IN_STPCPY_CHK:
7187 case BUILT_IN_STRNCPY_CHK:
7188 case BUILT_IN_STRCAT_CHK:
7189 case BUILT_IN_STRNCAT_CHK:
7190 case BUILT_IN_SNPRINTF_CHK:
7191 case BUILT_IN_VSNPRINTF_CHK:
7192 maybe_emit_chk_warning (exp, fcode);
7193 break;
7195 case BUILT_IN_SPRINTF_CHK:
7196 case BUILT_IN_VSPRINTF_CHK:
7197 maybe_emit_sprintf_chk_warning (exp, fcode);
7198 break;
7200 case BUILT_IN_FREE:
7201 maybe_emit_free_warning (exp);
7202 break;
7204 default: /* just do library call, if unknown builtin */
7205 break;
7208 /* The switch statement above can drop through to cause the function
7209 to be called normally. */
7210 return expand_call (exp, target, ignore);
7213 /* Determine whether a tree node represents a call to a built-in
7214 function. If the tree T is a call to a built-in function with
7215 the right number of arguments of the appropriate types, return
7216 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7217 Otherwise the return value is END_BUILTINS. */
7219 enum built_in_function
7220 builtin_mathfn_code (const_tree t)
7222 const_tree fndecl, arg, parmlist;
7223 const_tree argtype, parmtype;
7224 const_call_expr_arg_iterator iter;
7226 if (TREE_CODE (t) != CALL_EXPR
7227 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7228 return END_BUILTINS;
7230 fndecl = get_callee_fndecl (t);
7231 if (fndecl == NULL_TREE
7232 || TREE_CODE (fndecl) != FUNCTION_DECL
7233 || ! DECL_BUILT_IN (fndecl)
7234 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7235 return END_BUILTINS;
7237 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7238 init_const_call_expr_arg_iterator (t, &iter);
7239 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7241 /* If a function doesn't take a variable number of arguments,
7242 the last element in the list will have type `void'. */
7243 parmtype = TREE_VALUE (parmlist);
7244 if (VOID_TYPE_P (parmtype))
7246 if (more_const_call_expr_args_p (&iter))
7247 return END_BUILTINS;
7248 return DECL_FUNCTION_CODE (fndecl);
7251 if (! more_const_call_expr_args_p (&iter))
7252 return END_BUILTINS;
7254 arg = next_const_call_expr_arg (&iter);
7255 argtype = TREE_TYPE (arg);
7257 if (SCALAR_FLOAT_TYPE_P (parmtype))
7259 if (! SCALAR_FLOAT_TYPE_P (argtype))
7260 return END_BUILTINS;
7262 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7264 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7265 return END_BUILTINS;
7267 else if (POINTER_TYPE_P (parmtype))
7269 if (! POINTER_TYPE_P (argtype))
7270 return END_BUILTINS;
7272 else if (INTEGRAL_TYPE_P (parmtype))
7274 if (! INTEGRAL_TYPE_P (argtype))
7275 return END_BUILTINS;
7277 else
7278 return END_BUILTINS;
7281 /* Variable-length argument list. */
7282 return DECL_FUNCTION_CODE (fndecl);
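/* Illustrative example (editor's note, not part of the original source):
   for a call built from the C expression

     sqrtf (x)

   with X of type float, builtin_mathfn_code returns BUILT_IN_SQRTF,
   while a call whose argument types do not match the builtin's
   prototype yields END_BUILTINS.  */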
7285 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7286 evaluate to a constant. */
7288 static tree
7289 fold_builtin_constant_p (tree arg)
7291 /* We return 1 for a numeric type that's known to be a constant
7292 value at compile-time or for an aggregate type that's a
7293 literal constant. */
7294 STRIP_NOPS (arg);
7296 /* If we know this is a constant, return the constant one. */
7297 if (CONSTANT_CLASS_P (arg)
7298 || (TREE_CODE (arg) == CONSTRUCTOR
7299 && TREE_CONSTANT (arg)))
7300 return integer_one_node;
7301 if (TREE_CODE (arg) == ADDR_EXPR)
7303 tree op = TREE_OPERAND (arg, 0);
7304 if (TREE_CODE (op) == STRING_CST
7305 || (TREE_CODE (op) == ARRAY_REF
7306 && integer_zerop (TREE_OPERAND (op, 1))
7307 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7308 return integer_one_node;
7311 /* If this expression has side effects, show we don't know it to be a
7312 constant. Likewise if it's a pointer or aggregate type, since in
7313 those cases we only want literals, as those are only optimized
7314 when generating RTL, not later.
7315 And finally, if we are compiling an initializer, not code, we
7316 need to return a definite result now; there's not going to be any
7317 more optimization done. */
7318 if (TREE_SIDE_EFFECTS (arg)
7319 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7320 || POINTER_TYPE_P (TREE_TYPE (arg))
7321 || cfun == 0
7322 || folding_initializer)
7323 return integer_zero_node;
7325 return NULL_TREE;
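/* Illustrative sketch (editor's note, not part of the original source)
   of the fold above, at the source level:

     __builtin_constant_p (3)      ->  1
     __builtin_constant_p ("abc")  ->  1
     __builtin_constant_p (f ())   ->  0   (argument has side effects)
     __builtin_constant_p (x)      ->  left unfolded, so later passes
                                       may still prove X constant.  */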
7328 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7329 return it as a truthvalue. */
7331 static tree
7332 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
7334 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7336 fn = built_in_decls[BUILT_IN_EXPECT];
7337 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7338 ret_type = TREE_TYPE (TREE_TYPE (fn));
7339 pred_type = TREE_VALUE (arg_types);
7340 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7342 pred = fold_convert_loc (loc, pred_type, pred);
7343 expected = fold_convert_loc (loc, expected_type, expected);
7344 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
7346 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7347 build_int_cst (ret_type, 0));
7350 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7351 NULL_TREE if no simplification is possible. */
7353 static tree
7354 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
7356 tree inner, fndecl;
7357 enum tree_code code;
7359 /* If this is a builtin_expect within a builtin_expect keep the
7360 inner one. See through a comparison against a constant. It
7361 might have been added to create a truthvalue. */
7362 inner = arg0;
7363 if (COMPARISON_CLASS_P (inner)
7364 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7365 inner = TREE_OPERAND (inner, 0);
7367 if (TREE_CODE (inner) == CALL_EXPR
7368 && (fndecl = get_callee_fndecl (inner))
7369 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7370 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7371 return arg0;
7373 /* Distribute the expected value over short-circuiting operators.
7374 See through the cast from truthvalue_type_node to long. */
7375 inner = arg0;
7376 while (TREE_CODE (inner) == NOP_EXPR
7377 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7378 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7379 inner = TREE_OPERAND (inner, 0);
7381 code = TREE_CODE (inner);
7382 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7384 tree op0 = TREE_OPERAND (inner, 0);
7385 tree op1 = TREE_OPERAND (inner, 1);
7387 op0 = build_builtin_expect_predicate (loc, op0, arg1);
7388 op1 = build_builtin_expect_predicate (loc, op1, arg1);
7389 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7391 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7394 /* If the argument isn't invariant then there's nothing else we can do. */
7395 if (!TREE_CONSTANT (arg0))
7396 return NULL_TREE;
7398 /* If we expect that a comparison against the argument will fold to
7399 a constant, return the constant. In practice, this means a true
7400 constant or the address of a non-weak symbol. */
7401 inner = arg0;
7402 STRIP_NOPS (inner);
7403 if (TREE_CODE (inner) == ADDR_EXPR)
7407 inner = TREE_OPERAND (inner, 0);
7409 while (TREE_CODE (inner) == COMPONENT_REF
7410 || TREE_CODE (inner) == ARRAY_REF);
7411 if ((TREE_CODE (inner) == VAR_DECL
7412 || TREE_CODE (inner) == FUNCTION_DECL)
7413 && DECL_WEAK (inner))
7414 return NULL_TREE;
7417 /* Otherwise, ARG0 already has the proper type for the return value. */
7418 return arg0;
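/* Illustrative sketch (editor's note, not part of the original source):
   the distribution step above rewrites

     __builtin_expect (a && b, 1)

   roughly into

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each arm of the short-circuit operator carries the expectation.  */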
7421 /* Fold a call to __builtin_classify_type with argument ARG. */
7423 static tree
7424 fold_builtin_classify_type (tree arg)
7426 if (arg == 0)
7427 return build_int_cst (NULL_TREE, no_type_class);
7429 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7432 /* Fold a call to __builtin_strlen with argument ARG. */
7434 static tree
7435 fold_builtin_strlen (location_t loc, tree arg)
7437 if (!validate_arg (arg, POINTER_TYPE))
7438 return NULL_TREE;
7439 else
7441 tree len = c_strlen (arg, 0);
7443 if (len)
7445 /* Convert from the internal "sizetype" type to "size_t". */
7446 if (size_type_node)
7447 len = fold_convert_loc (loc, size_type_node, len);
7448 return len;
7451 return NULL_TREE;
7455 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7457 static tree
7458 fold_builtin_inf (location_t loc, tree type, int warn)
7460 REAL_VALUE_TYPE real;
7462 /* __builtin_inff is intended to be usable to define INFINITY on all
7463 targets. If an infinity is not available, INFINITY expands "to a
7464 positive constant of type float that overflows at translation
7465 time", footnote "In this case, using INFINITY will violate the
7466 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7467 Thus we pedwarn to ensure this constraint violation is
7468 diagnosed. */
7469 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7470 pedwarn (loc, 0, "target format does not support infinity");
7472 real_inf (&real);
7473 return build_real (type, real);
7476 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7478 static tree
7479 fold_builtin_nan (tree arg, tree type, int quiet)
7481 REAL_VALUE_TYPE real;
7482 const char *str;
7484 if (!validate_arg (arg, POINTER_TYPE))
7485 return NULL_TREE;
7486 str = c_getstr (arg);
7487 if (!str)
7488 return NULL_TREE;
7490 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7491 return NULL_TREE;
7493 return build_real (type, real);
7496 /* Return true if the floating point expression T has an integer value.
7497 We also allow +Inf, -Inf and NaN to be considered integer values. */
7499 static bool
7500 integer_valued_real_p (tree t)
7502 switch (TREE_CODE (t))
7504 case FLOAT_EXPR:
7505 return true;
7507 case ABS_EXPR:
7508 case SAVE_EXPR:
7509 return integer_valued_real_p (TREE_OPERAND (t, 0));
7511 case COMPOUND_EXPR:
7512 case MODIFY_EXPR:
7513 case BIND_EXPR:
7514 return integer_valued_real_p (TREE_OPERAND (t, 1));
7516 case PLUS_EXPR:
7517 case MINUS_EXPR:
7518 case MULT_EXPR:
7519 case MIN_EXPR:
7520 case MAX_EXPR:
7521 return integer_valued_real_p (TREE_OPERAND (t, 0))
7522 && integer_valued_real_p (TREE_OPERAND (t, 1));
7524 case COND_EXPR:
7525 return integer_valued_real_p (TREE_OPERAND (t, 1))
7526 && integer_valued_real_p (TREE_OPERAND (t, 2));
7528 case REAL_CST:
7529 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7531 case NOP_EXPR:
7533 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7534 if (TREE_CODE (type) == INTEGER_TYPE)
7535 return true;
7536 if (TREE_CODE (type) == REAL_TYPE)
7537 return integer_valued_real_p (TREE_OPERAND (t, 0));
7538 break;
7541 case CALL_EXPR:
7542 switch (builtin_mathfn_code (t))
7544 CASE_FLT_FN (BUILT_IN_CEIL):
7545 CASE_FLT_FN (BUILT_IN_FLOOR):
7546 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7547 CASE_FLT_FN (BUILT_IN_RINT):
7548 CASE_FLT_FN (BUILT_IN_ROUND):
7549 CASE_FLT_FN (BUILT_IN_TRUNC):
7550 return true;
7552 CASE_FLT_FN (BUILT_IN_FMIN):
7553 CASE_FLT_FN (BUILT_IN_FMAX):
7554 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7555 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7557 default:
7558 break;
7560 break;
7562 default:
7563 break;
7565 return false;
7568 /* FNDECL is assumed to be a builtin where truncation can be propagated
7569 across (for instance floor((double)f) == (double)floorf (f)).
7570 Do the transformation for a call with argument ARG. */
7572 static tree
7573 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7575 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7577 if (!validate_arg (arg, REAL_TYPE))
7578 return NULL_TREE;
7580 /* Integer rounding functions are idempotent. */
7581 if (fcode == builtin_mathfn_code (arg))
7582 return arg;
7584 /* If argument is already integer valued, and we don't need to worry
7585 about setting errno, there's no need to perform rounding. */
7586 if (! flag_errno_math && integer_valued_real_p (arg))
7587 return arg;
7589 if (optimize)
7591 tree arg0 = strip_float_extensions (arg);
7592 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7593 tree newtype = TREE_TYPE (arg0);
7594 tree decl;
7596 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7597 && (decl = mathfn_built_in (newtype, fcode)))
7598 return fold_convert_loc (loc, ftype,
7599 build_call_expr_loc (loc, decl, 1,
7600 fold_convert_loc (loc,
7601 newtype,
7602 arg0)));
7604 return NULL_TREE;
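/* Illustrative example (editor's note, not part of the original source):
   with optimization enabled and F of type float, the fold above narrows

     floor ((double) f)   ->   (double) floorf (f)

   because no bits are lost by rounding in the narrower type.  */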
7607 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7608 the argument, for instance lround((double)f) -> lroundf (f).
7609 Do the transformation for a call with argument ARG. */
7611 static tree
7612 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7614 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7616 if (!validate_arg (arg, REAL_TYPE))
7617 return NULL_TREE;
7619 /* If argument is already integer valued, and we don't need to worry
7620 about setting errno, there's no need to perform rounding. */
7621 if (! flag_errno_math && integer_valued_real_p (arg))
7622 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7623 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7625 if (optimize)
7627 tree ftype = TREE_TYPE (arg);
7628 tree arg0 = strip_float_extensions (arg);
7629 tree newtype = TREE_TYPE (arg0);
7630 tree decl;
7632 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7633 && (decl = mathfn_built_in (newtype, fcode)))
7634 return build_call_expr_loc (loc, decl, 1,
7635 fold_convert_loc (loc, newtype, arg0));
7638 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7639 sizeof (long long) == sizeof (long). */
7640 if (TYPE_PRECISION (long_long_integer_type_node)
7641 == TYPE_PRECISION (long_integer_type_node))
7643 tree newfn = NULL_TREE;
7644 switch (fcode)
7646 CASE_FLT_FN (BUILT_IN_LLCEIL):
7647 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7648 break;
7650 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7651 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7652 break;
7654 CASE_FLT_FN (BUILT_IN_LLROUND):
7655 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7656 break;
7658 CASE_FLT_FN (BUILT_IN_LLRINT):
7659 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7660 break;
7662 default:
7663 break;
7666 if (newfn)
7668 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7669 return fold_convert_loc (loc,
7670 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7674 return NULL_TREE;
7677 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7678 return type. Return NULL_TREE if no simplification can be made. */
7680 static tree
7681 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7683 tree res;
7685 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7686 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7687 return NULL_TREE;
7689 /* Calculate the result when the argument is a constant. */
7690 if (TREE_CODE (arg) == COMPLEX_CST
7691 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7692 type, mpfr_hypot)))
7693 return res;
7695 if (TREE_CODE (arg) == COMPLEX_EXPR)
7697 tree real = TREE_OPERAND (arg, 0);
7698 tree imag = TREE_OPERAND (arg, 1);
7700 /* If either part is zero, cabs is fabs of the other. */
7701 if (real_zerop (real))
7702 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7703 if (real_zerop (imag))
7704 return fold_build1_loc (loc, ABS_EXPR, type, real);
7706 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7707 if (flag_unsafe_math_optimizations
7708 && operand_equal_p (real, imag, OEP_PURE_SAME))
7710 const REAL_VALUE_TYPE sqrt2_trunc
7711 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7712 STRIP_NOPS (real);
7713 return fold_build2_loc (loc, MULT_EXPR, type,
7714 fold_build1_loc (loc, ABS_EXPR, type, real),
7715 build_real (type, sqrt2_trunc));
7719 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7720 if (TREE_CODE (arg) == NEGATE_EXPR
7721 || TREE_CODE (arg) == CONJ_EXPR)
7722 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7724 /* Don't do this when optimizing for size. */
7725 if (flag_unsafe_math_optimizations
7726 && optimize && optimize_function_for_speed_p (cfun))
7728 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7730 if (sqrtfn != NULL_TREE)
7732 tree rpart, ipart, result;
7734 arg = builtin_save_expr (arg);
7736 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7737 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7739 rpart = builtin_save_expr (rpart);
7740 ipart = builtin_save_expr (ipart);
7742 result = fold_build2_loc (loc, PLUS_EXPR, type,
7743 fold_build2_loc (loc, MULT_EXPR, type,
7744 rpart, rpart),
7745 fold_build2_loc (loc, MULT_EXPR, type,
7746 ipart, ipart));
7748 return build_call_expr_loc (loc, sqrtfn, 1, result);
7752 return NULL_TREE;
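/* Illustrative examples (editor's note, not part of the original source)
   of the cabs folds above:

     cabs (x + 0i)               ->  fabs (x)
     cabs (x + x*1i)             ->  fabs (x) * sqrt (2)   (unsafe math)
     cabs (-z), cabs (conj (z))  ->  cabs (z)
     cabs (z)                    ->  sqrt (r*r + i*i)      (unsafe math,
                                     optimizing for speed)  */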
7755 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7756 Return NULL_TREE if no simplification can be made. */
7758 static tree
7759 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7762 enum built_in_function fcode;
7763 tree res;
7765 if (!validate_arg (arg, REAL_TYPE))
7766 return NULL_TREE;
7768 /* Calculate the result when the argument is a constant. */
7769 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7770 return res;
7772 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7773 fcode = builtin_mathfn_code (arg);
7774 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7776 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7777 arg = fold_build2_loc (loc, MULT_EXPR, type,
7778 CALL_EXPR_ARG (arg, 0),
7779 build_real (type, dconsthalf));
7780 return build_call_expr_loc (loc, expfn, 1, arg);
7783 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7784 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7786 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7788 if (powfn)
7790 tree arg0 = CALL_EXPR_ARG (arg, 0);
7791 tree tree_root;
7792 /* The inner root was either sqrt or cbrt. */
7793 /* This was a conditional expression but it triggered a bug
7794 in Sun C 5.5. */
7795 REAL_VALUE_TYPE dconstroot;
7796 if (BUILTIN_SQRT_P (fcode))
7797 dconstroot = dconsthalf;
7798 else
7799 dconstroot = dconst_third ();
7801 /* Adjust for the outer root. */
7802 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7803 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7804 tree_root = build_real (type, dconstroot);
7805 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7809 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7810 if (flag_unsafe_math_optimizations
7811 && (fcode == BUILT_IN_POW
7812 || fcode == BUILT_IN_POWF
7813 || fcode == BUILT_IN_POWL))
7815 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7816 tree arg0 = CALL_EXPR_ARG (arg, 0);
7817 tree arg1 = CALL_EXPR_ARG (arg, 1);
7818 tree narg1;
7819 if (!tree_expr_nonnegative_p (arg0))
7820 arg0 = build1 (ABS_EXPR, type, arg0);
7821 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7822 build_real (type, dconsthalf));
7823 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7826 return NULL_TREE;
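/* Illustrative examples (editor's note, not part of the original source),
   all guarded by -funsafe-math-optimizations:

     sqrt (exp (x))     ->  exp (x * 0.5)
     sqrt (sqrt (x))    ->  pow (x, 0.25)
     sqrt (cbrt (x))    ->  pow (x, 1.0/6.0)
     sqrt (pow (x, y))  ->  pow (fabs (x), y * 0.5)  */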
7829 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7830 Return NULL_TREE if no simplification can be made. */
7832 static tree
7833 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7835 const enum built_in_function fcode = builtin_mathfn_code (arg);
7836 tree res;
7838 if (!validate_arg (arg, REAL_TYPE))
7839 return NULL_TREE;
7841 /* Calculate the result when the argument is a constant. */
7842 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7843 return res;
7845 if (flag_unsafe_math_optimizations)
7847 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7848 if (BUILTIN_EXPONENT_P (fcode))
7850 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7851 const REAL_VALUE_TYPE third_trunc =
7852 real_value_truncate (TYPE_MODE (type), dconst_third ());
7853 arg = fold_build2_loc (loc, MULT_EXPR, type,
7854 CALL_EXPR_ARG (arg, 0),
7855 build_real (type, third_trunc));
7856 return build_call_expr_loc (loc, expfn, 1, arg);
7859 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7860 if (BUILTIN_SQRT_P (fcode))
7862 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7864 if (powfn)
7866 tree arg0 = CALL_EXPR_ARG (arg, 0);
7867 tree tree_root;
7868 REAL_VALUE_TYPE dconstroot = dconst_third ();
7870 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7871 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7872 tree_root = build_real (type, dconstroot);
7873 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7877 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7878 if (BUILTIN_CBRT_P (fcode))
7880 tree arg0 = CALL_EXPR_ARG (arg, 0);
7881 if (tree_expr_nonnegative_p (arg0))
7883 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7885 if (powfn)
7887 tree tree_root;
7888 REAL_VALUE_TYPE dconstroot;
7890 real_arithmetic (&dconstroot, MULT_EXPR,
7891 dconst_third_ptr (), dconst_third_ptr ());
7892 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7893 tree_root = build_real (type, dconstroot);
7894 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7899 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7900 if (fcode == BUILT_IN_POW
7901 || fcode == BUILT_IN_POWF
7902 || fcode == BUILT_IN_POWL)
7904 tree arg00 = CALL_EXPR_ARG (arg, 0);
7905 tree arg01 = CALL_EXPR_ARG (arg, 1);
7906 if (tree_expr_nonnegative_p (arg00))
7908 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7909 const REAL_VALUE_TYPE dconstroot
7910 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7911 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7912 build_real (type, dconstroot));
7913 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7917 return NULL_TREE;
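/* Illustrative examples (editor's note, not part of the original source),
   all requiring -funsafe-math-optimizations:

     cbrt (exp (x))     ->  exp (x / 3.0)
     cbrt (sqrt (x))    ->  pow (x, 1.0/6.0)
     cbrt (cbrt (x))    ->  pow (x, 1.0/9.0)   (x known nonnegative)
     cbrt (pow (x, y))  ->  pow (x, y / 3.0)   (x known nonnegative)  */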
7920 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7921 TYPE is the type of the return value. Return NULL_TREE if no
7922 simplification can be made. */
7924 static tree
7925 fold_builtin_cos (location_t loc,
7926 tree arg, tree type, tree fndecl)
7928 tree res, narg;
7930 if (!validate_arg (arg, REAL_TYPE))
7931 return NULL_TREE;
7933 /* Calculate the result when the argument is a constant. */
7934 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7935 return res;
7937 /* Optimize cos(-x) into cos (x). */
7938 if ((narg = fold_strip_sign_ops (arg)))
7939 return build_call_expr_loc (loc, fndecl, 1, narg);
7941 return NULL_TREE;
7944 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7945 Return NULL_TREE if no simplification can be made. */
7947 static tree
7948 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7950 if (validate_arg (arg, REAL_TYPE))
7952 tree res, narg;
7954 /* Calculate the result when the argument is a constant. */
7955 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7956 return res;
7958 /* Optimize cosh(-x) into cosh (x). */
7959 if ((narg = fold_strip_sign_ops (arg)))
7960 return build_call_expr_loc (loc, fndecl, 1, narg);
7963 return NULL_TREE;
7966 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7967 argument ARG. TYPE is the type of the return value. Return
7968 NULL_TREE if no simplification can be made. */
7970 static tree
7971 fold_builtin_ccos (location_t loc,
7972 tree arg, tree type ATTRIBUTE_UNUSED, tree fndecl,
7973 bool hyper ATTRIBUTE_UNUSED)
7975 if (validate_arg (arg, COMPLEX_TYPE)
7976 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7978 tree tmp;
7980 #ifdef HAVE_mpc
7981 /* Calculate the result when the argument is a constant. */
7982 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7983 return tmp;
7984 #endif
7986 /* Optimize fn(-x) into fn(x). */
7987 if ((tmp = fold_strip_sign_ops (arg)))
7988 return build_call_expr_loc (loc, fndecl, 1, tmp);
7991 return NULL_TREE;
7994 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7995 Return NULL_TREE if no simplification can be made. */
7997 static tree
7998 fold_builtin_tan (tree arg, tree type)
8000 enum built_in_function fcode;
8001 tree res;
8003 if (!validate_arg (arg, REAL_TYPE))
8004 return NULL_TREE;
8006 /* Calculate the result when the argument is a constant. */
8007 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
8008 return res;
8010 /* Optimize tan(atan(x)) = x. */
8011 fcode = builtin_mathfn_code (arg);
8012 if (flag_unsafe_math_optimizations
8013 && (fcode == BUILT_IN_ATAN
8014 || fcode == BUILT_IN_ATANF
8015 || fcode == BUILT_IN_ATANL))
8016 return CALL_EXPR_ARG (arg, 0);
8018 return NULL_TREE;
8021 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8022 NULL_TREE if no simplification can be made. */
8024 static tree
8025 fold_builtin_sincos (location_t loc,
8026 tree arg0, tree arg1, tree arg2)
8028 tree type;
8029 tree res, fn, call;
8031 if (!validate_arg (arg0, REAL_TYPE)
8032 || !validate_arg (arg1, POINTER_TYPE)
8033 || !validate_arg (arg2, POINTER_TYPE))
8034 return NULL_TREE;
8036 type = TREE_TYPE (arg0);
8038 /* Calculate the result when the argument is a constant. */
8039 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8040 return res;
8042 /* Canonicalize sincos to cexpi. */
8043 if (!TARGET_C99_FUNCTIONS)
8044 return NULL_TREE;
8045 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8046 if (!fn)
8047 return NULL_TREE;
8049 call = build_call_expr_loc (loc, fn, 1, arg0);
8050 call = builtin_save_expr (call);
8052 return build2 (COMPOUND_EXPR, void_type_node,
8053 build2 (MODIFY_EXPR, void_type_node,
8054 build_fold_indirect_ref_loc (loc, arg1),
8055 build1 (IMAGPART_EXPR, type, call)),
8056 build2 (MODIFY_EXPR, void_type_node,
8057 build_fold_indirect_ref_loc (loc, arg2),
8058 build1 (REALPART_EXPR, type, call)));
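/* Illustrative sketch (editor's note, not part of the original source):
   on targets with C99 runtime support,

     sincos (x, &s, &c);

   is canonicalized to roughly

     tmp = cexpi (x); s = __imag__ tmp; c = __real__ tmp;

   so later passes see a single cexpi call instead of separate sin/cos.  */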
8061 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8062 NULL_TREE if no simplification can be made. */
8064 static tree
8065 fold_builtin_cexp (location_t loc, tree arg0, tree type)
8067 tree rtype;
8068 tree realp, imagp, ifn;
8069 #ifdef HAVE_mpc
8070 tree res;
8071 #endif
8073 if (!validate_arg (arg0, COMPLEX_TYPE)
8074 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8075 return NULL_TREE;
8077 #ifdef HAVE_mpc
8078 /* Calculate the result when the argument is a constant. */
8079 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8080 return res;
8081 #endif
8083 rtype = TREE_TYPE (TREE_TYPE (arg0));
8085 /* In case we can figure out the real part of arg0 and it is constant zero,
8086 fold to cexpi. */
8087 if (!TARGET_C99_FUNCTIONS)
8088 return NULL_TREE;
8089 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8090 if (!ifn)
8091 return NULL_TREE;
8093 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8094 && real_zerop (realp))
8096 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8097 return build_call_expr_loc (loc, ifn, 1, narg);
8100 /* In case we can easily decompose the real and imaginary parts, split cexp
8101 to exp (r) * cexpi (i). */
8102 if (flag_unsafe_math_optimizations
8103 && realp)
8105 tree rfn, rcall, icall;
8107 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8108 if (!rfn)
8109 return NULL_TREE;
8111 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
8112 if (!imagp)
8113 return NULL_TREE;
8115 icall = build_call_expr_loc (loc, ifn, 1, imagp);
8116 icall = builtin_save_expr (icall);
8117 rcall = build_call_expr_loc (loc, rfn, 1, realp);
8118 rcall = builtin_save_expr (rcall);
8119 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8120 fold_build2_loc (loc, MULT_EXPR, rtype,
8121 rcall,
8122 fold_build1_loc (loc, REALPART_EXPR,
8123 rtype, icall)),
8124 fold_build2_loc (loc, MULT_EXPR, rtype,
8125 rcall,
8126 fold_build1_loc (loc, IMAGPART_EXPR,
8127 rtype, icall)));
8130 return NULL_TREE;
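/* Illustrative examples (editor's note, not part of the original source),
   again relying on the internal cexpi function and C99 runtime support:

     cexp (0 + y*1i)  ->  cexpi (y)
     cexp (x + y*1i)  ->  exp (x) * cexpi (y)   (unsafe math)  */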
8133 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8134 Return NULL_TREE if no simplification can be made. */
8136 static tree
8137 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8139 if (!validate_arg (arg, REAL_TYPE))
8140 return NULL_TREE;
8142 /* Optimize trunc of constant value. */
8143 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8145 REAL_VALUE_TYPE r, x;
8146 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8148 x = TREE_REAL_CST (arg);
8149 real_trunc (&r, TYPE_MODE (type), &x);
8150 return build_real (type, r);
8153 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8156 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8157 Return NULL_TREE if no simplification can be made. */
8159 static tree
8160 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8162 if (!validate_arg (arg, REAL_TYPE))
8163 return NULL_TREE;
8165 /* Optimize floor of constant value. */
8166 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8168 REAL_VALUE_TYPE x;
8170 x = TREE_REAL_CST (arg);
8171 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8173 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8174 REAL_VALUE_TYPE r;
8176 real_floor (&r, TYPE_MODE (type), &x);
8177 return build_real (type, r);
8181 /* Fold floor (x) where x is nonnegative to trunc (x). */
8182 if (tree_expr_nonnegative_p (arg))
8184 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8185 if (truncfn)
8186 return build_call_expr_loc (loc, truncfn, 1, arg);
8189 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8192 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8193 Return NULL_TREE if no simplification can be made. */
8195 static tree
8196 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8198 if (!validate_arg (arg, REAL_TYPE))
8199 return NULL_TREE;
8201 /* Optimize ceil of constant value. */
8202 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8204 REAL_VALUE_TYPE x;
8206 x = TREE_REAL_CST (arg);
8207 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8209 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8210 REAL_VALUE_TYPE r;
8212 real_ceil (&r, TYPE_MODE (type), &x);
8213 return build_real (type, r);
8217 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8220 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8221 Return NULL_TREE if no simplification can be made. */
8223 static tree
8224 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8226 if (!validate_arg (arg, REAL_TYPE))
8227 return NULL_TREE;
8229 /* Optimize round of constant value. */
8230 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8232 REAL_VALUE_TYPE x;
8234 x = TREE_REAL_CST (arg);
8235 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8237 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8238 REAL_VALUE_TYPE r;
8240 real_round (&r, TYPE_MODE (type), &x);
8241 return build_real (type, r);
8245 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8248 /* Fold function call to builtin lround, lroundf or lroundl (or the
8249 corresponding long long versions) and other rounding functions. ARG
8250 is the argument to the call. Return NULL_TREE if no simplification
8251 can be made. */
8253 static tree
8254 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8256 if (!validate_arg (arg, REAL_TYPE))
8257 return NULL_TREE;
8259 /* Optimize lround of constant value. */
8260 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8262 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8264 if (real_isfinite (&x))
8266 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8267 tree ftype = TREE_TYPE (arg);
8268 unsigned HOST_WIDE_INT lo2;
8269 HOST_WIDE_INT hi, lo;
8270 REAL_VALUE_TYPE r;
8272 switch (DECL_FUNCTION_CODE (fndecl))
8274 CASE_FLT_FN (BUILT_IN_LFLOOR):
8275 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8276 real_floor (&r, TYPE_MODE (ftype), &x);
8277 break;
8279 CASE_FLT_FN (BUILT_IN_LCEIL):
8280 CASE_FLT_FN (BUILT_IN_LLCEIL):
8281 real_ceil (&r, TYPE_MODE (ftype), &x);
8282 break;
8284 CASE_FLT_FN (BUILT_IN_LROUND):
8285 CASE_FLT_FN (BUILT_IN_LLROUND):
8286 real_round (&r, TYPE_MODE (ftype), &x);
8287 break;
8289 default:
8290 gcc_unreachable ();
8293 REAL_VALUE_TO_INT (&lo, &hi, r);
8294 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8295 return build_int_cst_wide (itype, lo2, hi);
8299 switch (DECL_FUNCTION_CODE (fndecl))
8301 CASE_FLT_FN (BUILT_IN_LFLOOR):
8302 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8303 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8304 if (tree_expr_nonnegative_p (arg))
8305 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8306 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8307 break;
8308 default:;
8311 return fold_fixed_mathfn (loc, fndecl, arg);
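/* Illustrative examples (editor's note, not part of the original source):

     lround (2.6)  ->  3            (constant folded via real_round)
     lfloor (x)    ->  (long) x     when X is known nonnegative
     llround (x)   ->  lround (x)   on targets where long long and long
                                    have the same precision (see
                                    fold_fixed_mathfn above).  */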
8314 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8315 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8316 the argument to the call. Return NULL_TREE if no simplification can
8317 be made. */
8319 static tree
8320 fold_builtin_bitop (tree fndecl, tree arg)
8322 if (!validate_arg (arg, INTEGER_TYPE))
8323 return NULL_TREE;
8325 /* Optimize for constant argument. */
8326 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8328 HOST_WIDE_INT hi, width, result;
8329 unsigned HOST_WIDE_INT lo;
8330 tree type;
8332 type = TREE_TYPE (arg);
8333 width = TYPE_PRECISION (type);
8334 lo = TREE_INT_CST_LOW (arg);
8336 /* Clear all the bits that are beyond the type's precision. */
8337 if (width > HOST_BITS_PER_WIDE_INT)
8339 hi = TREE_INT_CST_HIGH (arg);
8340 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8341 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8343 else
8345 hi = 0;
8346 if (width < HOST_BITS_PER_WIDE_INT)
8347 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8350 switch (DECL_FUNCTION_CODE (fndecl))
8352 CASE_INT_FN (BUILT_IN_FFS):
8353 if (lo != 0)
8354 result = exact_log2 (lo & -lo) + 1;
8355 else if (hi != 0)
8356 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8357 else
8358 result = 0;
8359 break;
8361 CASE_INT_FN (BUILT_IN_CLZ):
8362 if (hi != 0)
8363 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8364 else if (lo != 0)
8365 result = width - floor_log2 (lo) - 1;
8366 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8367 result = width;
8368 break;
8370 CASE_INT_FN (BUILT_IN_CTZ):
8371 if (lo != 0)
8372 result = exact_log2 (lo & -lo);
8373 else if (hi != 0)
8374 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8375 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8376 result = width;
8377 break;
8379 CASE_INT_FN (BUILT_IN_POPCOUNT):
8380 result = 0;
8381 while (lo)
8382 result++, lo &= lo - 1;
8383 while (hi)
8384 result++, hi &= hi - 1;
8385 break;
8387 CASE_INT_FN (BUILT_IN_PARITY):
8388 result = 0;
8389 while (lo)
8390 result++, lo &= lo - 1;
8391 while (hi)
8392 result++, hi &= hi - 1;
8393 result &= 1;
8394 break;
8396 default:
8397 gcc_unreachable ();
8400 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8403 return NULL_TREE;
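/* Illustrative examples (editor's note, not part of the original source)
   of the constant folds above, for a 32-bit int argument:

     __builtin_ffs (0x18)       ->  4
     __builtin_clz (0x18)       ->  27
     __builtin_ctz (0x18)       ->  3
     __builtin_popcount (0x18)  ->  2
     __builtin_parity (0x18)    ->  0  */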
8406 /* Fold function call to builtin_bswap and the long and long long
8407 variants. Return NULL_TREE if no simplification can be made. */
8408 static tree
8409 fold_builtin_bswap (tree fndecl, tree arg)
8411 if (! validate_arg (arg, INTEGER_TYPE))
8412 return NULL_TREE;
8414 /* Optimize constant value. */
8415 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8417 HOST_WIDE_INT hi, width, r_hi = 0;
8418 unsigned HOST_WIDE_INT lo, r_lo = 0;
8419 tree type;
8421 type = TREE_TYPE (arg);
8422 width = TYPE_PRECISION (type);
8423 lo = TREE_INT_CST_LOW (arg);
8424 hi = TREE_INT_CST_HIGH (arg);
8426 switch (DECL_FUNCTION_CODE (fndecl))
8428 case BUILT_IN_BSWAP32:
8429 case BUILT_IN_BSWAP64:
8431 int s;
8433 for (s = 0; s < width; s += 8)
8435 int d = width - s - 8;
8436 unsigned HOST_WIDE_INT byte;
8438 if (s < HOST_BITS_PER_WIDE_INT)
8439 byte = (lo >> s) & 0xff;
8440 else
8441 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8443 if (d < HOST_BITS_PER_WIDE_INT)
8444 r_lo |= byte << d;
8445 else
8446 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8450 break;
8452 default:
8453 gcc_unreachable ();
8456 if (width < HOST_BITS_PER_WIDE_INT)
8457 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8458 else
8459 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8462 return NULL_TREE;
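/* Illustrative example (editor's note, not part of the original source):

     __builtin_bswap32 (0x12345678)  ->  0x78563412

   computed one byte at a time by the loop above.  */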
8465 /* A subroutine of fold_builtin to fold the various logarithmic
8466 functions. Return NULL_TREE if no simplification can be made.
8467 FUNC is the corresponding MPFR logarithm function. */
8469 static tree
8470 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8471 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8473 if (validate_arg (arg, REAL_TYPE))
8475 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8476 tree res;
8477 const enum built_in_function fcode = builtin_mathfn_code (arg);
8479 /* Calculate the result when the argument is a constant. */
8480 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8481 return res;
8483 /* Special case, optimize logN(expN(x)) = x. */
8484 if (flag_unsafe_math_optimizations
8485 && ((func == mpfr_log
8486 && (fcode == BUILT_IN_EXP
8487 || fcode == BUILT_IN_EXPF
8488 || fcode == BUILT_IN_EXPL))
8489 || (func == mpfr_log2
8490 && (fcode == BUILT_IN_EXP2
8491 || fcode == BUILT_IN_EXP2F
8492 || fcode == BUILT_IN_EXP2L))
8493 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8494 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8496 /* Optimize logN(func()) for various exponential functions. We
8497 want to determine the value "x" and the power "exponent" in
8498 order to transform logN(x**exponent) into exponent*logN(x). */
8499 if (flag_unsafe_math_optimizations)
8501 tree exponent = 0, x = 0;
8503 switch (fcode)
8505 CASE_FLT_FN (BUILT_IN_EXP):
8506 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8507 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8508 dconst_e ()));
8509 exponent = CALL_EXPR_ARG (arg, 0);
8510 break;
8511 CASE_FLT_FN (BUILT_IN_EXP2):
8512 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8513 x = build_real (type, dconst2);
8514 exponent = CALL_EXPR_ARG (arg, 0);
8515 break;
8516 CASE_FLT_FN (BUILT_IN_EXP10):
8517 CASE_FLT_FN (BUILT_IN_POW10):
8518 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8520 REAL_VALUE_TYPE dconst10;
8521 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8522 x = build_real (type, dconst10);
8524 exponent = CALL_EXPR_ARG (arg, 0);
8525 break;
8526 CASE_FLT_FN (BUILT_IN_SQRT):
8527 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8528 x = CALL_EXPR_ARG (arg, 0);
8529 exponent = build_real (type, dconsthalf);
8530 break;
8531 CASE_FLT_FN (BUILT_IN_CBRT):
8532 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8533 x = CALL_EXPR_ARG (arg, 0);
8534 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8535 dconst_third ()));
8536 break;
8537 CASE_FLT_FN (BUILT_IN_POW):
8538 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8539 x = CALL_EXPR_ARG (arg, 0);
8540 exponent = CALL_EXPR_ARG (arg, 1);
8541 break;
8542 default:
8543 break;
8546 /* Now perform the optimization. */
8547 if (x && exponent)
8549 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8550 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8555 return NULL_TREE;
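/* Illustrative examples (editor's note, not part of the original source),
   guarded by -funsafe-math-optimizations:

     log (exp (x))     ->  x
     log2 (exp2 (x))   ->  x
     log (sqrt (x))    ->  0.5 * log (x)
     log (pow (x, y))  ->  y * log (x)
     log10 (exp (x))   ->  x * log10 (e)  */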
8558 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8559 NULL_TREE if no simplification can be made. */
8561 static tree
8562 fold_builtin_hypot (location_t loc, tree fndecl,
8563 tree arg0, tree arg1, tree type)
8565 tree res, narg0, narg1;
8567 if (!validate_arg (arg0, REAL_TYPE)
8568 || !validate_arg (arg1, REAL_TYPE))
8569 return NULL_TREE;
8571 /* Calculate the result when the argument is a constant. */
8572 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8573 return res;
8575 /* If either argument to hypot has a negate or abs, strip that off.
8576 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8577 narg0 = fold_strip_sign_ops (arg0);
8578 narg1 = fold_strip_sign_ops (arg1);
8579 if (narg0 || narg1)
8581 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8582 narg1 ? narg1 : arg1);
8585 /* If either argument is zero, hypot is fabs of the other. */
8586 if (real_zerop (arg0))
8587 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8588 else if (real_zerop (arg1))
8589 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8591 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8592 if (flag_unsafe_math_optimizations
8593 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8595 const REAL_VALUE_TYPE sqrt2_trunc
8596 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8597 return fold_build2_loc (loc, MULT_EXPR, type,
8598 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8599 build_real (type, sqrt2_trunc));
8602 return NULL_TREE;
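/* Illustrative examples (editor's note, not part of the original source):

     hypot (-x, fabs (y))  ->  hypot (x, y)
     hypot (x, 0.0)        ->  fabs (x)
     hypot (x, x)          ->  fabs (x) * sqrt (2)   (unsafe math)  */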
8606 /* Fold a builtin function call to pow, powf, or powl. Return
8607 NULL_TREE if no simplification can be made. */
8608 static tree
8609 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8611 tree res;
8613 if (!validate_arg (arg0, REAL_TYPE)
8614 || !validate_arg (arg1, REAL_TYPE))
8615 return NULL_TREE;
8617 /* Calculate the result when the argument is a constant. */
8618 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8619 return res;
8621 /* Optimize pow(1.0,y) = 1.0. */
8622 if (real_onep (arg0))
8623 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8625 if (TREE_CODE (arg1) == REAL_CST
8626 && !TREE_OVERFLOW (arg1))
8628 REAL_VALUE_TYPE cint;
8629 REAL_VALUE_TYPE c;
8630 HOST_WIDE_INT n;
8632 c = TREE_REAL_CST (arg1);
8634 /* Optimize pow(x,0.0) = 1.0. */
8635 if (REAL_VALUES_EQUAL (c, dconst0))
8636 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8637 arg0);
8639 /* Optimize pow(x,1.0) = x. */
8640 if (REAL_VALUES_EQUAL (c, dconst1))
8641 return arg0;
8643 /* Optimize pow(x,-1.0) = 1.0/x. */
8644 if (REAL_VALUES_EQUAL (c, dconstm1))
8645 return fold_build2_loc (loc, RDIV_EXPR, type,
8646 build_real (type, dconst1), arg0);
8648 /* Optimize pow(x,0.5) = sqrt(x). */
8649 if (flag_unsafe_math_optimizations
8650 && REAL_VALUES_EQUAL (c, dconsthalf))
8652 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8654 if (sqrtfn != NULL_TREE)
8655 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8658 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8659 if (flag_unsafe_math_optimizations)
8661 const REAL_VALUE_TYPE dconstroot
8662 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8664 if (REAL_VALUES_EQUAL (c, dconstroot))
8666 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8667 if (cbrtfn != NULL_TREE)
8668 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8672 /* Check for an integer exponent. */
8673 n = real_to_integer (&c);
8674 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8675 if (real_identical (&c, &cint))
8677 /* Attempt to evaluate pow at compile-time, unless this should
8678 raise an exception. */
8679 if (TREE_CODE (arg0) == REAL_CST
8680 && !TREE_OVERFLOW (arg0)
8681 && (n > 0
8682 || (!flag_trapping_math && !flag_errno_math)
8683 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8685 REAL_VALUE_TYPE x;
8686 bool inexact;
8688 x = TREE_REAL_CST (arg0);
8689 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8690 if (flag_unsafe_math_optimizations || !inexact)
8691 return build_real (type, x);
8694 /* Strip sign ops from even integer powers. */
8695 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8697 tree narg0 = fold_strip_sign_ops (arg0);
8698 if (narg0)
8699 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8704 if (flag_unsafe_math_optimizations)
8706 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8708 /* Optimize pow(expN(x),y) = expN(x*y). */
8709 if (BUILTIN_EXPONENT_P (fcode))
8711 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8712 tree arg = CALL_EXPR_ARG (arg0, 0);
8713 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8714 return build_call_expr_loc (loc, expfn, 1, arg);
8717 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8718 if (BUILTIN_SQRT_P (fcode))
8720 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8721 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8722 build_real (type, dconsthalf));
8723 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8726 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8727 if (BUILTIN_CBRT_P (fcode))
8729 tree arg = CALL_EXPR_ARG (arg0, 0);
8730 if (tree_expr_nonnegative_p (arg))
8732 const REAL_VALUE_TYPE dconstroot
8733 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8734 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8735 build_real (type, dconstroot));
8736 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8740 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8741 if (fcode == BUILT_IN_POW
8742 || fcode == BUILT_IN_POWF
8743 || fcode == BUILT_IN_POWL)
8745 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8746 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8747 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8748 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8752 return NULL_TREE;
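/* Illustrative examples (editor's note, not part of the original source):

     pow (x, 0.0)         ->  1.0
     pow (x, 1.0)         ->  x
     pow (x, -1.0)        ->  1.0 / x
     pow (x, 0.5)         ->  sqrt (x)        (unsafe math)
     pow (exp (x), y)     ->  exp (x * y)     (unsafe math)
     pow (pow (x, y), z)  ->  pow (x, y * z)  (unsafe math)  */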
8755 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8756 Return NULL_TREE if no simplification can be made. */
8757 static tree
8758 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8759 tree arg0, tree arg1, tree type)
8761 if (!validate_arg (arg0, REAL_TYPE)
8762 || !validate_arg (arg1, INTEGER_TYPE))
8763 return NULL_TREE;
8765 /* Optimize pow(1.0,y) = 1.0. */
8766 if (real_onep (arg0))
8767 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8769 if (host_integerp (arg1, 0))
8771 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8773 /* Evaluate powi at compile-time. */
8774 if (TREE_CODE (arg0) == REAL_CST
8775 && !TREE_OVERFLOW (arg0))
8777 REAL_VALUE_TYPE x;
8778 x = TREE_REAL_CST (arg0);
8779 real_powi (&x, TYPE_MODE (type), &x, c);
8780 return build_real (type, x);
8783 /* Optimize pow(x,0) = 1.0. */
8784 if (c == 0)
8785 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8786 arg0);
8788 /* Optimize pow(x,1) = x. */
8789 if (c == 1)
8790 return arg0;
8792 /* Optimize pow(x,-1) = 1.0/x. */
8793 if (c == -1)
8794 return fold_build2_loc (loc, RDIV_EXPR, type,
8795 build_real (type, dconst1), arg0);
8798 return NULL_TREE;
8801 /* A subroutine of fold_builtin to fold the various exponent
8802 functions. Return NULL_TREE if no simplification can be made.
8803 FUNC is the corresponding MPFR exponent function. */
8805 static tree
8806 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8807 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8809 if (validate_arg (arg, REAL_TYPE))
8811 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8812 tree res;
8814 /* Calculate the result when the argument is a constant. */
8815 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8816 return res;
8818 /* Optimize expN(logN(x)) = x. */
8819 if (flag_unsafe_math_optimizations)
8821 const enum built_in_function fcode = builtin_mathfn_code (arg);
8823 if ((func == mpfr_exp
8824 && (fcode == BUILT_IN_LOG
8825 || fcode == BUILT_IN_LOGF
8826 || fcode == BUILT_IN_LOGL))
8827 || (func == mpfr_exp2
8828 && (fcode == BUILT_IN_LOG2
8829 || fcode == BUILT_IN_LOG2F
8830 || fcode == BUILT_IN_LOG2L))
8831 || (func == mpfr_exp10
8832 && (fcode == BUILT_IN_LOG10
8833 || fcode == BUILT_IN_LOG10F
8834 || fcode == BUILT_IN_LOG10L)))
8835 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8839 return NULL_TREE;
8842 /* Return true if VAR is a VAR_DECL or a component thereof. */
8844 static bool
8845 var_decl_component_p (tree var)
8847 tree inner = var;
8848 while (handled_component_p (inner))
8849 inner = TREE_OPERAND (inner, 0);
8850 return SSA_VAR_P (inner);
8853 /* Fold function call to builtin memset. Return
8854 NULL_TREE if no simplification can be made. */
8856 static tree
8857 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8858 tree type, bool ignore)
8860 tree var, ret, etype;
8861 unsigned HOST_WIDE_INT length, cval;
8863 if (! validate_arg (dest, POINTER_TYPE)
8864 || ! validate_arg (c, INTEGER_TYPE)
8865 || ! validate_arg (len, INTEGER_TYPE))
8866 return NULL_TREE;
8868 if (! host_integerp (len, 1))
8869 return NULL_TREE;
8871 /* If the LEN parameter is zero, return DEST. */
8872 if (integer_zerop (len))
8873 return omit_one_operand_loc (loc, type, dest, c);
8875 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8876 return NULL_TREE;
8878 var = dest;
8879 STRIP_NOPS (var);
8880 if (TREE_CODE (var) != ADDR_EXPR)
8881 return NULL_TREE;
8883 var = TREE_OPERAND (var, 0);
8884 if (TREE_THIS_VOLATILE (var))
8885 return NULL_TREE;
8887 etype = TREE_TYPE (var);
8888 if (TREE_CODE (etype) == ARRAY_TYPE)
8889 etype = TREE_TYPE (etype);
8891 if (!INTEGRAL_TYPE_P (etype)
8892 && !POINTER_TYPE_P (etype))
8893 return NULL_TREE;
8895 if (! var_decl_component_p (var))
8896 return NULL_TREE;
8898 length = tree_low_cst (len, 1);
8899 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8900 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8901 < (int) length)
8902 return NULL_TREE;
8904 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8905 return NULL_TREE;
8907 if (integer_zerop (c))
8908 cval = 0;
8909 else
8911 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8912 return NULL_TREE;
8914 cval = tree_low_cst (c, 1);
8915 cval &= 0xff;
8916 cval |= cval << 8;
8917 cval |= cval << 16;
8918 cval |= (cval << 31) << 1;
8921 ret = build_int_cst_type (etype, cval);
8922 var = build_fold_indirect_ref_loc (loc,
8923 fold_convert_loc (loc,
8924 build_pointer_type (etype),
8925 dest));
8926 ret = build2 (MODIFY_EXPR, etype, var, ret);
8927 if (ignore)
8928 return ret;
8930 return omit_one_operand_loc (loc, type, dest, ret);
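/* Illustrative sketch (editor's note, not part of the original source):
   for a suitably aligned 32-bit int I, the fold above turns

     memset (&i, 0xab, sizeof (int));

   into a single store of the replicated byte value

     i = 0xabababab;

   avoiding the library call entirely.  */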
8933 /* Fold function call to builtin bzero. Return
8934 NULL_TREE if no simplification can be made. */
8936 static tree
8937 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8939 if (! validate_arg (dest, POINTER_TYPE)
8940 || ! validate_arg (size, INTEGER_TYPE))
8941 return NULL_TREE;
8943 if (!ignore)
8944 return NULL_TREE;
8946 /* New argument list transforming bzero(ptr x, int y) to
8947 memset(ptr x, int 0, size_t y). This is done this way
8948 so that if it isn't expanded inline, we fall back to
8949 calling bzero instead of memset. */
8951 return fold_builtin_memset (loc, dest, integer_zero_node,
8952 fold_convert_loc (loc, sizetype, size),
8953 void_type_node, ignore);
8956 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8957 NULL_TREE if no simplification can be made.
8958 If ENDP is 0, return DEST (like memcpy).
8959 If ENDP is 1, return DEST+LEN (like mempcpy).
8960 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8961 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8962 (memmove). */
8964 static tree
8965 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8966 tree len, tree type, bool ignore, int endp)
8968 tree destvar, srcvar, expr;
8970 if (! validate_arg (dest, POINTER_TYPE)
8971 || ! validate_arg (src, POINTER_TYPE)
8972 || ! validate_arg (len, INTEGER_TYPE))
8973 return NULL_TREE;
8975 /* If the LEN parameter is zero, return DEST. */
8976 if (integer_zerop (len))
8977 return omit_one_operand_loc (loc, type, dest, src);
8979 /* If SRC and DEST are the same (and not volatile), return
8980 DEST{,+LEN,+LEN-1}. */
8981 if (operand_equal_p (src, dest, 0))
8982 expr = len;
8983 else
8985 tree srctype, desttype;
8986 int src_align, dest_align;
8988 if (endp == 3)
8990 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8991 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8993 /* Both DEST and SRC must be pointer types.
8994 ??? This is what old code did. Is the testing for pointer types
8995 really mandatory?
8997 If either SRC is readonly or length is 1, we can use memcpy. */
8998 if (!dest_align || !src_align)
8999 return NULL_TREE;
9000 if (readonly_data_expr (src)
9001 || (host_integerp (len, 1)
9002 && (MIN (src_align, dest_align) / BITS_PER_UNIT
9003 >= tree_low_cst (len, 1))))
9005 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9006 if (!fn)
9007 return NULL_TREE;
9008 return build_call_expr_loc (loc, fn, 3, dest, src, len);
9011 /* If *src and *dest can't overlap, optimize into memcpy as well. */
9012 srcvar = build_fold_indirect_ref_loc (loc, src);
9013 destvar = build_fold_indirect_ref_loc (loc, dest);
9014 if (srcvar
9015 && !TREE_THIS_VOLATILE (srcvar)
9016 && destvar
9017 && !TREE_THIS_VOLATILE (destvar))
9019 tree src_base, dest_base, fn;
9020 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
9021 HOST_WIDE_INT size = -1;
9022 HOST_WIDE_INT maxsize = -1;
9024 src_base = srcvar;
9025 if (handled_component_p (src_base))
9026 src_base = get_ref_base_and_extent (src_base, &src_offset,
9027 &size, &maxsize);
9028 dest_base = destvar;
9029 if (handled_component_p (dest_base))
9030 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
9031 &size, &maxsize);
9032 if (host_integerp (len, 1))
9034 maxsize = tree_low_cst (len, 1);
9035 if (maxsize
9036 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
9037 maxsize = -1;
9038 else
9039 maxsize *= BITS_PER_UNIT;
9041 else
9042 maxsize = -1;
9043 if (SSA_VAR_P (src_base)
9044 && SSA_VAR_P (dest_base))
9046 if (operand_equal_p (src_base, dest_base, 0)
9047 && ranges_overlap_p (src_offset, maxsize,
9048 dest_offset, maxsize))
9049 return NULL_TREE;
9051 else if (TREE_CODE (src_base) == INDIRECT_REF
9052 && TREE_CODE (dest_base) == INDIRECT_REF)
9054 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
9055 TREE_OPERAND (dest_base, 0), 0)
9056 || ranges_overlap_p (src_offset, maxsize,
9057 dest_offset, maxsize))
9058 return NULL_TREE;
9060 else
9061 return NULL_TREE;
9063 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9064 if (!fn)
9065 return NULL_TREE;
9066 return build_call_expr_loc (loc, fn, 3, dest, src, len);
9068 return NULL_TREE;
9071 if (!host_integerp (len, 0))
9072 return NULL_TREE;
9073 /* FIXME:
9074 This logic loses for arguments like (type *)malloc (sizeof (type)),
9075 since we strip the casts from the VOID return value of malloc.
9076 Perhaps we ought to inherit type from non-VOID argument here? */
9077 STRIP_NOPS (src);
9078 STRIP_NOPS (dest);
9079 /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here. */
9080 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
9082 tree tem = TREE_OPERAND (src, 0);
9083 STRIP_NOPS (tem);
9084 if (tem != TREE_OPERAND (src, 0))
9085 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
9087 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
9089 tree tem = TREE_OPERAND (dest, 0);
9090 STRIP_NOPS (tem);
9091 if (tem != TREE_OPERAND (dest, 0))
9092 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
9094 srctype = TREE_TYPE (TREE_TYPE (src));
9095 if (srctype
9096 && TREE_CODE (srctype) == ARRAY_TYPE
9097 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
9099 srctype = TREE_TYPE (srctype);
9100 STRIP_NOPS (src);
9101 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
9103 desttype = TREE_TYPE (TREE_TYPE (dest));
9104 if (desttype
9105 && TREE_CODE (desttype) == ARRAY_TYPE
9106 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
9108 desttype = TREE_TYPE (desttype);
9109 STRIP_NOPS (dest);
9110 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
9112 if (!srctype || !desttype
9113 || !TYPE_SIZE_UNIT (srctype)
9114 || !TYPE_SIZE_UNIT (desttype)
9115 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
9116 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
9117 || TYPE_VOLATILE (srctype)
9118 || TYPE_VOLATILE (desttype))
9119 return NULL_TREE;
9121 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
9122 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
9123 if (dest_align < (int) TYPE_ALIGN (desttype)
9124 || src_align < (int) TYPE_ALIGN (srctype))
9125 return NULL_TREE;
9127 if (!ignore)
9128 dest = builtin_save_expr (dest);
9130 srcvar = NULL_TREE;
9131 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
9133 srcvar = build_fold_indirect_ref_loc (loc, src);
9134 if (TREE_THIS_VOLATILE (srcvar))
9135 return NULL_TREE;
9136 else if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
9137 srcvar = NULL_TREE;
9138 /* With memcpy, it is possible to bypass aliasing rules, so without
9139 this check a test such as execute/20060930-2.c would be misoptimized,
9140 because it uses a conflicting alias set to hold the argument for the
9141 memcpy call. This check is probably unnecessary with
9142 -fno-strict-aliasing. Similarly for destvar. See also
9143 PR29286. */
9144 else if (!var_decl_component_p (srcvar))
9145 srcvar = NULL_TREE;
9148 destvar = NULL_TREE;
9149 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
9151 destvar = build_fold_indirect_ref_loc (loc, dest);
9152 if (TREE_THIS_VOLATILE (destvar))
9153 return NULL_TREE;
9154 else if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
9155 destvar = NULL_TREE;
9156 else if (!var_decl_component_p (destvar))
9157 destvar = NULL_TREE;
9160 if (srcvar == NULL_TREE && destvar == NULL_TREE)
9161 return NULL_TREE;
9163 if (srcvar == NULL_TREE)
9165 tree srcptype;
9166 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
9167 return NULL_TREE;
9169 srctype = build_qualified_type (desttype, 0);
9170 if (src_align < (int) TYPE_ALIGN (srctype))
9172 if (AGGREGATE_TYPE_P (srctype)
9173 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
9174 return NULL_TREE;
9176 srctype = build_variant_type_copy (srctype);
9177 TYPE_ALIGN (srctype) = src_align;
9178 TYPE_USER_ALIGN (srctype) = 1;
9179 TYPE_PACKED (srctype) = 1;
9181 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
9182 src = fold_convert_loc (loc, srcptype, src);
9183 srcvar = build_fold_indirect_ref_loc (loc, src);
9185 else if (destvar == NULL_TREE)
9187 tree destptype;
9188 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
9189 return NULL_TREE;
9191 desttype = build_qualified_type (srctype, 0);
9192 if (dest_align < (int) TYPE_ALIGN (desttype))
9194 if (AGGREGATE_TYPE_P (desttype)
9195 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
9196 return NULL_TREE;
9198 desttype = build_variant_type_copy (desttype);
9199 TYPE_ALIGN (desttype) = dest_align;
9200 TYPE_USER_ALIGN (desttype) = 1;
9201 TYPE_PACKED (desttype) = 1;
9203 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
9204 dest = fold_convert_loc (loc, destptype, dest);
9205 destvar = build_fold_indirect_ref_loc (loc, dest);
9208 if (srctype == desttype
9209 || (gimple_in_ssa_p (cfun)
9210 && useless_type_conversion_p (desttype, srctype)))
9211 expr = srcvar;
9212 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
9213 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
9214 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
9215 || POINTER_TYPE_P (TREE_TYPE (destvar))))
9216 expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
9217 else
9218 expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
9219 TREE_TYPE (destvar), srcvar);
9220 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
9223 if (ignore)
9224 return expr;
9226 if (endp == 0 || endp == 3)
9227 return omit_one_operand_loc (loc, type, dest, expr);
9229 if (expr == len)
9230 expr = NULL_TREE;
9232 if (endp == 2)
9233 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
9234 ssize_int (1));
9236 len = fold_convert_loc (loc, sizetype, len);
9237 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
9238 dest = fold_convert_loc (loc, type, dest);
9239 if (expr)
9240 dest = omit_one_operand_loc (loc, type, dest, expr);
9241 return dest;
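/* As a rough illustration of the folding above (hypothetical user code):
   when the length equals the size of the copied object and the types are
   compatible,

     struct S a, b;
     memcpy (&a, &b, sizeof (struct S));

   is reduced to the plain assignment

     a = b;

   and the mempcpy variant additionally folds its result to roughly
   (char *) &a + sizeof (struct S).  */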
9244 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9245 If LEN is not NULL, it represents the length of the string to be
9246 copied. Return NULL_TREE if no simplification can be made. */
9248 tree
9249 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
9251 tree fn;
9253 if (!validate_arg (dest, POINTER_TYPE)
9254 || !validate_arg (src, POINTER_TYPE))
9255 return NULL_TREE;
9257 /* If SRC and DEST are the same (and not volatile), return DEST. */
9258 if (operand_equal_p (src, dest, 0))
9259 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
9261 if (optimize_function_for_size_p (cfun))
9262 return NULL_TREE;
9264 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9265 if (!fn)
9266 return NULL_TREE;
9268 if (!len)
9270 len = c_strlen (src, 1);
9271 if (! len || TREE_SIDE_EFFECTS (len))
9272 return NULL_TREE;
9275 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
9276 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9277 build_call_expr_loc (loc, fn, 3, dest, src, len));
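/* For example (hypothetical code), when SRC is a string literal of known
   length and we are not optimizing for size,

     strcpy (buf, "hi");

   is rewritten as the fixed-size copy

     memcpy (buf, "hi", 3);

   which includes the terminating NUL in the length.  */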
9280 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9281 If SLEN is not NULL, it represents the length of the source string.
9282 Return NULL_TREE if no simplification can be made. */
9284 tree
9285 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9286 tree src, tree len, tree slen)
9288 tree fn;
9290 if (!validate_arg (dest, POINTER_TYPE)
9291 || !validate_arg (src, POINTER_TYPE)
9292 || !validate_arg (len, INTEGER_TYPE))
9293 return NULL_TREE;
9295 /* If the LEN parameter is zero, return DEST. */
9296 if (integer_zerop (len))
9297 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9299 /* We can't compare slen with len as constants below if len is not a
9300 constant. */
9301 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9302 return NULL_TREE;
9304 if (!slen)
9305 slen = c_strlen (src, 1);
9307 /* Now, we must be passed a constant src ptr parameter. */
9308 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9309 return NULL_TREE;
9311 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9313 /* We do not support simplification of this case, though we do
9314 support it when expanding trees into RTL. */
9315 /* FIXME: generate a call to __builtin_memset. */
9316 if (tree_int_cst_lt (slen, len))
9317 return NULL_TREE;
9319 /* OK, transform into a builtin memcpy. */
9320 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9321 if (!fn)
9322 return NULL_TREE;
9323 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9324 build_call_expr_loc (loc, fn, 3, dest, src, len));
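/* For example (hypothetical code), with a constant length and a constant
   source at least LEN-1 characters long,

     strncpy (buf, "hello", 4);

   becomes

     memcpy (buf, "hello", 4);

   whereas strncpy (buf, "hi", 8) is left untouched here, because the
   required NUL padding would need a memset as well (see the FIXME).  */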
9327 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9328 arguments to the call, and TYPE is its return type.
9329 Return NULL_TREE if no simplification can be made. */
9331 static tree
9332 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9334 if (!validate_arg (arg1, POINTER_TYPE)
9335 || !validate_arg (arg2, INTEGER_TYPE)
9336 || !validate_arg (len, INTEGER_TYPE))
9337 return NULL_TREE;
9338 else
9340 const char *p1;
9342 if (TREE_CODE (arg2) != INTEGER_CST
9343 || !host_integerp (len, 1))
9344 return NULL_TREE;
9346 p1 = c_getstr (arg1);
9347 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9349 char c;
9350 const char *r;
9351 tree tem;
9353 if (target_char_cast (arg2, &c))
9354 return NULL_TREE;
9356 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
9358 if (r == NULL)
9359 return build_int_cst (TREE_TYPE (arg1), 0);
9361 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
9362 size_int (r - p1));
9363 return fold_convert_loc (loc, type, tem);
9365 return NULL_TREE;
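/* For example, with constant arguments the search happens at compile time
   (assuming matching host and target character sets):

     memchr ("hello", 'l', 5)   folds to   "hello" + 2
     memchr ("hello", 'z', 5)   folds to   a null pointer constant.  */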
9369 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9370 Return NULL_TREE if no simplification can be made. */
9372 static tree
9373 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9375 const char *p1, *p2;
9377 if (!validate_arg (arg1, POINTER_TYPE)
9378 || !validate_arg (arg2, POINTER_TYPE)
9379 || !validate_arg (len, INTEGER_TYPE))
9380 return NULL_TREE;
9382 /* If the LEN parameter is zero, return zero. */
9383 if (integer_zerop (len))
9384 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9385 arg1, arg2);
9387 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9388 if (operand_equal_p (arg1, arg2, 0))
9389 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9391 p1 = c_getstr (arg1);
9392 p2 = c_getstr (arg2);
9394 /* If all arguments are constant, and the value of len is not greater
9395 than the lengths of arg1 and arg2, evaluate at compile-time. */
9396 if (host_integerp (len, 1) && p1 && p2
9397 && compare_tree_int (len, strlen (p1) + 1) <= 0
9398 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9400 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9402 if (r > 0)
9403 return integer_one_node;
9404 else if (r < 0)
9405 return integer_minus_one_node;
9406 else
9407 return integer_zero_node;
9410 /* If the len parameter is one, return an expression corresponding to
9411 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9412 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9414 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9415 tree cst_uchar_ptr_node
9416 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9418 tree ind1
9419 = fold_convert_loc (loc, integer_type_node,
9420 build1 (INDIRECT_REF, cst_uchar_node,
9421 fold_convert_loc (loc,
9422 cst_uchar_ptr_node,
9423 arg1)));
9424 tree ind2
9425 = fold_convert_loc (loc, integer_type_node,
9426 build1 (INDIRECT_REF, cst_uchar_node,
9427 fold_convert_loc (loc,
9428 cst_uchar_ptr_node,
9429 arg2)));
9430 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9433 return NULL_TREE;
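/* For example:

     memcmp ("abc", "abd", 3)   folds to   -1
     memcmp (p, q, 1)           folds to
       (int) *(const unsigned char *) p - (int) *(const unsigned char *) q

   so these fixed-size comparisons never reach the library routine.  */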
9436 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9437 Return NULL_TREE if no simplification can be made. */
9439 static tree
9440 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9442 const char *p1, *p2;
9444 if (!validate_arg (arg1, POINTER_TYPE)
9445 || !validate_arg (arg2, POINTER_TYPE))
9446 return NULL_TREE;
9448 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9449 if (operand_equal_p (arg1, arg2, 0))
9450 return integer_zero_node;
9452 p1 = c_getstr (arg1);
9453 p2 = c_getstr (arg2);
9455 if (p1 && p2)
9457 const int i = strcmp (p1, p2);
9458 if (i < 0)
9459 return integer_minus_one_node;
9460 else if (i > 0)
9461 return integer_one_node;
9462 else
9463 return integer_zero_node;
9466 /* If the second arg is "", return *(const unsigned char*)arg1. */
9467 if (p2 && *p2 == '\0')
9469 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9470 tree cst_uchar_ptr_node
9471 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9473 return fold_convert_loc (loc, integer_type_node,
9474 build1 (INDIRECT_REF, cst_uchar_node,
9475 fold_convert_loc (loc,
9476 cst_uchar_ptr_node,
9477 arg1)));
9480 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9481 if (p1 && *p1 == '\0')
9483 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9484 tree cst_uchar_ptr_node
9485 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9487 tree temp
9488 = fold_convert_loc (loc, integer_type_node,
9489 build1 (INDIRECT_REF, cst_uchar_node,
9490 fold_convert_loc (loc,
9491 cst_uchar_ptr_node,
9492 arg2)));
9493 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9496 return NULL_TREE;
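/* For example:

     strcmp ("abc", "abd")   folds to     -1
     strcmp (s, "")          folds to     (int) *(const unsigned char *) s
     strcmp ("", s)          folds to   - (int) *(const unsigned char *) s  */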
9499 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9500 Return NULL_TREE if no simplification can be made. */
9502 static tree
9503 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9505 const char *p1, *p2;
9507 if (!validate_arg (arg1, POINTER_TYPE)
9508 || !validate_arg (arg2, POINTER_TYPE)
9509 || !validate_arg (len, INTEGER_TYPE))
9510 return NULL_TREE;
9512 /* If the LEN parameter is zero, return zero. */
9513 if (integer_zerop (len))
9514 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9515 arg1, arg2);
9517 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9518 if (operand_equal_p (arg1, arg2, 0))
9519 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9521 p1 = c_getstr (arg1);
9522 p2 = c_getstr (arg2);
9524 if (host_integerp (len, 1) && p1 && p2)
9526 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9527 if (i > 0)
9528 return integer_one_node;
9529 else if (i < 0)
9530 return integer_minus_one_node;
9531 else
9532 return integer_zero_node;
9535 /* If the second arg is "", and the length is greater than zero,
9536 return *(const unsigned char*)arg1. */
9537 if (p2 && *p2 == '\0'
9538 && TREE_CODE (len) == INTEGER_CST
9539 && tree_int_cst_sgn (len) == 1)
9541 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9542 tree cst_uchar_ptr_node
9543 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9545 return fold_convert_loc (loc, integer_type_node,
9546 build1 (INDIRECT_REF, cst_uchar_node,
9547 fold_convert_loc (loc,
9548 cst_uchar_ptr_node,
9549 arg1)));
9552 /* If the first arg is "", and the length is greater than zero,
9553 return -*(const unsigned char*)arg2. */
9554 if (p1 && *p1 == '\0'
9555 && TREE_CODE (len) == INTEGER_CST
9556 && tree_int_cst_sgn (len) == 1)
9558 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9559 tree cst_uchar_ptr_node
9560 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9562 tree temp = fold_convert_loc (loc, integer_type_node,
9563 build1 (INDIRECT_REF, cst_uchar_node,
9564 fold_convert_loc (loc,
9565 cst_uchar_ptr_node,
9566 arg2)));
9567 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9570 /* If the len parameter is one, return an expression corresponding to
9571 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9572 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9574 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9575 tree cst_uchar_ptr_node
9576 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9578 tree ind1 = fold_convert_loc (loc, integer_type_node,
9579 build1 (INDIRECT_REF, cst_uchar_node,
9580 fold_convert_loc (loc,
9581 cst_uchar_ptr_node,
9582 arg1)));
9583 tree ind2 = fold_convert_loc (loc, integer_type_node,
9584 build1 (INDIRECT_REF, cst_uchar_node,
9585 fold_convert_loc (loc,
9586 cst_uchar_ptr_node,
9587 arg2)));
9588 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9591 return NULL_TREE;
9594 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9595 ARG. Return NULL_TREE if no simplification can be made. */
9597 static tree
9598 fold_builtin_signbit (location_t loc, tree arg, tree type)
9600 tree temp;
9602 if (!validate_arg (arg, REAL_TYPE))
9603 return NULL_TREE;
9605 /* If ARG is a compile-time constant, determine the result. */
9606 if (TREE_CODE (arg) == REAL_CST
9607 && !TREE_OVERFLOW (arg))
9609 REAL_VALUE_TYPE c;
9611 c = TREE_REAL_CST (arg);
9612 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9613 return fold_convert_loc (loc, type, temp);
9616 /* If ARG is non-negative, the result is always zero. */
9617 if (tree_expr_nonnegative_p (arg))
9618 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9620 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9621 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9622 return fold_build2_loc (loc, LT_EXPR, type, arg,
9623 build_real (TREE_TYPE (arg), dconst0));
9625 return NULL_TREE;
9628 /* Fold function call to builtin copysign, copysignf or copysignl with
9629 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9630 be made. */
9632 static tree
9633 fold_builtin_copysign (location_t loc, tree fndecl,
9634 tree arg1, tree arg2, tree type)
9636 tree tem;
9638 if (!validate_arg (arg1, REAL_TYPE)
9639 || !validate_arg (arg2, REAL_TYPE))
9640 return NULL_TREE;
9642 /* copysign(X,X) is X. */
9643 if (operand_equal_p (arg1, arg2, 0))
9644 return fold_convert_loc (loc, type, arg1);
9646 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9647 if (TREE_CODE (arg1) == REAL_CST
9648 && TREE_CODE (arg2) == REAL_CST
9649 && !TREE_OVERFLOW (arg1)
9650 && !TREE_OVERFLOW (arg2))
9652 REAL_VALUE_TYPE c1, c2;
9654 c1 = TREE_REAL_CST (arg1);
9655 c2 = TREE_REAL_CST (arg2);
9656 /* c1.sign := c2.sign. */
9657 real_copysign (&c1, &c2);
9658 return build_real (type, c1);
9661 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9662 Remember to evaluate Y for side-effects. */
9663 if (tree_expr_nonnegative_p (arg2))
9664 return omit_one_operand_loc (loc, type,
9665 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9666 arg2);
9668 /* Strip sign changing operations for the first argument. */
9669 tem = fold_strip_sign_ops (arg1);
9670 if (tem)
9671 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9673 return NULL_TREE;
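/* For example:

     copysign (x, x)       folds to   x
     copysign (x, 2.0)     folds to   fabs (x)   (sign source known
                                                  non-negative)
     copysign (-3.0, 1.0)  folds to   3.0        (both constant).  */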
9676 /* Fold a call to builtin isascii with argument ARG. */
9678 static tree
9679 fold_builtin_isascii (location_t loc, tree arg)
9681 if (!validate_arg (arg, INTEGER_TYPE))
9682 return NULL_TREE;
9683 else
9685 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9686 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9687 build_int_cst (NULL_TREE,
9688 ~ (unsigned HOST_WIDE_INT) 0x7f));
9689 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9690 arg, integer_zero_node);
9694 /* Fold a call to builtin toascii with argument ARG. */
9696 static tree
9697 fold_builtin_toascii (location_t loc, tree arg)
9699 if (!validate_arg (arg, INTEGER_TYPE))
9700 return NULL_TREE;
9702 /* Transform toascii(c) -> (c & 0x7f). */
9703 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9704 build_int_cst (NULL_TREE, 0x7f));
9707 /* Fold a call to builtin isdigit with argument ARG. */
9709 static tree
9710 fold_builtin_isdigit (location_t loc, tree arg)
9712 if (!validate_arg (arg, INTEGER_TYPE))
9713 return NULL_TREE;
9714 else
9716 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9717 /* According to the C standard, isdigit is unaffected by locale.
9718 However, it definitely is affected by the target character set. */
9719 unsigned HOST_WIDE_INT target_digit0
9720 = lang_hooks.to_target_charset ('0');
9722 if (target_digit0 == 0)
9723 return NULL_TREE;
9725 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9726 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9727 build_int_cst (unsigned_type_node, target_digit0));
9728 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9729 build_int_cst (unsigned_type_node, 9));
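/* Taken together, the three folds above make these <ctype.h> calls
   library-free; e.g. (hypothetical code, assuming an ASCII target)

     while (isdigit (*p))
       p++;

   compiles into a plain unsigned range check,
   (unsigned) *p - '0' <= 9, per iteration.  */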
9733 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9735 static tree
9736 fold_builtin_fabs (location_t loc, tree arg, tree type)
9738 if (!validate_arg (arg, REAL_TYPE))
9739 return NULL_TREE;
9741 arg = fold_convert_loc (loc, type, arg);
9742 if (TREE_CODE (arg) == REAL_CST)
9743 return fold_abs_const (arg, type);
9744 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9747 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9749 static tree
9750 fold_builtin_abs (location_t loc, tree arg, tree type)
9752 if (!validate_arg (arg, INTEGER_TYPE))
9753 return NULL_TREE;
9755 arg = fold_convert_loc (loc, type, arg);
9756 if (TREE_CODE (arg) == INTEGER_CST)
9757 return fold_abs_const (arg, type);
9758 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9761 /* Fold a call to builtin fmin or fmax. */
9763 static tree
9764 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9765 tree type, bool max)
9767 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9769 /* Calculate the result when the argument is a constant. */
9770 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9772 if (res)
9773 return res;
9775 /* If either argument is NaN, return the other one. Avoid the
9776 transformation if we get (and honor) a signalling NaN. Using
9777 omit_one_operand() ensures we create a non-lvalue. */
9778 if (TREE_CODE (arg0) == REAL_CST
9779 && real_isnan (&TREE_REAL_CST (arg0))
9780 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9781 || ! TREE_REAL_CST (arg0).signalling))
9782 return omit_one_operand_loc (loc, type, arg1, arg0);
9783 if (TREE_CODE (arg1) == REAL_CST
9784 && real_isnan (&TREE_REAL_CST (arg1))
9785 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9786 || ! TREE_REAL_CST (arg1).signalling))
9787 return omit_one_operand_loc (loc, type, arg0, arg1);
9789 /* Transform fmin/fmax(x,x) -> x. */
9790 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9791 return omit_one_operand_loc (loc, type, arg0, arg1);
9793 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9794 functions to return the numeric arg if the other one is NaN.
9795 These tree codes don't honor that, so only transform if
9796 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9797 handled, so we don't have to worry about it either. */
9798 if (flag_finite_math_only)
9799 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9800 fold_convert_loc (loc, type, arg0),
9801 fold_convert_loc (loc, type, arg1));
9803 return NULL_TREE;
9806 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9808 static tree
9809 fold_builtin_carg (location_t loc, tree arg, tree type)
9811 if (validate_arg (arg, COMPLEX_TYPE)
9812 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9814 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9816 if (atan2_fn)
9818 tree new_arg = builtin_save_expr (arg);
9819 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9820 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9821 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9825 return NULL_TREE;
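/* For example, for a _Complex double Z,

     carg (z)

   is rewritten as

     atan2 (__imag__ z, __real__ z)

   so the existing atan2 folding and expansion machinery applies.  */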
9828 /* Fold a call to builtin logb/ilogb. */
9830 static tree
9831 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9833 if (! validate_arg (arg, REAL_TYPE))
9834 return NULL_TREE;
9836 STRIP_NOPS (arg);
9838 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9840 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9842 switch (value->cl)
9844 case rvc_nan:
9845 case rvc_inf:
9846 /* If arg is Inf or NaN and we're logb, return it. */
9847 if (TREE_CODE (rettype) == REAL_TYPE)
9848 return fold_convert_loc (loc, rettype, arg);
9849 /* Fall through... */
9850 case rvc_zero:
9851 /* Zero may set errno and/or raise an exception for logb; also,
9852 for ilogb we don't know FP_ILOGB0. */
9853 return NULL_TREE;
9854 case rvc_normal:
9855 /* For normal numbers, proceed iff radix == 2. In GCC,
9856 normalized significands are in the range [0.5, 1.0). We
9857 want the exponent as if they were [1.0, 2.0) so get the
9858 exponent and subtract 1. */
9859 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9860 return fold_convert_loc (loc, rettype,
9861 build_int_cst (NULL_TREE,
9862 REAL_EXP (value)-1));
9863 break;
9867 return NULL_TREE;
9870 /* Fold a call to builtin significand, if radix == 2. */
9872 static tree
9873 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9875 if (! validate_arg (arg, REAL_TYPE))
9876 return NULL_TREE;
9878 STRIP_NOPS (arg);
9880 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9882 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9884 switch (value->cl)
9886 case rvc_zero:
9887 case rvc_nan:
9888 case rvc_inf:
9889 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9890 return fold_convert_loc (loc, rettype, arg);
9891 case rvc_normal:
9892 /* For normal numbers, proceed iff radix == 2. */
9893 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9895 REAL_VALUE_TYPE result = *value;
9896 /* In GCC, normalized significands are in the range [0.5,
9897 1.0). We want them to be [1.0, 2.0) so set the
9898 exponent to 1. */
9899 SET_REAL_EXP (&result, 1);
9900 return build_real (rettype, result);
9902 break;
9906 return NULL_TREE;
9909 /* Fold a call to builtin frexp, we can assume the base is 2. */
9911 static tree
9912 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9914 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9915 return NULL_TREE;
9917 STRIP_NOPS (arg0);
9919 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9920 return NULL_TREE;
9922 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9924 /* Proceed if a valid pointer type was passed in. */
9925 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9927 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9928 tree frac, exp;
9930 switch (value->cl)
9932 case rvc_zero:
9933 /* For +-0, return (*exp = 0, +-0). */
9934 exp = integer_zero_node;
9935 frac = arg0;
9936 break;
9937 case rvc_nan:
9938 case rvc_inf:
9939 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9940 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9941 case rvc_normal:
9943 /* Since the frexp function always expects base 2, and in
9944 GCC normalized significands are already in the range
9945 [0.5, 1.0), we have exactly what frexp wants. */
9946 REAL_VALUE_TYPE frac_rvt = *value;
9947 SET_REAL_EXP (&frac_rvt, 0);
9948 frac = build_real (rettype, frac_rvt);
9949 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9951 break;
9952 default:
9953 gcc_unreachable ();
9956 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9957 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9958 TREE_SIDE_EFFECTS (arg1) = 1;
9959 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9962 return NULL_TREE;
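/* For example, with a constant argument and an int exponent slot the whole
   call is evaluated at compile time:

     frexp (4.0, &e)   folds to   (e = 3, 0.5)
     frexp (0.0, &e)   folds to   (e = 0, 0.0)

   matching the C requirement that a nonzero result lie in [0.5, 1.0).  */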
9965 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9966 then we can assume the base is two. If it's false, then we have to
9967 check the mode of the TYPE parameter in certain cases. */
9969 static tree
9970 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9971 tree type, bool ldexp)
9973 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9975 STRIP_NOPS (arg0);
9976 STRIP_NOPS (arg1);
9978 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9979 if (real_zerop (arg0) || integer_zerop (arg1)
9980 || (TREE_CODE (arg0) == REAL_CST
9981 && !real_isfinite (&TREE_REAL_CST (arg0))))
9982 return omit_one_operand_loc (loc, type, arg0, arg1);
9984 /* If both arguments are constant, then try to evaluate it. */
9985 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9986 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9987 && host_integerp (arg1, 0))
9989 /* Bound the maximum adjustment to twice the range of the
9990 mode's valid exponents. Use abs to ensure the range is
9991 positive as a sanity check. */
9992 const long max_exp_adj = 2 *
9993 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9994 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9996 /* Get the user-requested adjustment. */
9997 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9999 /* The requested adjustment must be inside this range. This
10000 is a preliminary cap to avoid things like overflow, we
10001 may still fail to compute the result for other reasons. */
10002 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
10004 REAL_VALUE_TYPE initial_result;
10006 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
10008 /* Ensure we didn't overflow. */
10009 if (! real_isinf (&initial_result))
10011 const REAL_VALUE_TYPE trunc_result
10012 = real_value_truncate (TYPE_MODE (type), initial_result);
10014 /* Only proceed if the target mode can hold the
10015 resulting value. */
10016 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
10017 return build_real (type, trunc_result);
10023 return NULL_TREE;
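/* For example:

     ldexp (x, 0)      folds to   x
     ldexp (1.5, 3)    folds to   12.0
     scalbn (2.0, 10)  folds to   2048.0   (on a radix-2 target)

   provided the constant result fits in the target mode.  */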
10026 /* Fold a call to builtin modf. */
10028 static tree
10029 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
10031 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
10032 return NULL_TREE;
10034 STRIP_NOPS (arg0);
10036 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
10037 return NULL_TREE;
10039 arg1 = build_fold_indirect_ref_loc (loc, arg1);
10041 /* Proceed if a valid pointer type was passed in. */
10042 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
10044 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
10045 REAL_VALUE_TYPE trunc, frac;
10047 switch (value->cl)
10049 case rvc_nan:
10050 case rvc_zero:
10051 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
10052 trunc = frac = *value;
10053 break;
10054 case rvc_inf:
10055 /* For +-Inf, return (*arg1 = arg0, +-0). */
10056 frac = dconst0;
10057 frac.sign = value->sign;
10058 trunc = *value;
10059 break;
10060 case rvc_normal:
10061 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
10062 real_trunc (&trunc, VOIDmode, value);
10063 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
10064 /* If the original number was negative and already
10065 integral, then the fractional part is -0.0. */
10066 if (value->sign && frac.cl == rvc_zero)
10067 frac.sign = value->sign;
10068 break;
10071 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
10072 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
10073 build_real (rettype, trunc));
10074 TREE_SIDE_EFFECTS (arg1) = 1;
10075 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
10076 build_real (rettype, frac));
10079 return NULL_TREE;
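/* For example, with a constant argument and a matching pointer type:

     modf (2.5, &ip)    folds to   (ip = 2.0, 0.5)
     modf (-3.0, &ip)   folds to   (ip = -3.0, -0.0)

   the second case preserving the sign on the zero fractional part.  */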
10082 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
10083 ARG is the argument for the call. */
10085 static tree
10086 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10088 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10089 REAL_VALUE_TYPE r;
10091 if (!validate_arg (arg, REAL_TYPE))
10092 return NULL_TREE;
10094 switch (builtin_index)
10096 case BUILT_IN_ISINF:
10097 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10098 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10100 if (TREE_CODE (arg) == REAL_CST)
10102 r = TREE_REAL_CST (arg);
10103 if (real_isinf (&r))
10104 return real_compare (GT_EXPR, &r, &dconst0)
10105 ? integer_one_node : integer_minus_one_node;
10106 else
10107 return integer_zero_node;
10110 return NULL_TREE;
10112 case BUILT_IN_ISINF_SIGN:
10114 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10115 /* In a boolean context, GCC will fold the inner COND_EXPR to
10116 1. So e.g. "if (isinf_sign(x))" would be folded to just
10117 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10118 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10119 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
10120 tree tmp = NULL_TREE;
10122 arg = builtin_save_expr (arg);
10124 if (signbit_fn && isinf_fn)
10126 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10127 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10129 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10130 signbit_call, integer_zero_node);
10131 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10132 isinf_call, integer_zero_node);
10134 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10135 integer_minus_one_node, integer_one_node);
10136 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10137 isinf_call, tmp,
10138 integer_zero_node);
10141 return tmp;
10144 case BUILT_IN_ISFINITE:
10145 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10146 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10147 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10149 if (TREE_CODE (arg) == REAL_CST)
10151 r = TREE_REAL_CST (arg);
10152 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10155 return NULL_TREE;
10157 case BUILT_IN_ISNAN:
10158 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10159 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10161 if (TREE_CODE (arg) == REAL_CST)
10163 r = TREE_REAL_CST (arg);
10164 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10167 arg = builtin_save_expr (arg);
10168 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10170 default:
10171 gcc_unreachable ();
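/* For example, when the mode honors NaNs,

     isnan (x)

   folds to the unordered self-comparison

     __builtin_isunordered (x, x)

   while isinf (3.0) folds to 0, and isfinite (x) folds to 1 outright on
   modes without NaNs or infinities.  */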
10175 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10176 This builtin will generate code to return the appropriate floating
10177 point classification depending on the value of the floating point
10178 number passed in. The possible return values must be supplied as
10179 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10180 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
10181 one floating point argument which is "type generic". */
10183 static tree
10184 fold_builtin_fpclassify (location_t loc, tree exp)
10186 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10187 arg, type, res, tmp;
10188 enum machine_mode mode;
10189 REAL_VALUE_TYPE r;
10190 char buf[128];
10192 /* Verify the required arguments in the original call. */
10193 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10194 INTEGER_TYPE, INTEGER_TYPE,
10195 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10196 return NULL_TREE;
10198 fp_nan = CALL_EXPR_ARG (exp, 0);
10199 fp_infinite = CALL_EXPR_ARG (exp, 1);
10200 fp_normal = CALL_EXPR_ARG (exp, 2);
10201 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10202 fp_zero = CALL_EXPR_ARG (exp, 4);
10203 arg = CALL_EXPR_ARG (exp, 5);
10204 type = TREE_TYPE (arg);
10205 mode = TYPE_MODE (type);
10206 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10208 /* fpclassify(x) ->
10209 isnan(x) ? FP_NAN :
10210 (fabs(x) == Inf ? FP_INFINITE :
10211 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10212 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10214 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10215 build_real (type, dconst0));
10216 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10217 tmp, fp_zero, fp_subnormal);
10219 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10220 real_from_string (&r, buf);
10221 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10222 arg, build_real (type, r));
10223 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10225 if (HONOR_INFINITIES (mode))
10227 real_inf (&r);
10228 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10229 build_real (type, r));
10230 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10231 fp_infinite, res);
10234 if (HONOR_NANS (mode))
10236 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10237 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10240 return res;
10243 /* Fold a call to an unordered comparison function such as
10244 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10245 being called and ARG0 and ARG1 are the arguments for the call.
10246 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10247 the opposite of the desired result. UNORDERED_CODE is used
10248 for modes that can hold NaNs and ORDERED_CODE is used for
10249 the rest. */
10251 static tree
10252 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10253 enum tree_code unordered_code,
10254 enum tree_code ordered_code)
10256 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10257 enum tree_code code;
10258 tree type0, type1;
10259 enum tree_code code0, code1;
10260 tree cmp_type = NULL_TREE;
10262 type0 = TREE_TYPE (arg0);
10263 type1 = TREE_TYPE (arg1);
10265 code0 = TREE_CODE (type0);
10266 code1 = TREE_CODE (type1);
10268 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10269 /* Choose the wider of two real types. */
10270 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10271 ? type0 : type1;
10272 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10273 cmp_type = type0;
10274 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10275 cmp_type = type1;
10277 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10278 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10280 if (unordered_code == UNORDERED_EXPR)
10282 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10283 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10284 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10287 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10288 : ordered_code;
10289 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10290 fold_build2_loc (loc, code, type, arg0, arg1));
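/* For example, isgreater (x, y) is built as the negation of the
   unordered-or-less-equal comparison, i.e. roughly

     !__builtin_isunordered (x, y) && x > y

   and on a mode that cannot hold NaNs it degenerates to plain x > y.  */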
10293 /* Fold a call to built-in function FNDECL with 0 arguments.
10294 IGNORE is true if the result of the function call is ignored. This
10295 function returns NULL_TREE if no simplification was possible. */
10297 static tree
10298 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10300 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10301 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10302 switch (fcode)
10304 CASE_FLT_FN (BUILT_IN_INF):
10305 case BUILT_IN_INFD32:
10306 case BUILT_IN_INFD64:
10307 case BUILT_IN_INFD128:
10308 return fold_builtin_inf (loc, type, true);
10310 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10311 return fold_builtin_inf (loc, type, false);
10313 case BUILT_IN_CLASSIFY_TYPE:
10314 return fold_builtin_classify_type (NULL_TREE);
10316 default:
10317 break;
10319 return NULL_TREE;
10322 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10323 IGNORE is true if the result of the function call is ignored. This
10324 function returns NULL_TREE if no simplification was possible. */
10326 static tree
10327 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10329 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10330 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10331 switch (fcode)
10334 case BUILT_IN_CONSTANT_P:
10336 tree val = fold_builtin_constant_p (arg0);
10338 /* Gimplification will pull the CALL_EXPR for the builtin out of
10339 an if condition. When not optimizing, we'll not CSE it back.
10340 To avoid regressions in the form of link errors, return false now. */
10341 if (!val && !optimize)
10342 val = integer_zero_node;
10344 return val;
10347 case BUILT_IN_CLASSIFY_TYPE:
10348 return fold_builtin_classify_type (arg0);
10350 case BUILT_IN_STRLEN:
10351 return fold_builtin_strlen (loc, arg0);
10353 CASE_FLT_FN (BUILT_IN_FABS):
10354 return fold_builtin_fabs (loc, arg0, type);
10356 case BUILT_IN_ABS:
10357 case BUILT_IN_LABS:
10358 case BUILT_IN_LLABS:
10359 case BUILT_IN_IMAXABS:
10360 return fold_builtin_abs (loc, arg0, type);
10362 CASE_FLT_FN (BUILT_IN_CONJ):
10363 if (validate_arg (arg0, COMPLEX_TYPE)
10364 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10365 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10366 break;
10368 CASE_FLT_FN (BUILT_IN_CREAL):
10369 if (validate_arg (arg0, COMPLEX_TYPE)
10370 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10371 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10372 break;
10374 CASE_FLT_FN (BUILT_IN_CIMAG):
10375 if (validate_arg (arg0, COMPLEX_TYPE))
10376 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10377 break;
10379 CASE_FLT_FN (BUILT_IN_CCOS):
10380 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
10382 CASE_FLT_FN (BUILT_IN_CCOSH):
10383 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
10385 #ifdef HAVE_mpc
10386 CASE_FLT_FN (BUILT_IN_CSIN):
10387 if (validate_arg (arg0, COMPLEX_TYPE)
10388 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10389 return do_mpc_arg1 (arg0, type, mpc_sin);
10390 break;
10392 CASE_FLT_FN (BUILT_IN_CSINH):
10393 if (validate_arg (arg0, COMPLEX_TYPE)
10394 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10395 return do_mpc_arg1 (arg0, type, mpc_sinh);
10396 break;
10398 CASE_FLT_FN (BUILT_IN_CTAN):
10399 if (validate_arg (arg0, COMPLEX_TYPE)
10400 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10401 return do_mpc_arg1 (arg0, type, mpc_tan);
10402 break;
10404 CASE_FLT_FN (BUILT_IN_CTANH):
10405 if (validate_arg (arg0, COMPLEX_TYPE)
10406 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10407 return do_mpc_arg1 (arg0, type, mpc_tanh);
10408 break;
10410 CASE_FLT_FN (BUILT_IN_CLOG):
10411 if (validate_arg (arg0, COMPLEX_TYPE)
10412 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10413 return do_mpc_arg1 (arg0, type, mpc_log);
10414 break;
10416 CASE_FLT_FN (BUILT_IN_CSQRT):
10417 if (validate_arg (arg0, COMPLEX_TYPE)
10418 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10419 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10420 break;
10421 #endif
10423 CASE_FLT_FN (BUILT_IN_CABS):
10424 return fold_builtin_cabs (loc, arg0, type, fndecl);
10426 CASE_FLT_FN (BUILT_IN_CARG):
10427 return fold_builtin_carg (loc, arg0, type);
10429 CASE_FLT_FN (BUILT_IN_SQRT):
10430 return fold_builtin_sqrt (loc, arg0, type);
10432 CASE_FLT_FN (BUILT_IN_CBRT):
10433 return fold_builtin_cbrt (loc, arg0, type);
10435 CASE_FLT_FN (BUILT_IN_ASIN):
10436 if (validate_arg (arg0, REAL_TYPE))
10437 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10438 &dconstm1, &dconst1, true);
10439 break;
10441 CASE_FLT_FN (BUILT_IN_ACOS):
10442 if (validate_arg (arg0, REAL_TYPE))
10443 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10444 &dconstm1, &dconst1, true);
10445 break;
10447 CASE_FLT_FN (BUILT_IN_ATAN):
10448 if (validate_arg (arg0, REAL_TYPE))
10449 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10450 break;
10452 CASE_FLT_FN (BUILT_IN_ASINH):
10453 if (validate_arg (arg0, REAL_TYPE))
10454 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10455 break;
10457 CASE_FLT_FN (BUILT_IN_ACOSH):
10458 if (validate_arg (arg0, REAL_TYPE))
10459 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10460 &dconst1, NULL, true);
10461 break;
10463 CASE_FLT_FN (BUILT_IN_ATANH):
10464 if (validate_arg (arg0, REAL_TYPE))
10465 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10466 &dconstm1, &dconst1, false);
10467 break;
10469 CASE_FLT_FN (BUILT_IN_SIN):
10470 if (validate_arg (arg0, REAL_TYPE))
10471 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10472 break;
10474 CASE_FLT_FN (BUILT_IN_COS):
10475 return fold_builtin_cos (loc, arg0, type, fndecl);
10477 CASE_FLT_FN (BUILT_IN_TAN):
10478 return fold_builtin_tan (arg0, type);
10480 CASE_FLT_FN (BUILT_IN_CEXP):
10481 return fold_builtin_cexp (loc, arg0, type);
10483 CASE_FLT_FN (BUILT_IN_CEXPI):
10484 if (validate_arg (arg0, REAL_TYPE))
10485 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10486 break;
10488 CASE_FLT_FN (BUILT_IN_SINH):
10489 if (validate_arg (arg0, REAL_TYPE))
10490 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10491 break;
10493 CASE_FLT_FN (BUILT_IN_COSH):
10494 return fold_builtin_cosh (loc, arg0, type, fndecl);
10496 CASE_FLT_FN (BUILT_IN_TANH):
10497 if (validate_arg (arg0, REAL_TYPE))
10498 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10499 break;
10501 CASE_FLT_FN (BUILT_IN_ERF):
10502 if (validate_arg (arg0, REAL_TYPE))
10503 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10504 break;
10506 CASE_FLT_FN (BUILT_IN_ERFC):
10507 if (validate_arg (arg0, REAL_TYPE))
10508 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10509 break;
10511 CASE_FLT_FN (BUILT_IN_TGAMMA):
10512 if (validate_arg (arg0, REAL_TYPE))
10513 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10514 break;
10516 CASE_FLT_FN (BUILT_IN_EXP):
10517 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10519 CASE_FLT_FN (BUILT_IN_EXP2):
10520 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10522 CASE_FLT_FN (BUILT_IN_EXP10):
10523 CASE_FLT_FN (BUILT_IN_POW10):
10524 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10526 CASE_FLT_FN (BUILT_IN_EXPM1):
10527 if (validate_arg (arg0, REAL_TYPE))
10528 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10529 break;
10531 CASE_FLT_FN (BUILT_IN_LOG):
10532 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10534 CASE_FLT_FN (BUILT_IN_LOG2):
10535 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10537 CASE_FLT_FN (BUILT_IN_LOG10):
10538 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10540 CASE_FLT_FN (BUILT_IN_LOG1P):
10541 if (validate_arg (arg0, REAL_TYPE))
10542 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10543 &dconstm1, NULL, false);
10544 break;
10546 CASE_FLT_FN (BUILT_IN_J0):
10547 if (validate_arg (arg0, REAL_TYPE))
10548 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10549 NULL, NULL, 0);
10550 break;
10552 CASE_FLT_FN (BUILT_IN_J1):
10553 if (validate_arg (arg0, REAL_TYPE))
10554 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10555 NULL, NULL, 0);
10556 break;
10558 CASE_FLT_FN (BUILT_IN_Y0):
10559 if (validate_arg (arg0, REAL_TYPE))
10560 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10561 &dconst0, NULL, false);
10562 break;
10564 CASE_FLT_FN (BUILT_IN_Y1):
10565 if (validate_arg (arg0, REAL_TYPE))
10566 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10567 &dconst0, NULL, false);
10568 break;
10570 CASE_FLT_FN (BUILT_IN_NAN):
10571 case BUILT_IN_NAND32:
10572 case BUILT_IN_NAND64:
10573 case BUILT_IN_NAND128:
10574 return fold_builtin_nan (arg0, type, true);
10576 CASE_FLT_FN (BUILT_IN_NANS):
10577 return fold_builtin_nan (arg0, type, false);
10579 CASE_FLT_FN (BUILT_IN_FLOOR):
10580 return fold_builtin_floor (loc, fndecl, arg0);
10582 CASE_FLT_FN (BUILT_IN_CEIL):
10583 return fold_builtin_ceil (loc, fndecl, arg0);
10585 CASE_FLT_FN (BUILT_IN_TRUNC):
10586 return fold_builtin_trunc (loc, fndecl, arg0);
10588 CASE_FLT_FN (BUILT_IN_ROUND):
10589 return fold_builtin_round (loc, fndecl, arg0);
10591 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10592 CASE_FLT_FN (BUILT_IN_RINT):
10593 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10595 CASE_FLT_FN (BUILT_IN_LCEIL):
10596 CASE_FLT_FN (BUILT_IN_LLCEIL):
10597 CASE_FLT_FN (BUILT_IN_LFLOOR):
10598 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10599 CASE_FLT_FN (BUILT_IN_LROUND):
10600 CASE_FLT_FN (BUILT_IN_LLROUND):
10601 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10603 CASE_FLT_FN (BUILT_IN_LRINT):
10604 CASE_FLT_FN (BUILT_IN_LLRINT):
10605 return fold_fixed_mathfn (loc, fndecl, arg0);
10607 case BUILT_IN_BSWAP32:
10608 case BUILT_IN_BSWAP64:
10609 return fold_builtin_bswap (fndecl, arg0);
10611 CASE_INT_FN (BUILT_IN_FFS):
10612 CASE_INT_FN (BUILT_IN_CLZ):
10613 CASE_INT_FN (BUILT_IN_CTZ):
10614 CASE_INT_FN (BUILT_IN_POPCOUNT):
10615 CASE_INT_FN (BUILT_IN_PARITY):
10616 return fold_builtin_bitop (fndecl, arg0);
10618 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10619 return fold_builtin_signbit (loc, arg0, type);
10621 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10622 return fold_builtin_significand (loc, arg0, type);
10624 CASE_FLT_FN (BUILT_IN_ILOGB):
10625 CASE_FLT_FN (BUILT_IN_LOGB):
10626 return fold_builtin_logb (loc, arg0, type);
10628 case BUILT_IN_ISASCII:
10629 return fold_builtin_isascii (loc, arg0);
10631 case BUILT_IN_TOASCII:
10632 return fold_builtin_toascii (loc, arg0);
10634 case BUILT_IN_ISDIGIT:
10635 return fold_builtin_isdigit (loc, arg0);
10637 CASE_FLT_FN (BUILT_IN_FINITE):
10638 case BUILT_IN_FINITED32:
10639 case BUILT_IN_FINITED64:
10640 case BUILT_IN_FINITED128:
10641 case BUILT_IN_ISFINITE:
10642 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10644 CASE_FLT_FN (BUILT_IN_ISINF):
10645 case BUILT_IN_ISINFD32:
10646 case BUILT_IN_ISINFD64:
10647 case BUILT_IN_ISINFD128:
10648 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10650 case BUILT_IN_ISINF_SIGN:
10651 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10653 CASE_FLT_FN (BUILT_IN_ISNAN):
10654 case BUILT_IN_ISNAND32:
10655 case BUILT_IN_ISNAND64:
10656 case BUILT_IN_ISNAND128:
10657 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10659 case BUILT_IN_PRINTF:
10660 case BUILT_IN_PRINTF_UNLOCKED:
10661 case BUILT_IN_VPRINTF:
10662 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10664 default:
10665 break;
10668 return NULL_TREE;
10672 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10673 IGNORE is true if the result of the function call is ignored. This
10674 function returns NULL_TREE if no simplification was possible. */
10676 static tree
10677 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10679 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10680 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10682 switch (fcode)
10684 CASE_FLT_FN (BUILT_IN_JN):
10685 if (validate_arg (arg0, INTEGER_TYPE)
10686 && validate_arg (arg1, REAL_TYPE))
10687 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10688 break;
10690 CASE_FLT_FN (BUILT_IN_YN):
10691 if (validate_arg (arg0, INTEGER_TYPE)
10692 && validate_arg (arg1, REAL_TYPE))
10693 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10694 &dconst0, false);
10695 break;
10697 CASE_FLT_FN (BUILT_IN_DREM):
10698 CASE_FLT_FN (BUILT_IN_REMAINDER):
10699 if (validate_arg (arg0, REAL_TYPE)
10700 && validate_arg(arg1, REAL_TYPE))
10701 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10702 break;
10704 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10705 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10706 if (validate_arg (arg0, REAL_TYPE)
10707 && validate_arg(arg1, POINTER_TYPE))
10708 return do_mpfr_lgamma_r (arg0, arg1, type);
10709 break;
10711 CASE_FLT_FN (BUILT_IN_ATAN2):
10712 if (validate_arg (arg0, REAL_TYPE)
10713 && validate_arg(arg1, REAL_TYPE))
10714 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10715 break;
10717 CASE_FLT_FN (BUILT_IN_FDIM):
10718 if (validate_arg (arg0, REAL_TYPE)
10719 && validate_arg(arg1, REAL_TYPE))
10720 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10721 break;
10723 CASE_FLT_FN (BUILT_IN_HYPOT):
10724 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10726 #ifdef HAVE_mpc_pow
10727 CASE_FLT_FN (BUILT_IN_CPOW):
10728 if (validate_arg (arg0, COMPLEX_TYPE)
10729 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10730 && validate_arg (arg1, COMPLEX_TYPE)
10731 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10732 return do_mpc_arg2 (arg0, arg1, type, mpc_pow);
10733 break;
10734 #endif
10736 CASE_FLT_FN (BUILT_IN_LDEXP):
10737 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10738 CASE_FLT_FN (BUILT_IN_SCALBN):
10739 CASE_FLT_FN (BUILT_IN_SCALBLN):
10740 return fold_builtin_load_exponent (loc, arg0, arg1,
10741 type, /*ldexp=*/false);
10743 CASE_FLT_FN (BUILT_IN_FREXP):
10744 return fold_builtin_frexp (loc, arg0, arg1, type);
10746 CASE_FLT_FN (BUILT_IN_MODF):
10747 return fold_builtin_modf (loc, arg0, arg1, type);
10749 case BUILT_IN_BZERO:
10750 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10752 case BUILT_IN_FPUTS:
10753 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10755 case BUILT_IN_FPUTS_UNLOCKED:
10756 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10758 case BUILT_IN_STRSTR:
10759 return fold_builtin_strstr (loc, arg0, arg1, type);
10761 case BUILT_IN_STRCAT:
10762 return fold_builtin_strcat (loc, arg0, arg1);
10764 case BUILT_IN_STRSPN:
10765 return fold_builtin_strspn (loc, arg0, arg1);
10767 case BUILT_IN_STRCSPN:
10768 return fold_builtin_strcspn (loc, arg0, arg1);
10770 case BUILT_IN_STRCHR:
10771 case BUILT_IN_INDEX:
10772 return fold_builtin_strchr (loc, arg0, arg1, type);
10774 case BUILT_IN_STRRCHR:
10775 case BUILT_IN_RINDEX:
10776 return fold_builtin_strrchr (loc, arg0, arg1, type);
10778 case BUILT_IN_STRCPY:
10779 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10781 case BUILT_IN_STPCPY:
10782 if (ignore)
10784 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10785 if (!fn)
10786 break;
10788 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10790 break;
10792 case BUILT_IN_STRCMP:
10793 return fold_builtin_strcmp (loc, arg0, arg1);
10795 case BUILT_IN_STRPBRK:
10796 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10798 case BUILT_IN_EXPECT:
10799 return fold_builtin_expect (loc, arg0, arg1);
10801 CASE_FLT_FN (BUILT_IN_POW):
10802 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10804 CASE_FLT_FN (BUILT_IN_POWI):
10805 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10807 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10808 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10810 CASE_FLT_FN (BUILT_IN_FMIN):
10811 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10813 CASE_FLT_FN (BUILT_IN_FMAX):
10814 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10816 case BUILT_IN_ISGREATER:
10817 return fold_builtin_unordered_cmp (loc, fndecl,
10818 arg0, arg1, UNLE_EXPR, LE_EXPR);
10819 case BUILT_IN_ISGREATEREQUAL:
10820 return fold_builtin_unordered_cmp (loc, fndecl,
10821 arg0, arg1, UNLT_EXPR, LT_EXPR);
10822 case BUILT_IN_ISLESS:
10823 return fold_builtin_unordered_cmp (loc, fndecl,
10824 arg0, arg1, UNGE_EXPR, GE_EXPR);
10825 case BUILT_IN_ISLESSEQUAL:
10826 return fold_builtin_unordered_cmp (loc, fndecl,
10827 arg0, arg1, UNGT_EXPR, GT_EXPR);
10828 case BUILT_IN_ISLESSGREATER:
10829 return fold_builtin_unordered_cmp (loc, fndecl,
10830 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10831 case BUILT_IN_ISUNORDERED:
10832 return fold_builtin_unordered_cmp (loc, fndecl,
10833 arg0, arg1, UNORDERED_EXPR,
10834 NOP_EXPR);
10836 /* We do the folding for va_start in the expander. */
10837 case BUILT_IN_VA_START:
10838 break;
10840 case BUILT_IN_SPRINTF:
10841 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10843 case BUILT_IN_OBJECT_SIZE:
10844 return fold_builtin_object_size (arg0, arg1);
10846 case BUILT_IN_PRINTF:
10847 case BUILT_IN_PRINTF_UNLOCKED:
10848 case BUILT_IN_VPRINTF:
10849 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10851 case BUILT_IN_PRINTF_CHK:
10852 case BUILT_IN_VPRINTF_CHK:
10853 if (!validate_arg (arg0, INTEGER_TYPE)
10854 || TREE_SIDE_EFFECTS (arg0))
10855 return NULL_TREE;
10856 else
10857 return fold_builtin_printf (loc, fndecl,
10858 arg1, NULL_TREE, ignore, fcode);
10859 break;
10861 case BUILT_IN_FPRINTF:
10862 case BUILT_IN_FPRINTF_UNLOCKED:
10863 case BUILT_IN_VFPRINTF:
10864 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10865 ignore, fcode);
10867 default:
10868 break;
10870 return NULL_TREE;
10873 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10874 and ARG2. IGNORE is true if the result of the function call is ignored.
10875 This function returns NULL_TREE if no simplification was possible. */
10877 static tree
10878 fold_builtin_3 (location_t loc, tree fndecl,
10879 tree arg0, tree arg1, tree arg2, bool ignore)
10881 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10882 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10883 switch (fcode)
10886 CASE_FLT_FN (BUILT_IN_SINCOS):
10887 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10889 CASE_FLT_FN (BUILT_IN_FMA):
10890 if (validate_arg (arg0, REAL_TYPE)
10891 && validate_arg(arg1, REAL_TYPE)
10892 && validate_arg(arg2, REAL_TYPE))
10893 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10894 break;
10896 CASE_FLT_FN (BUILT_IN_REMQUO):
10897 if (validate_arg (arg0, REAL_TYPE)
10898 && validate_arg(arg1, REAL_TYPE)
10899 && validate_arg(arg2, POINTER_TYPE))
10900 return do_mpfr_remquo (arg0, arg1, arg2);
10901 break;
10903 case BUILT_IN_MEMSET:
10904 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10906 case BUILT_IN_BCOPY:
10907 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10908 void_type_node, true, /*endp=*/3);
10910 case BUILT_IN_MEMCPY:
10911 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10912 type, ignore, /*endp=*/0);
10914 case BUILT_IN_MEMPCPY:
10915 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10916 type, ignore, /*endp=*/1);
10918 case BUILT_IN_MEMMOVE:
10919 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10920 type, ignore, /*endp=*/3);
10922 case BUILT_IN_STRNCAT:
10923 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10925 case BUILT_IN_STRNCPY:
10926 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10928 case BUILT_IN_STRNCMP:
10929 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10931 case BUILT_IN_MEMCHR:
10932 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10934 case BUILT_IN_BCMP:
10935 case BUILT_IN_MEMCMP:
10936 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10938 case BUILT_IN_SPRINTF:
10939 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10941 case BUILT_IN_STRCPY_CHK:
10942 case BUILT_IN_STPCPY_CHK:
10943 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10944 ignore, fcode);
10946 case BUILT_IN_STRCAT_CHK:
10947 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10949 case BUILT_IN_PRINTF_CHK:
10950 case BUILT_IN_VPRINTF_CHK:
10951 if (!validate_arg (arg0, INTEGER_TYPE)
10952 || TREE_SIDE_EFFECTS (arg0))
10953 return NULL_TREE;
10954 else
10955 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10956 break;
10958 case BUILT_IN_FPRINTF:
10959 case BUILT_IN_FPRINTF_UNLOCKED:
10960 case BUILT_IN_VFPRINTF:
10961 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10962 ignore, fcode);
10964 case BUILT_IN_FPRINTF_CHK:
10965 case BUILT_IN_VFPRINTF_CHK:
10966 if (!validate_arg (arg1, INTEGER_TYPE)
10967 || TREE_SIDE_EFFECTS (arg1))
10968 return NULL_TREE;
10969 else
10970 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10971 ignore, fcode);
10973 default:
10974 break;
10976 return NULL_TREE;
10979 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10980 ARG2, and ARG3. IGNORE is true if the result of the function call is
10981 ignored. This function returns NULL_TREE if no simplification was
10982 possible. */
10984 static tree
10985 fold_builtin_4 (location_t loc, tree fndecl,
10986 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10988 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10990 switch (fcode)
10992 case BUILT_IN_MEMCPY_CHK:
10993 case BUILT_IN_MEMPCPY_CHK:
10994 case BUILT_IN_MEMMOVE_CHK:
10995 case BUILT_IN_MEMSET_CHK:
10996 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10997 NULL_TREE, ignore,
10998 DECL_FUNCTION_CODE (fndecl));
11000 case BUILT_IN_STRNCPY_CHK:
11001 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
11003 case BUILT_IN_STRNCAT_CHK:
11004 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
11006 case BUILT_IN_FPRINTF_CHK:
11007 case BUILT_IN_VFPRINTF_CHK:
11008 if (!validate_arg (arg1, INTEGER_TYPE)
11009 || TREE_SIDE_EFFECTS (arg1))
11010 return NULL_TREE;
11011 else
11012 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
11013 ignore, fcode);
11014 break;
11016 default:
11017 break;
11019 return NULL_TREE;
11022 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11023 arguments, where NARGS <= 4. IGNORE is true if the result of the
11024 function call is ignored. This function returns NULL_TREE if no
11025 simplification was possible. Note that this only folds builtins with
11026 fixed argument patterns. Foldings that do varargs-to-varargs
11027 transformations, or that match calls with more than 4 arguments,
11028 need to be handled with fold_builtin_varargs instead. */
11030 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11032 static tree
11033 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11035 tree ret = NULL_TREE;
11037 switch (nargs)
11039 case 0:
11040 ret = fold_builtin_0 (loc, fndecl, ignore);
11041 break;
11042 case 1:
11043 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11044 break;
11045 case 2:
11046 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11047 break;
11048 case 3:
11049 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11050 break;
11051 case 4:
11052 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11053 ignore);
11054 break;
11055 default:
11056 break;
11058 if (ret)
11060 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11061 SET_EXPR_LOCATION (ret, loc);
11062 TREE_NO_WARNING (ret) = 1;
11063 return ret;
11065 return NULL_TREE;
11068 /* Builtins with folding operations that operate on "..." arguments
11069 need special handling; we need to store the arguments in a convenient
11070 data structure before attempting any folding. Fortunately there are
11071 only a few builtins that fall into this category. FNDECL is the
11072 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11073 result of the function call is ignored. */
11075 static tree
11076 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11077 bool ignore ATTRIBUTE_UNUSED)
11079 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11080 tree ret = NULL_TREE;
11082 switch (fcode)
11084 case BUILT_IN_SPRINTF_CHK:
11085 case BUILT_IN_VSPRINTF_CHK:
11086 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
11087 break;
11089 case BUILT_IN_SNPRINTF_CHK:
11090 case BUILT_IN_VSNPRINTF_CHK:
11091 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
11092 break;
11094 case BUILT_IN_FPCLASSIFY:
11095 ret = fold_builtin_fpclassify (loc, exp);
11096 break;
11098 default:
11099 break;
11101 if (ret)
11103 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11104 SET_EXPR_LOCATION (ret, loc);
11105 TREE_NO_WARNING (ret) = 1;
11106 return ret;
11108 return NULL_TREE;
11111 /* Return true if FNDECL shouldn't be folded right now.
11112 If a built-in function has an always_inline inline wrapper,
11113 defer folding it until after always_inline functions have been
11114 inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
11115 might not be performed. */
11117 static bool
11118 avoid_folding_inline_builtin (tree fndecl)
11120 return (DECL_DECLARED_INLINE_P (fndecl)
11121 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11122 && cfun
11123 && !cfun->always_inline_functions_inlined
11124 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
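/* Editorial sketch (illustrative, not part of the sources): the wrappers this
   guard protects look roughly like the glibc -D_FORTIFY_SOURCE headers, e.g.

     extern __inline __attribute__ ((__always_inline__)) char *
     strcpy (char *__restrict dest, const char *__restrict src)
     {
       return __builtin___strcpy_chk (dest, src,
                                      __builtin_object_size (dest, 0));
     }

   Folding the strcpy call before such a wrapper has been inlined would skip
   the object-size check the wrapper exists to insert.  */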
11127 /* A wrapper function for builtin folding that prevents warnings for
11128 "statement without effect" and the like, caused by removing the
11129 call node earlier than the warning is generated. */
11131 tree
11132 fold_call_expr (location_t loc, tree exp, bool ignore)
11134 tree ret = NULL_TREE;
11135 tree fndecl = get_callee_fndecl (exp);
11136 if (fndecl
11137 && TREE_CODE (fndecl) == FUNCTION_DECL
11138 && DECL_BUILT_IN (fndecl)
11139 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11140 yet. Defer folding until we see all the arguments
11141 (after inlining). */
11142 && !CALL_EXPR_VA_ARG_PACK (exp))
11144 int nargs = call_expr_nargs (exp);
11146 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11147 instead last argument is __builtin_va_arg_pack (). Defer folding
11148 even in that case, until arguments are finalized. */
11149 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11151 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11152 if (fndecl2
11153 && TREE_CODE (fndecl2) == FUNCTION_DECL
11154 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11155 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11156 return NULL_TREE;
11159 if (avoid_folding_inline_builtin (fndecl))
11160 return NULL_TREE;
11162 /* FIXME: Don't use a list in this interface. */
11163 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11164 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
11165 else
11167 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11169 tree *args = CALL_EXPR_ARGP (exp);
11170 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11172 if (!ret)
11173 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11174 if (ret)
11175 return ret;
11178 return NULL_TREE;
11181 /* Conveniently construct a function call expression. FNDECL names the
11182 function to be called and ARGLIST is a TREE_LIST of arguments. */
11184 tree
11185 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
11187 tree fntype = TREE_TYPE (fndecl);
11188 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11189 int n = list_length (arglist);
11190 tree *argarray = (tree *) alloca (n * sizeof (tree));
11191 int i;
11193 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
11194 argarray[i] = TREE_VALUE (arglist);
11195 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11198 /* Conveniently construct a function call expression. FNDECL names the
11199 function to be called, N is the number of arguments, and the "..."
11200 parameters are the argument expressions. */
11202 tree
11203 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11205 va_list ap;
11206 tree fntype = TREE_TYPE (fndecl);
11207 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11208 tree *argarray = (tree *) alloca (n * sizeof (tree));
11209 int i;
11211 va_start (ap, n);
11212 for (i = 0; i < n; i++)
11213 argarray[i] = va_arg (ap, tree);
11214 va_end (ap);
11215 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
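/* Usage sketch (illustrative): this varargs interface is how the folders in
   this file cons up replacement calls, e.g.

     tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
     tree call = build_call_expr_loc (loc, fn, 1, src);

   builds (and immediately tries to fold) the tree for strlen (src) at
   location LOC.  */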
11218 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11219 N arguments are passed in the array ARGARRAY. */
11221 tree
11222 fold_builtin_call_array (location_t loc, tree type,
11223 tree fn,
11224 int n,
11225 tree *argarray)
11227 tree ret = NULL_TREE;
11228 int i;
11229 tree exp;
11231 if (TREE_CODE (fn) == ADDR_EXPR)
11233 tree fndecl = TREE_OPERAND (fn, 0);
11234 if (TREE_CODE (fndecl) == FUNCTION_DECL
11235 && DECL_BUILT_IN (fndecl))
11237 /* If last argument is __builtin_va_arg_pack (), arguments to this
11238 function are not finalized yet. Defer folding until they are. */
11239 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11241 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11242 if (fndecl2
11243 && TREE_CODE (fndecl2) == FUNCTION_DECL
11244 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11245 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11246 return build_call_array_loc (loc, type, fn, n, argarray);
11248 if (avoid_folding_inline_builtin (fndecl))
11249 return build_call_array_loc (loc, type, fn, n, argarray);
11250 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11252 tree arglist = NULL_TREE;
11253 for (i = n - 1; i >= 0; i--)
11254 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
11255 ret = targetm.fold_builtin (fndecl, arglist, false);
11256 if (ret)
11257 return ret;
11258 return build_call_array_loc (loc, type, fn, n, argarray);
11260 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11262 /* First try the transformations that don't require consing up
11263 an exp. */
11264 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11265 if (ret)
11266 return ret;
11269 /* If we got this far, we need to build an exp. */
11270 exp = build_call_array_loc (loc, type, fn, n, argarray);
11271 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11272 return ret ? ret : exp;
11276 return build_call_array_loc (loc, type, fn, n, argarray);
11279 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11280 along with N new arguments specified as the "..." parameters. SKIP
11281 is the number of arguments in EXP to be omitted. This function is used
11282 to do varargs-to-varargs transformations. */
11284 static tree
11285 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11287 int oldnargs = call_expr_nargs (exp);
11288 int nargs = oldnargs - skip + n;
11289 tree fntype = TREE_TYPE (fndecl);
11290 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11291 tree *buffer;
11293 if (n > 0)
11295 int i, j;
11296 va_list ap;
11298 buffer = XALLOCAVEC (tree, nargs);
11299 va_start (ap, n);
11300 for (i = 0; i < n; i++)
11301 buffer[i] = va_arg (ap, tree);
11302 va_end (ap);
11303 for (j = skip; j < oldnargs; j++, i++)
11304 buffer[i] = CALL_EXPR_ARG (exp, j);
11306 else
11307 buffer = CALL_EXPR_ARGP (exp) + skip;
11309 return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
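/* Illustrative example (editorial, with sprintf_fn standing for the sprintf
   FUNCTION_DECL): a varargs-to-varargs transformation such as

     __sprintf_chk (dest, flag, size, fmt, args...)
       ->  sprintf (dest, fmt, args...)

   can be written as rewrite_call_expr (loc, exp, 4, sprintf_fn, 2, dest, fmt):
   the four leading arguments of EXP are dropped, DEST and FMT are prepended,
   and any remaining "..." arguments are carried over unchanged.  */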
11312 /* Validate a single argument ARG against a tree code CODE representing
11313 a type. */
11315 static bool
11316 validate_arg (const_tree arg, enum tree_code code)
11318 if (!arg)
11319 return false;
11320 else if (code == POINTER_TYPE)
11321 return POINTER_TYPE_P (TREE_TYPE (arg));
11322 else if (code == INTEGER_TYPE)
11323 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11324 return code == TREE_CODE (TREE_TYPE (arg));
11327 /* This function validates the types of a function call argument list
11328 against a specified list of tree_codes. If the last specifier is a 0,
11329 that represents an ellipsis, otherwise the last specifier must be a
11330 VOID_TYPE.
11332 This is the GIMPLE version of validate_arglist. Eventually we want to
11333 completely convert builtins.c to work from GIMPLEs and the tree based
11334 validate_arglist will then be removed. */
11336 bool
11337 validate_gimple_arglist (const_gimple call, ...)
11339 enum tree_code code;
11340 bool res = 0;
11341 va_list ap;
11342 const_tree arg;
11343 size_t i;
11345 va_start (ap, call);
11346 i = 0;
11350 code = (enum tree_code) va_arg (ap, int);
11351 switch (code)
11353 case 0:
11354 /* This signifies an ellipsis; any further arguments are all ok. */
11355 res = true;
11356 goto end;
11357 case VOID_TYPE:
11358 /* This signifies an endlink, if no arguments remain, return
11359 true, otherwise return false. */
11360 res = (i == gimple_call_num_args (call));
11361 goto end;
11362 default:
11363 /* If no parameters remain or the parameter's code does not
11364 match the specified code, return false. Otherwise continue
11365 checking any remaining arguments. */
11366 arg = gimple_call_arg (call, i++);
11367 if (!validate_arg (arg, code))
11368 goto end;
11369 break;
11372 while (1);
11374 /* We need gotos here since we can only have one VA_CLOSE in a
11375 function. */
11376 end: ;
11377 va_end (ap);
11379 return res;
11382 /* This function validates the types of a function call argument list
11383 against a specified list of tree_codes. If the last specifier is a 0,
11384 that represents an ellipsis, otherwise the last specifier must be a
11385 VOID_TYPE. */
11387 bool
11388 validate_arglist (const_tree callexpr, ...)
11390 enum tree_code code;
11391 bool res = 0;
11392 va_list ap;
11393 const_call_expr_arg_iterator iter;
11394 const_tree arg;
11396 va_start (ap, callexpr);
11397 init_const_call_expr_arg_iterator (callexpr, &iter);
11401 code = (enum tree_code) va_arg (ap, int);
11402 switch (code)
11404 case 0:
11405 /* This signifies an ellipsis; any further arguments are all ok. */
11406 res = true;
11407 goto end;
11408 case VOID_TYPE:
11409 /* This signifies an endlink, if no arguments remain, return
11410 true, otherwise return false. */
11411 res = !more_const_call_expr_args_p (&iter);
11412 goto end;
11413 default:
11414 /* If no parameters remain or the parameter's code does not
11415 match the specified code, return false. Otherwise continue
11416 checking any remaining arguments. */
11417 arg = next_const_call_expr_arg (&iter);
11418 if (!validate_arg (arg, code))
11419 goto end;
11420 break;
11423 while (1);
11425 /* We need gotos here since we can only have one VA_CLOSE in a
11426 function. */
11427 end: ;
11428 va_end (ap);
11430 return res;
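/* Usage sketch (illustrative): a typical caller checks a fixed signature,
   with VOID_TYPE as the end marker, e.g.

     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
                            INTEGER_TYPE, VOID_TYPE))
       return NULL_RTX;

   accepts only pointer, pointer, integer argument lists (the shape of the
   memcpy-like builtins).  A trailing 0 instead of VOID_TYPE would allow any
   further arguments.  */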
11433 /* Default target-specific builtin expander that does nothing. */
11435 rtx
11436 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11437 rtx target ATTRIBUTE_UNUSED,
11438 rtx subtarget ATTRIBUTE_UNUSED,
11439 enum machine_mode mode ATTRIBUTE_UNUSED,
11440 int ignore ATTRIBUTE_UNUSED)
11442 return NULL_RTX;
11445 /* Returns true if EXP represents data that would potentially reside
11446 in a readonly section. */
11448 static bool
11449 readonly_data_expr (tree exp)
11451 STRIP_NOPS (exp);
11453 if (TREE_CODE (exp) != ADDR_EXPR)
11454 return false;
11456 exp = get_base_address (TREE_OPERAND (exp, 0));
11457 if (!exp)
11458 return false;
11460 /* Make sure we call decl_readonly_section only for trees it
11461 can handle (since it returns true for everything it doesn't
11462 understand). */
11463 if (TREE_CODE (exp) == STRING_CST
11464 || TREE_CODE (exp) == CONSTRUCTOR
11465 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11466 return decl_readonly_section (exp, 0);
11467 else
11468 return false;
11471 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11472 to the call, and TYPE is its return type.
11474 Return NULL_TREE if no simplification was possible, otherwise return the
11475 simplified form of the call as a tree.
11477 The simplified form may be a constant or other expression which
11478 computes the same value, but in a more efficient manner (including
11479 calls to other builtin functions).
11481 The call may contain arguments which need to be evaluated, but
11482 which are not useful to determine the result of the call. In
11483 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11484 COMPOUND_EXPR will be an argument which must be evaluated.
11485 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11486 COMPOUND_EXPR in the chain will contain the tree for the simplified
11487 form of the builtin function call. */
11489 static tree
11490 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11492 if (!validate_arg (s1, POINTER_TYPE)
11493 || !validate_arg (s2, POINTER_TYPE))
11494 return NULL_TREE;
11495 else
11497 tree fn;
11498 const char *p1, *p2;
11500 p2 = c_getstr (s2);
11501 if (p2 == NULL)
11502 return NULL_TREE;
11504 p1 = c_getstr (s1);
11505 if (p1 != NULL)
11507 const char *r = strstr (p1, p2);
11508 tree tem;
11510 if (r == NULL)
11511 return build_int_cst (TREE_TYPE (s1), 0);
11513 /* Return an offset into the constant string argument. */
11514 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11515 s1, size_int (r - p1));
11516 return fold_convert_loc (loc, type, tem);
11519 /* The argument is const char *, and the result is char *, so we need
11520 a type conversion here to avoid a warning. */
11521 if (p2[0] == '\0')
11522 return fold_convert_loc (loc, type, s1);
11524 if (p2[1] != '\0')
11525 return NULL_TREE;
11527 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11528 if (!fn)
11529 return NULL_TREE;
11531 /* New argument list transforming strstr(s1, s2) to
11532 strchr(s1, s2[0]). */
11533 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
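/* Illustrative effect of the strstr folding above (editorial example):

     strstr (s, "")        ->  (char *) s
     strstr (s, "q")       ->  strchr (s, 'q')
     strstr ("abcb", "b")  ->  "abcb" + 1
     strstr ("abc", "x")   ->  (char *) 0

   Other cases (an unknown needle, or a multi-character needle with an
   unknown haystack) are left to the library.  */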
11537 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11538 the call, and TYPE is its return type.
11540 Return NULL_TREE if no simplification was possible, otherwise return the
11541 simplified form of the call as a tree.
11543 The simplified form may be a constant or other expression which
11544 computes the same value, but in a more efficient manner (including
11545 calls to other builtin functions).
11547 The call may contain arguments which need to be evaluated, but
11548 which are not useful to determine the result of the call. In
11549 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11550 COMPOUND_EXPR will be an argument which must be evaluated.
11551 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11552 COMPOUND_EXPR in the chain will contain the tree for the simplified
11553 form of the builtin function call. */
11555 static tree
11556 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11558 if (!validate_arg (s1, POINTER_TYPE)
11559 || !validate_arg (s2, INTEGER_TYPE))
11560 return NULL_TREE;
11561 else
11563 const char *p1;
11565 if (TREE_CODE (s2) != INTEGER_CST)
11566 return NULL_TREE;
11568 p1 = c_getstr (s1);
11569 if (p1 != NULL)
11571 char c;
11572 const char *r;
11573 tree tem;
11575 if (target_char_cast (s2, &c))
11576 return NULL_TREE;
11578 r = strchr (p1, c);
11580 if (r == NULL)
11581 return build_int_cst (TREE_TYPE (s1), 0);
11583 /* Return an offset into the constant string argument. */
11584 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11585 s1, size_int (r - p1));
11586 return fold_convert_loc (loc, type, tem);
11588 return NULL_TREE;
11592 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11593 the call, and TYPE is its return type.
11595 Return NULL_TREE if no simplification was possible, otherwise return the
11596 simplified form of the call as a tree.
11598 The simplified form may be a constant or other expression which
11599 computes the same value, but in a more efficient manner (including
11600 calls to other builtin functions).
11602 The call may contain arguments which need to be evaluated, but
11603 which are not useful to determine the result of the call. In
11604 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11605 COMPOUND_EXPR will be an argument which must be evaluated.
11606 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11607 COMPOUND_EXPR in the chain will contain the tree for the simplified
11608 form of the builtin function call. */
11610 static tree
11611 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11613 if (!validate_arg (s1, POINTER_TYPE)
11614 || !validate_arg (s2, INTEGER_TYPE))
11615 return NULL_TREE;
11616 else
11618 tree fn;
11619 const char *p1;
11621 if (TREE_CODE (s2) != INTEGER_CST)
11622 return NULL_TREE;
11624 p1 = c_getstr (s1);
11625 if (p1 != NULL)
11627 char c;
11628 const char *r;
11629 tree tem;
11631 if (target_char_cast (s2, &c))
11632 return NULL_TREE;
11634 r = strrchr (p1, c);
11636 if (r == NULL)
11637 return build_int_cst (TREE_TYPE (s1), 0);
11639 /* Return an offset into the constant string argument. */
11640 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11641 s1, size_int (r - p1));
11642 return fold_convert_loc (loc, type, tem);
11645 if (! integer_zerop (s2))
11646 return NULL_TREE;
11648 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11649 if (!fn)
11650 return NULL_TREE;
11652 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11653 return build_call_expr_loc (loc, fn, 2, s1, s2);
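/* Illustrative effect of the strrchr folding above (editorial example):

     strrchr ("hello", 'l')  ->  "hello" + 3
     strrchr (s, '\0')       ->  strchr (s, '\0')

   With an unknown haystack only the search for the terminating NUL is
   simplified, since strchr and strrchr agree on that case.  */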
11657 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11658 to the call, and TYPE is its return type.
11660 Return NULL_TREE if no simplification was possible, otherwise return the
11661 simplified form of the call as a tree.
11663 The simplified form may be a constant or other expression which
11664 computes the same value, but in a more efficient manner (including
11665 calls to other builtin functions).
11667 The call may contain arguments which need to be evaluated, but
11668 which are not useful to determine the result of the call. In
11669 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11670 COMPOUND_EXPR will be an argument which must be evaluated.
11671 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11672 COMPOUND_EXPR in the chain will contain the tree for the simplified
11673 form of the builtin function call. */
11675 static tree
11676 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11678 if (!validate_arg (s1, POINTER_TYPE)
11679 || !validate_arg (s2, POINTER_TYPE))
11680 return NULL_TREE;
11681 else
11683 tree fn;
11684 const char *p1, *p2;
11686 p2 = c_getstr (s2);
11687 if (p2 == NULL)
11688 return NULL_TREE;
11690 p1 = c_getstr (s1);
11691 if (p1 != NULL)
11693 const char *r = strpbrk (p1, p2);
11694 tree tem;
11696 if (r == NULL)
11697 return build_int_cst (TREE_TYPE (s1), 0);
11699 /* Return an offset into the constant string argument. */
11700 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11701 s1, size_int (r - p1));
11702 return fold_convert_loc (loc, type, tem);
11705 if (p2[0] == '\0')
11706 /* strpbrk(x, "") == NULL.
11707 Evaluate and ignore s1 in case it had side-effects. */
11708 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11710 if (p2[1] != '\0')
11711 return NULL_TREE; /* Really call strpbrk. */
11713 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11714 if (!fn)
11715 return NULL_TREE;
11717 /* New argument list transforming strpbrk(s1, s2) to
11718 strchr(s1, s2[0]). */
11719 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
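/* Illustrative effect of the strpbrk folding above (editorial example):

     strpbrk ("abcb", "b")  ->  "abcb" + 1
     strpbrk (s, "")        ->  (char *) 0      (s still evaluated)
     strpbrk (s, "q")       ->  strchr (s, 'q')

   Multi-character accept sets with an unknown S1 really call strpbrk.  */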
11723 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11724 to the call.
11726 Return NULL_TREE if no simplification was possible, otherwise return the
11727 simplified form of the call as a tree.
11729 The simplified form may be a constant or other expression which
11730 computes the same value, but in a more efficient manner (including
11731 calls to other builtin functions).
11733 The call may contain arguments which need to be evaluated, but
11734 which are not useful to determine the result of the call. In
11735 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11736 COMPOUND_EXPR will be an argument which must be evaluated.
11737 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11738 COMPOUND_EXPR in the chain will contain the tree for the simplified
11739 form of the builtin function call. */
11741 static tree
11742 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11744 if (!validate_arg (dst, POINTER_TYPE)
11745 || !validate_arg (src, POINTER_TYPE))
11746 return NULL_TREE;
11747 else
11749 const char *p = c_getstr (src);
11751 /* If the string length is zero, return the dst parameter. */
11752 if (p && *p == '\0')
11753 return dst;
11755 return NULL_TREE;
11759 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11760 arguments to the call.
11762 Return NULL_TREE if no simplification was possible, otherwise return the
11763 simplified form of the call as a tree.
11765 The simplified form may be a constant or other expression which
11766 computes the same value, but in a more efficient manner (including
11767 calls to other builtin functions).
11769 The call may contain arguments which need to be evaluated, but
11770 which are not useful to determine the result of the call. In
11771 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11772 COMPOUND_EXPR will be an argument which must be evaluated.
11773 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11774 COMPOUND_EXPR in the chain will contain the tree for the simplified
11775 form of the builtin function call. */
11777 static tree
11778 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11780 if (!validate_arg (dst, POINTER_TYPE)
11781 || !validate_arg (src, POINTER_TYPE)
11782 || !validate_arg (len, INTEGER_TYPE))
11783 return NULL_TREE;
11784 else
11786 const char *p = c_getstr (src);
11788 /* If the requested length is zero, or the src parameter string
11789 length is zero, return the dst parameter. */
11790 if (integer_zerop (len) || (p && *p == '\0'))
11791 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11793 /* If the requested len is greater than or equal to the string
11794 length, call strcat. */
11795 if (TREE_CODE (len) == INTEGER_CST && p
11796 && compare_tree_int (len, strlen (p)) >= 0)
11798 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11800 /* If the replacement _DECL isn't initialized, don't do the
11801 transformation. */
11802 if (!fn)
11803 return NULL_TREE;
11805 return build_call_expr_loc (loc, fn, 2, dst, src);
11807 return NULL_TREE;
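/* Illustrative effect of the strncat folding above (editorial example):

     strncat (d, s, 0)      ->  d                     (s still evaluated)
     strncat (d, "", n)     ->  d                     (n still evaluated)
     strncat (d, "abc", 5)  ->  strcat (d, "abc")     since 5 >= strlen ("abc")
*/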
11811 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11812 to the call.
11814 Return NULL_TREE if no simplification was possible, otherwise return the
11815 simplified form of the call as a tree.
11817 The simplified form may be a constant or other expression which
11818 computes the same value, but in a more efficient manner (including
11819 calls to other builtin functions).
11821 The call may contain arguments which need to be evaluated, but
11822 which are not useful to determine the result of the call. In
11823 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11824 COMPOUND_EXPR will be an argument which must be evaluated.
11825 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11826 COMPOUND_EXPR in the chain will contain the tree for the simplified
11827 form of the builtin function call. */
11829 static tree
11830 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11832 if (!validate_arg (s1, POINTER_TYPE)
11833 || !validate_arg (s2, POINTER_TYPE))
11834 return NULL_TREE;
11835 else
11837 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11839 /* If both arguments are constants, evaluate at compile-time. */
11840 if (p1 && p2)
11842 const size_t r = strspn (p1, p2);
11843 return size_int (r);
11846 /* If either argument is "", the result is 0. */
11847 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11848 /* Evaluate and ignore both arguments in case either one has
11849 side-effects. */
11850 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11851 s1, s2);
11852 return NULL_TREE;
11856 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11857 to the call.
11859 Return NULL_TREE if no simplification was possible, otherwise return the
11860 simplified form of the call as a tree.
11862 The simplified form may be a constant or other expression which
11863 computes the same value, but in a more efficient manner (including
11864 calls to other builtin functions).
11866 The call may contain arguments which need to be evaluated, but
11867 which are not useful to determine the result of the call. In
11868 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11869 COMPOUND_EXPR will be an argument which must be evaluated.
11870 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11871 COMPOUND_EXPR in the chain will contain the tree for the simplified
11872 form of the builtin function call. */
11874 static tree
11875 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11877 if (!validate_arg (s1, POINTER_TYPE)
11878 || !validate_arg (s2, POINTER_TYPE))
11879 return NULL_TREE;
11880 else
11882 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11884 /* If both arguments are constants, evaluate at compile-time. */
11885 if (p1 && p2)
11887 const size_t r = strcspn (p1, p2);
11888 return size_int (r);
11891 /* If the first argument is "", the result is 0. */
11892 if (p1 && *p1 == '\0')
11894 /* Evaluate and ignore argument s2 in case it has
11895 side-effects. */
11896 return omit_one_operand_loc (loc, size_type_node,
11897 size_zero_node, s2);
11900 /* If the second argument is "", return __builtin_strlen(s1). */
11901 if (p2 && *p2 == '\0')
11903 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11905 /* If the replacement _DECL isn't initialized, don't do the
11906 transformation. */
11907 if (!fn)
11908 return NULL_TREE;
11910 return build_call_expr_loc (loc, fn, 1, s1);
11912 return NULL_TREE;
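/* Illustrative effect of the strcspn folding above (editorial example):

     strcspn ("banana", "n")  ->  2
     strcspn ("", s)          ->  0               (s still evaluated)
     strcspn (s, "")          ->  strlen (s)
*/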
11916 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11917 to the call. IGNORE is true if the value returned
11918 by the builtin will be ignored. UNLOCKED is true if this is
11919 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11920 the known length of the string. Return NULL_TREE if no simplification
11921 was possible. */
11923 tree
11924 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11925 bool ignore, bool unlocked, tree len)
11927 /* If we're using an unlocked function, assume the other unlocked
11928 functions exist explicitly. */
11929 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11930 : implicit_built_in_decls[BUILT_IN_FPUTC];
11931 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11932 : implicit_built_in_decls[BUILT_IN_FWRITE];
11934 /* If the return value is used, don't do the transformation. */
11935 if (!ignore)
11936 return NULL_TREE;
11938 /* Verify the arguments in the original call. */
11939 if (!validate_arg (arg0, POINTER_TYPE)
11940 || !validate_arg (arg1, POINTER_TYPE))
11941 return NULL_TREE;
11943 if (! len)
11944 len = c_strlen (arg0, 0);
11946 /* Get the length of the string passed to fputs. If the length
11947 can't be determined, punt. */
11948 if (!len
11949 || TREE_CODE (len) != INTEGER_CST)
11950 return NULL_TREE;
11952 switch (compare_tree_int (len, 1))
11954 case -1: /* length is 0, delete the call entirely. */
11955 return omit_one_operand_loc (loc, integer_type_node,
11956 integer_zero_node, arg1);
11958 case 0: /* length is 1, call fputc. */
11960 const char *p = c_getstr (arg0);
11962 if (p != NULL)
11964 if (fn_fputc)
11965 return build_call_expr_loc (loc, fn_fputc, 2,
11966 build_int_cst (NULL_TREE, p[0]), arg1);
11967 else
11968 return NULL_TREE;
11971 /* FALLTHROUGH */
11972 case 1: /* length is greater than 1, call fwrite. */
11974 /* If optimizing for size keep fputs. */
11975 if (optimize_function_for_size_p (cfun))
11976 return NULL_TREE;
11977 /* New argument list transforming fputs(string, stream) to
11978 fwrite(string, 1, len, stream). */
11979 if (fn_fwrite)
11980 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11981 size_one_node, len, arg1);
11982 else
11983 return NULL_TREE;
11985 default:
11986 gcc_unreachable ();
11988 return NULL_TREE;
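/* Illustrative effect of the fputs folding above (editorial example); it is
   only applied when the return value of fputs is ignored:

     fputs ("", f)    ->  0                         (f still evaluated)
     fputs ("x", f)   ->  fputc ('x', f)
     fputs ("xy", f)  ->  fwrite ("xy", 1, 2, f)    unless optimizing for size
*/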
11991 /* Fold the next_arg or va_start call EXP. Returns true if an error was
11992 produced, false otherwise. This is done so that we don't output the error
11993 or warning two or three times. */
11995 bool
11996 fold_builtin_next_arg (tree exp, bool va_start_p)
11998 tree fntype = TREE_TYPE (current_function_decl);
11999 int nargs = call_expr_nargs (exp);
12000 tree arg;
12002 if (TYPE_ARG_TYPES (fntype) == 0
12003 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
12004 == void_type_node))
12006 error ("%<va_start%> used in function with fixed args");
12007 return true;
12010 if (va_start_p)
12012 if (va_start_p && (nargs != 2))
12014 error ("wrong number of arguments to function %<va_start%>");
12015 return true;
12017 arg = CALL_EXPR_ARG (exp, 1);
12019 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12020 when we checked the arguments and if needed issued a warning. */
12021 else
12023 if (nargs == 0)
12025 /* Evidently an out of date version of <stdarg.h>; can't validate
12026 va_start's second argument, but can still work as intended. */
12027 warning (0, "%<__builtin_next_arg%> called without an argument");
12028 return true;
12030 else if (nargs > 1)
12032 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12033 return true;
12035 arg = CALL_EXPR_ARG (exp, 0);
12038 if (TREE_CODE (arg) == SSA_NAME)
12039 arg = SSA_NAME_VAR (arg);
12041 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12042 or __builtin_next_arg (0) the first time we see it, after checking
12043 the arguments and if needed issuing a warning. */
12044 if (!integer_zerop (arg))
12046 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12048 /* Strip off all nops for the sake of the comparison. This
12049 is not quite the same as STRIP_NOPS. It does more.
12050 We must also strip off INDIRECT_EXPR for C++ reference
12051 parameters. */
12052 while (CONVERT_EXPR_P (arg)
12053 || TREE_CODE (arg) == INDIRECT_REF)
12054 arg = TREE_OPERAND (arg, 0);
12055 if (arg != last_parm)
12057 /* FIXME: Sometimes with the tree optimizers we can get something
12058 other than the last argument even though the user used the last
12059 argument. We just warn and continue, so wrong code
12060 may be generated because of
12061 it. */
12062 warning (0, "second parameter of %<va_start%> not last named argument");
12065 /* Undefined by C99 7.15.1.4p4 (va_start):
12066 "If the parameter parmN is declared with the register storage
12067 class, with a function or array type, or with a type that is
12068 not compatible with the type that results after application of
12069 the default argument promotions, the behavior is undefined."
12071 else if (DECL_REGISTER (arg))
12072 warning (0, "undefined behaviour when second parameter of "
12073 "%<va_start%> is declared with %<register%> storage");
12075 /* We want to verify the second parameter just once before the tree
12076 optimizers are run and then avoid keeping it in the tree,
12077 as otherwise we could warn even for correct code like:
12078 void foo (int i, ...)
12079 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12080 if (va_start_p)
12081 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12082 else
12083 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12085 return false;
12089 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12090 ORIG may be null if this is a 2-argument call. We don't attempt to
12091 simplify calls with more than 3 arguments.
12093 Return NULL_TREE if no simplification was possible, otherwise return the
12094 simplified form of the call as a tree. If IGNORED is true, it means that
12095 the caller does not use the returned value of the function. */
12097 static tree
12098 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12099 tree orig, int ignored)
12101 tree call, retval;
12102 const char *fmt_str = NULL;
12104 /* Verify the required arguments in the original call. We deal with two
12105 types of sprintf() calls: 'sprintf (str, fmt)' and
12106 'sprintf (dest, "%s", orig)'. */
12107 if (!validate_arg (dest, POINTER_TYPE)
12108 || !validate_arg (fmt, POINTER_TYPE))
12109 return NULL_TREE;
12110 if (orig && !validate_arg (orig, POINTER_TYPE))
12111 return NULL_TREE;
12113 /* Check whether the format is a literal string constant. */
12114 fmt_str = c_getstr (fmt);
12115 if (fmt_str == NULL)
12116 return NULL_TREE;
12118 call = NULL_TREE;
12119 retval = NULL_TREE;
12121 if (!init_target_chars ())
12122 return NULL_TREE;
12124 /* If the format doesn't contain % args or %%, use strcpy. */
12125 if (strchr (fmt_str, target_percent) == NULL)
12127 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
12129 if (!fn)
12130 return NULL_TREE;
12132 /* Don't optimize sprintf (buf, "abc", ptr++). */
12133 if (orig)
12134 return NULL_TREE;
12136 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12137 'format' is known to contain no % formats. */
12138 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12139 if (!ignored)
12140 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
12143 /* If the format is "%s", use strcpy if the result isn't used. */
12144 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12146 tree fn;
12147 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
12149 if (!fn)
12150 return NULL_TREE;
12152 /* Don't crash on sprintf (str1, "%s"). */
12153 if (!orig)
12154 return NULL_TREE;
12156 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12157 if (!ignored)
12159 retval = c_strlen (orig, 1);
12160 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12161 return NULL_TREE;
12163 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12166 if (call && retval)
12168 retval = fold_convert_loc
12169 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
12170 retval);
12171 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12173 else
12174 return call;
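/* Illustrative effect of the sprintf folding above (editorial example):

     sprintf (d, "hello")  ->  strcpy (d, "hello")   [result 5 if it is used]
     sprintf (d, "%s", s)  ->  strcpy (d, s)         [result kept only when
                                                       strlen (s) is constant]

   Formats containing other % directives are not touched here.  */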
12177 /* Expand a call EXP to __builtin_object_size. */
12179 static rtx
12180 expand_builtin_object_size (tree exp)
12182 tree ost;
12183 int object_size_type;
12184 tree fndecl = get_callee_fndecl (exp);
12186 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12188 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12189 exp, fndecl);
12190 expand_builtin_trap ();
12191 return const0_rtx;
12194 ost = CALL_EXPR_ARG (exp, 1);
12195 STRIP_NOPS (ost);
12197 if (TREE_CODE (ost) != INTEGER_CST
12198 || tree_int_cst_sgn (ost) < 0
12199 || compare_tree_int (ost, 3) > 0)
12201 error ("%Klast argument of %D is not integer constant between 0 and 3",
12202 exp, fndecl);
12203 expand_builtin_trap ();
12204 return const0_rtx;
12207 object_size_type = tree_low_cst (ost, 0);
12209 return object_size_type < 2 ? constm1_rtx : const0_rtx;
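/* Illustrative behaviour (editorial note): if a __builtin_object_size call
   survives to RTL expansion, i.e. the object-size pass could not fold it
   earlier, the expander above yields the documented "unknown" answers:

     __builtin_object_size (p, 0)  ->  (size_t) -1
     __builtin_object_size (p, 2)  ->  (size_t) 0
*/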
12212 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12213 FCODE is the BUILT_IN_* to use.
12214 Return NULL_RTX if we failed; the caller should emit a normal call,
12215 otherwise try to get the result in TARGET, if convenient (and in
12216 mode MODE if that's convenient). */
12218 static rtx
12219 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12220 enum built_in_function fcode)
12222 tree dest, src, len, size;
12224 if (!validate_arglist (exp,
12225 POINTER_TYPE,
12226 fcode == BUILT_IN_MEMSET_CHK
12227 ? INTEGER_TYPE : POINTER_TYPE,
12228 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12229 return NULL_RTX;
12231 dest = CALL_EXPR_ARG (exp, 0);
12232 src = CALL_EXPR_ARG (exp, 1);
12233 len = CALL_EXPR_ARG (exp, 2);
12234 size = CALL_EXPR_ARG (exp, 3);
12236 if (! host_integerp (size, 1))
12237 return NULL_RTX;
12239 if (host_integerp (len, 1) || integer_all_onesp (size))
12241 tree fn;
12243 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12245 warning_at (tree_nonartificial_location (exp),
12246 0, "%Kcall to %D will always overflow destination buffer",
12247 exp, get_callee_fndecl (exp));
12248 return NULL_RTX;
12251 fn = NULL_TREE;
12252 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12253 mem{cpy,pcpy,move,set} is available. */
12254 switch (fcode)
12256 case BUILT_IN_MEMCPY_CHK:
12257 fn = built_in_decls[BUILT_IN_MEMCPY];
12258 break;
12259 case BUILT_IN_MEMPCPY_CHK:
12260 fn = built_in_decls[BUILT_IN_MEMPCPY];
12261 break;
12262 case BUILT_IN_MEMMOVE_CHK:
12263 fn = built_in_decls[BUILT_IN_MEMMOVE];
12264 break;
12265 case BUILT_IN_MEMSET_CHK:
12266 fn = built_in_decls[BUILT_IN_MEMSET];
12267 break;
12268 default:
12269 break;
12272 if (! fn)
12273 return NULL_RTX;
12275 fn = build_call_expr (fn, 3, dest, src, len);
12276 STRIP_TYPE_NOPS (fn);
12277 while (TREE_CODE (fn) == COMPOUND_EXPR)
12279 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12280 EXPAND_NORMAL);
12281 fn = TREE_OPERAND (fn, 1);
12283 if (TREE_CODE (fn) == CALL_EXPR)
12284 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12285 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12287 else if (fcode == BUILT_IN_MEMSET_CHK)
12288 return NULL_RTX;
12289 else
12291 unsigned int dest_align
12292 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
12294 /* If DEST is not a pointer type, call the normal function. */
12295 if (dest_align == 0)
12296 return NULL_RTX;
12298 /* If SRC and DEST are the same (and not volatile), do nothing. */
12299 if (operand_equal_p (src, dest, 0))
12301 tree expr;
12303 if (fcode != BUILT_IN_MEMPCPY_CHK)
12305 /* Evaluate and ignore LEN in case it has side-effects. */
12306 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12307 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12310 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12311 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12314 /* __memmove_chk special case. */
12315 if (fcode == BUILT_IN_MEMMOVE_CHK)
12317 unsigned int src_align
12318 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
12320 if (src_align == 0)
12321 return NULL_RTX;
12323 /* If src is categorized for a readonly section we can use
12324 normal __memcpy_chk. */
12325 if (readonly_data_expr (src))
12327 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12328 if (!fn)
12329 return NULL_RTX;
12330 fn = build_call_expr (fn, 4, dest, src, len, size);
12331 STRIP_TYPE_NOPS (fn);
12332 while (TREE_CODE (fn) == COMPOUND_EXPR)
12334 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12335 EXPAND_NORMAL);
12336 fn = TREE_OPERAND (fn, 1);
12338 if (TREE_CODE (fn) == CALL_EXPR)
12339 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12340 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12343 return NULL_RTX;
12347 /* Emit warning if a buffer overflow is detected at compile time. */
12349 static void
12350 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12352 int is_strlen = 0;
12353 tree len, size;
12354 location_t loc = tree_nonartificial_location (exp);
12356 switch (fcode)
12358 case BUILT_IN_STRCPY_CHK:
12359 case BUILT_IN_STPCPY_CHK:
12360 /* For __strcat_chk the warning will be emitted only if overflowing
12361 by at least strlen (dest) + 1 bytes. */
12362 case BUILT_IN_STRCAT_CHK:
12363 len = CALL_EXPR_ARG (exp, 1);
12364 size = CALL_EXPR_ARG (exp, 2);
12365 is_strlen = 1;
12366 break;
12367 case BUILT_IN_STRNCAT_CHK:
12368 case BUILT_IN_STRNCPY_CHK:
12369 len = CALL_EXPR_ARG (exp, 2);
12370 size = CALL_EXPR_ARG (exp, 3);
12371 break;
12372 case BUILT_IN_SNPRINTF_CHK:
12373 case BUILT_IN_VSNPRINTF_CHK:
12374 len = CALL_EXPR_ARG (exp, 1);
12375 size = CALL_EXPR_ARG (exp, 3);
12376 break;
12377 default:
12378 gcc_unreachable ();
12381 if (!len || !size)
12382 return;
12384 if (! host_integerp (size, 1) || integer_all_onesp (size))
12385 return;
12387 if (is_strlen)
12389 len = c_strlen (len, 1);
12390 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12391 return;
12393 else if (fcode == BUILT_IN_STRNCAT_CHK)
12395 tree src = CALL_EXPR_ARG (exp, 1);
12396 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12397 return;
12398 src = c_strlen (src, 1);
12399 if (! src || ! host_integerp (src, 1))
12401 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12402 exp, get_callee_fndecl (exp));
12403 return;
12405 else if (tree_int_cst_lt (src, size))
12406 return;
12408 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12409 return;
12411 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12412 exp, get_callee_fndecl (exp));
12415 /* Emit warning if a buffer overflow is detected at compile time
12416 in __sprintf_chk/__vsprintf_chk calls. */
12418 static void
12419 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12421 tree dest, size, len, fmt, flag;
12422 const char *fmt_str;
12423 int nargs = call_expr_nargs (exp);
12425 /* Verify the required arguments in the original call. */
12427 if (nargs < 4)
12428 return;
12429 dest = CALL_EXPR_ARG (exp, 0);
12430 flag = CALL_EXPR_ARG (exp, 1);
12431 size = CALL_EXPR_ARG (exp, 2);
12432 fmt = CALL_EXPR_ARG (exp, 3);
12434 if (! host_integerp (size, 1) || integer_all_onesp (size))
12435 return;
12437 /* Check whether the format is a literal string constant. */
12438 fmt_str = c_getstr (fmt);
12439 if (fmt_str == NULL)
12440 return;
12442 if (!init_target_chars ())
12443 return;
12445 /* If the format doesn't contain % args or %%, we know its size. */
12446 if (strchr (fmt_str, target_percent) == 0)
12447 len = build_int_cstu (size_type_node, strlen (fmt_str));
12448 /* If the format is "%s" and first ... argument is a string literal,
12449 we know it too. */
12450 else if (fcode == BUILT_IN_SPRINTF_CHK
12451 && strcmp (fmt_str, target_percent_s) == 0)
12453 tree arg;
12455 if (nargs < 5)
12456 return;
12457 arg = CALL_EXPR_ARG (exp, 4);
12458 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12459 return;
12461 len = c_strlen (arg, 1);
12462 if (!len || ! host_integerp (len, 1))
12463 return;
12465 else
12466 return;
12468 if (! tree_int_cst_lt (len, size))
12469 warning_at (tree_nonartificial_location (exp),
12470 0, "%Kcall to %D will always overflow destination buffer",
12471 exp, get_callee_fndecl (exp));
12474 /* Emit warning if a free is called with address of a variable. */
12476 static void
12477 maybe_emit_free_warning (tree exp)
12479 tree arg = CALL_EXPR_ARG (exp, 0);
12481 STRIP_NOPS (arg);
12482 if (TREE_CODE (arg) != ADDR_EXPR)
12483 return;
12485 arg = get_base_address (TREE_OPERAND (arg, 0));
12486 if (arg == NULL || INDIRECT_REF_P (arg))
12487 return;
12489 if (SSA_VAR_P (arg))
12490 warning_at (tree_nonartificial_location (exp),
12491 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12492 else
12493 warning_at (tree_nonartificial_location (exp),
12494 0, "%Kattempt to free a non-heap object", exp);
12497 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12498 if possible. */
12500 tree
12501 fold_builtin_object_size (tree ptr, tree ost)
12503 tree ret = NULL_TREE;
12504 int object_size_type;
12506 if (!validate_arg (ptr, POINTER_TYPE)
12507 || !validate_arg (ost, INTEGER_TYPE))
12508 return NULL_TREE;
12510 STRIP_NOPS (ost);
12512 if (TREE_CODE (ost) != INTEGER_CST
12513 || tree_int_cst_sgn (ost) < 0
12514 || compare_tree_int (ost, 3) > 0)
12515 return NULL_TREE;
12517 object_size_type = tree_low_cst (ost, 0);
12519 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12520 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12521 and (size_t) 0 for types 2 and 3. */
12522 if (TREE_SIDE_EFFECTS (ptr))
12523 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12525 if (TREE_CODE (ptr) == ADDR_EXPR)
12526 ret = build_int_cstu (size_type_node,
12527 compute_builtin_object_size (ptr, object_size_type));
12529 else if (TREE_CODE (ptr) == SSA_NAME)
12531 unsigned HOST_WIDE_INT bytes;
12533 /* If object size is not known yet, delay folding until
12534 later. Maybe subsequent passes will help determining
12535 it. */
12536 bytes = compute_builtin_object_size (ptr, object_size_type);
12537 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12538 ? -1 : 0))
12539 ret = build_int_cstu (size_type_node, bytes);
12542 if (ret)
12544 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12545 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12546 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12547 ret = NULL_TREE;
12550 return ret;
12553 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12554 DEST, SRC, LEN, and SIZE are the arguments to the call.
12555 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
12556 code of the builtin. If MAXLEN is not NULL, it is maximum length
12557 passed as third argument. */
12559 tree
12560 fold_builtin_memory_chk (location_t loc, tree fndecl,
12561 tree dest, tree src, tree len, tree size,
12562 tree maxlen, bool ignore,
12563 enum built_in_function fcode)
12565 tree fn;
12567 if (!validate_arg (dest, POINTER_TYPE)
12568 || !validate_arg (src,
12569 (fcode == BUILT_IN_MEMSET_CHK
12570 ? INTEGER_TYPE : POINTER_TYPE))
12571 || !validate_arg (len, INTEGER_TYPE)
12572 || !validate_arg (size, INTEGER_TYPE))
12573 return NULL_TREE;
12575 /* If SRC and DEST are the same (and not volatile), return DEST
12576 (resp. DEST+LEN for __mempcpy_chk). */
12577 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12579 if (fcode != BUILT_IN_MEMPCPY_CHK)
12580 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12581 dest, len);
12582 else
12584 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
12585 dest, len);
12586 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12590 if (! host_integerp (size, 1))
12591 return NULL_TREE;
12593 if (! integer_all_onesp (size))
12595 if (! host_integerp (len, 1))
12597 /* If LEN is not constant, try MAXLEN too.
12598 For MAXLEN only allow optimizing into non-_ocs function
12599 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12600 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12602 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12604 /* (void) __mempcpy_chk () can be optimized into
12605 (void) __memcpy_chk (). */
12606 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12607 if (!fn)
12608 return NULL_TREE;
12610 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12612 return NULL_TREE;
12615 else
12616 maxlen = len;
12618 if (tree_int_cst_lt (size, maxlen))
12619 return NULL_TREE;
12622 fn = NULL_TREE;
12623 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12624 mem{cpy,pcpy,move,set} is available. */
12625 switch (fcode)
12627 case BUILT_IN_MEMCPY_CHK:
12628 fn = built_in_decls[BUILT_IN_MEMCPY];
12629 break;
12630 case BUILT_IN_MEMPCPY_CHK:
12631 fn = built_in_decls[BUILT_IN_MEMPCPY];
12632 break;
12633 case BUILT_IN_MEMMOVE_CHK:
12634 fn = built_in_decls[BUILT_IN_MEMMOVE];
12635 break;
12636 case BUILT_IN_MEMSET_CHK:
12637 fn = built_in_decls[BUILT_IN_MEMSET];
12638 break;
12639 default:
12640 break;
12643 if (!fn)
12644 return NULL_TREE;
12646 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12649 /* Fold a call to the __st[rp]cpy_chk builtin.
12650 DEST, SRC, and SIZE are the arguments to the call.
12651 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12652 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12653 strings passed as second argument. */
12655 tree
12656 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12657 tree src, tree size,
12658 tree maxlen, bool ignore,
12659 enum built_in_function fcode)
12661 tree len, fn;
12663 if (!validate_arg (dest, POINTER_TYPE)
12664 || !validate_arg (src, POINTER_TYPE)
12665 || !validate_arg (size, INTEGER_TYPE))
12666 return NULL_TREE;
12668 /* If SRC and DEST are the same (and not volatile), return DEST. */
12669 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12670 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12672 if (! host_integerp (size, 1))
12673 return NULL_TREE;
12675 if (! integer_all_onesp (size))
12677 len = c_strlen (src, 1);
12678 if (! len || ! host_integerp (len, 1))
12680 /* If LEN is not constant, try MAXLEN too.
12681 For MAXLEN only allow optimizing into non-_ocs function
12682 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12683 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12685 if (fcode == BUILT_IN_STPCPY_CHK)
12687 if (! ignore)
12688 return NULL_TREE;
12690 /* If return value of __stpcpy_chk is ignored,
12691 optimize into __strcpy_chk. */
12692 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12693 if (!fn)
12694 return NULL_TREE;
12696 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12699 if (! len || TREE_SIDE_EFFECTS (len))
12700 return NULL_TREE;
12702 /* If c_strlen returned something, but not a constant,
12703 transform __strcpy_chk into __memcpy_chk. */
12704 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12705 if (!fn)
12706 return NULL_TREE;
12708 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12709 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12710 build_call_expr_loc (loc, fn, 4,
12711 dest, src, len, size));
12714 else
12715 maxlen = len;
12717 if (! tree_int_cst_lt (maxlen, size))
12718 return NULL_TREE;
12721 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12722 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12723 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12724 if (!fn)
12725 return NULL_TREE;
12727 return build_call_expr_loc (loc, fn, 2, dest, src);
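/* Illustrative effect of the __st[rp]cpy_chk folding above (editorial
   example):

     __strcpy_chk (d, d, n)         ->  d
     __strcpy_chk (d, "abc", 8)     ->  strcpy (d, "abc")    since 3 < 8
     (void) __stpcpy_chk (d, s, n)  ->  (void) __strcpy_chk (d, s, n)
                                         when strlen (s) is unknown
*/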
12730 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12731 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12732 length passed as third argument. */
12734 tree
12735 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12736 tree len, tree size, tree maxlen)
12738 tree fn;
12740 if (!validate_arg (dest, POINTER_TYPE)
12741 || !validate_arg (src, POINTER_TYPE)
12742 || !validate_arg (len, INTEGER_TYPE)
12743 || !validate_arg (size, INTEGER_TYPE))
12744 return NULL_TREE;
12746 if (! host_integerp (size, 1))
12747 return NULL_TREE;
12749 if (! integer_all_onesp (size))
12751 if (! host_integerp (len, 1))
12753 /* If LEN is not constant, try MAXLEN too.
12754 For MAXLEN only allow optimizing into non-_ocs function
12755 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12756 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12757 return NULL_TREE;
12759 else
12760 maxlen = len;
12762 if (tree_int_cst_lt (size, maxlen))
12763 return NULL_TREE;
12766 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12767 fn = built_in_decls[BUILT_IN_STRNCPY];
12768 if (!fn)
12769 return NULL_TREE;
12771 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12774 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12775 are the arguments to the call. */
12777 static tree
12778 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12779 tree src, tree size)
12781 tree fn;
12782 const char *p;
12784 if (!validate_arg (dest, POINTER_TYPE)
12785 || !validate_arg (src, POINTER_TYPE)
12786 || !validate_arg (size, INTEGER_TYPE))
12787 return NULL_TREE;
12789 p = c_getstr (src);
12790 /* If the SRC parameter is "", return DEST. */
12791 if (p && *p == '\0')
12792 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12794 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12795 return NULL_TREE;
12797 /* If __builtin_strcat_chk is used, assume strcat is available. */
12798 fn = built_in_decls[BUILT_IN_STRCAT];
12799 if (!fn)
12800 return NULL_TREE;
12802 return build_call_expr_loc (loc, fn, 2, dest, src);
12805 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12806 LEN, and SIZE. */
12808 static tree
12809 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12810 tree dest, tree src, tree len, tree size)
12812 tree fn;
12813 const char *p;
12815 if (!validate_arg (dest, POINTER_TYPE)
12816 || !validate_arg (src, POINTER_TYPE)
12817 || !validate_arg (len, INTEGER_TYPE)
12818 || !validate_arg (size, INTEGER_TYPE))
12819 return NULL_TREE;
12821 p = c_getstr (src);
12822 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12823 if (p && *p == '\0')
12824 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12825 else if (integer_zerop (len))
12826 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12828 if (! host_integerp (size, 1))
12829 return NULL_TREE;
12831 if (! integer_all_onesp (size))
12833 tree src_len = c_strlen (src, 1);
12834 if (src_len
12835 && host_integerp (src_len, 1)
12836 && host_integerp (len, 1)
12837 && ! tree_int_cst_lt (len, src_len))
12839 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12840 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12841 if (!fn)
12842 return NULL_TREE;
12844 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12846 return NULL_TREE;
12849 /* If __builtin_strncat_chk is used, assume strncat is available. */
12850 fn = built_in_decls[BUILT_IN_STRNCAT];
12851 if (!fn)
12852 return NULL_TREE;
12854 return build_call_expr_loc (loc, fn, 3, dest, src, len);
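/* Editor's sketch (compiled out, not part of GCC): user-level forms of the
   two __str{,n}cat_chk folds above.  An empty source string folds the call
   away entirely, and an unknown object size (all-ones SIZE) degrades the
   checked call to plain strcat/strncat.  */
#if 0
#include <string.h>

static void
demo_strcat_chk (char *dst)
{
  __builtin___strcat_chk (dst, "", (size_t) -1);        /* folds to DST      */
  __builtin___strncat_chk (dst, "xy", 2, (size_t) -1);  /* becomes strncat   */
}
#endif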
12857 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12858 a normal call should be emitted rather than expanding the function
12859 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12861 static tree
12862 fold_builtin_sprintf_chk (location_t loc, tree exp,
12863 enum built_in_function fcode)
12865 tree dest, size, len, fn, fmt, flag;
12866 const char *fmt_str;
12867 int nargs = call_expr_nargs (exp);
12869 /* Verify the required arguments in the original call. */
12870 if (nargs < 4)
12871 return NULL_TREE;
12872 dest = CALL_EXPR_ARG (exp, 0);
12873 if (!validate_arg (dest, POINTER_TYPE))
12874 return NULL_TREE;
12875 flag = CALL_EXPR_ARG (exp, 1);
12876 if (!validate_arg (flag, INTEGER_TYPE))
12877 return NULL_TREE;
12878 size = CALL_EXPR_ARG (exp, 2);
12879 if (!validate_arg (size, INTEGER_TYPE))
12880 return NULL_TREE;
12881 fmt = CALL_EXPR_ARG (exp, 3);
12882 if (!validate_arg (fmt, POINTER_TYPE))
12883 return NULL_TREE;
12885 if (! host_integerp (size, 1))
12886 return NULL_TREE;
12888 len = NULL_TREE;
12890 if (!init_target_chars ())
12891 return NULL_TREE;
12893 /* Check whether the format is a literal string constant. */
12894 fmt_str = c_getstr (fmt);
12895 if (fmt_str != NULL)
12897 /* If the format doesn't contain % args or %%, we know the size. */
12898 if (strchr (fmt_str, target_percent) == 0)
12900 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12901 len = build_int_cstu (size_type_node, strlen (fmt_str));
12903 /* If the format is "%s" and the first ... argument is a string literal,
12904 we know the size too. */
12905 else if (fcode == BUILT_IN_SPRINTF_CHK
12906 && strcmp (fmt_str, target_percent_s) == 0)
12908 tree arg;
12910 if (nargs == 5)
12912 arg = CALL_EXPR_ARG (exp, 4);
12913 if (validate_arg (arg, POINTER_TYPE))
12915 len = c_strlen (arg, 1);
12916 if (! len || ! host_integerp (len, 1))
12917 len = NULL_TREE;
12923 if (! integer_all_onesp (size))
12925 if (! len || ! tree_int_cst_lt (len, size))
12926 return NULL_TREE;
12929 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12930 or if format doesn't contain % chars or is "%s". */
12931 if (! integer_zerop (flag))
12933 if (fmt_str == NULL)
12934 return NULL_TREE;
12935 if (strchr (fmt_str, target_percent) != NULL
12936 && strcmp (fmt_str, target_percent_s))
12937 return NULL_TREE;
12940 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12941 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12942 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12943 if (!fn)
12944 return NULL_TREE;
12946 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
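/* Editor's sketch (compiled out, not part of GCC): the __sprintf_chk fold
   above at the user level.  FLAG is zero and the literal format contains no
   '%', so the call can safely become sprintf (buf, "hello").  */
#if 0
#include <stdio.h>

static void
demo_sprintf_chk (void)
{
  char buf[32];
  __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 0), "hello");
}
#endif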
12949 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
12950 a normal call should be emitted rather than expanding the function
12951 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12952 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12953 passed as second argument. */
12955 tree
12956 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12957 enum built_in_function fcode)
12959 tree dest, size, len, fn, fmt, flag;
12960 const char *fmt_str;
12962 /* Verify the required arguments in the original call. */
12963 if (call_expr_nargs (exp) < 5)
12964 return NULL_TREE;
12965 dest = CALL_EXPR_ARG (exp, 0);
12966 if (!validate_arg (dest, POINTER_TYPE))
12967 return NULL_TREE;
12968 len = CALL_EXPR_ARG (exp, 1);
12969 if (!validate_arg (len, INTEGER_TYPE))
12970 return NULL_TREE;
12971 flag = CALL_EXPR_ARG (exp, 2);
12972 if (!validate_arg (flag, INTEGER_TYPE))
12973 return NULL_TREE;
12974 size = CALL_EXPR_ARG (exp, 3);
12975 if (!validate_arg (size, INTEGER_TYPE))
12976 return NULL_TREE;
12977 fmt = CALL_EXPR_ARG (exp, 4);
12978 if (!validate_arg (fmt, POINTER_TYPE))
12979 return NULL_TREE;
12981 if (! host_integerp (size, 1))
12982 return NULL_TREE;
12984 if (! integer_all_onesp (size))
12986 if (! host_integerp (len, 1))
12988 /* If LEN is not constant, try MAXLEN too.
12989 For MAXLEN only allow optimizing into non-_ocs function
12990 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12991 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12992 return NULL_TREE;
12994 else
12995 maxlen = len;
12997 if (tree_int_cst_lt (size, maxlen))
12998 return NULL_TREE;
13001 if (!init_target_chars ())
13002 return NULL_TREE;
13004 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13005 or if format doesn't contain % chars or is "%s". */
13006 if (! integer_zerop (flag))
13008 fmt_str = c_getstr (fmt);
13009 if (fmt_str == NULL)
13010 return NULL_TREE;
13011 if (strchr (fmt_str, target_percent) != NULL
13012 && strcmp (fmt_str, target_percent_s))
13013 return NULL_TREE;
13016 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13017 available. */
13018 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13019 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13020 if (!fn)
13021 return NULL_TREE;
13023 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
13026 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13027 FMT and ARG are the arguments to the call; we don't fold cases with
13028 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13030 Return NULL_TREE if no simplification was possible, otherwise return the
13031 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13032 code of the function to be simplified. */
13034 static tree
13035 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13036 tree arg, bool ignore,
13037 enum built_in_function fcode)
13039 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13040 const char *fmt_str = NULL;
13042 /* If the return value is used, don't do the transformation. */
13043 if (! ignore)
13044 return NULL_TREE;
13046 /* Verify the required arguments in the original call. */
13047 if (!validate_arg (fmt, POINTER_TYPE))
13048 return NULL_TREE;
13050 /* Check whether the format is a literal string constant. */
13051 fmt_str = c_getstr (fmt);
13052 if (fmt_str == NULL)
13053 return NULL_TREE;
13055 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13057 /* If we're using an unlocked function, assume the other
13058 unlocked functions exist explicitly. */
13059 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
13060 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
13062 else
13064 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
13065 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
13068 if (!init_target_chars ())
13069 return NULL_TREE;
13071 if (strcmp (fmt_str, target_percent_s) == 0
13072 || strchr (fmt_str, target_percent) == NULL)
13074 const char *str;
13076 if (strcmp (fmt_str, target_percent_s) == 0)
13078 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13079 return NULL_TREE;
13081 if (!arg || !validate_arg (arg, POINTER_TYPE))
13082 return NULL_TREE;
13084 str = c_getstr (arg);
13085 if (str == NULL)
13086 return NULL_TREE;
13088 else
13090 /* The format specifier doesn't contain any '%' characters. */
13091 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13092 && arg)
13093 return NULL_TREE;
13094 str = fmt_str;
13097 /* If the string was "", printf does nothing. */
13098 if (str[0] == '\0')
13099 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13101 /* If the string has length of 1, call putchar. */
13102 if (str[1] == '\0')
13104 /* Given printf("c"), (where c is any one character,)
13105 convert "c"[0] to an int and pass that to the replacement
13106 function. */
13107 newarg = build_int_cst (NULL_TREE, str[0]);
13108 if (fn_putchar)
13109 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13111 else
13113 /* If the string was "string\n", call puts("string"). */
13114 size_t len = strlen (str);
13115 if ((unsigned char)str[len - 1] == target_newline)
13117 /* Create a NUL-terminated string that's one char shorter
13118 than the original, stripping off the trailing '\n'. */
13119 char *newstr = XALLOCAVEC (char, len);
13120 memcpy (newstr, str, len - 1);
13121 newstr[len - 1] = 0;
13123 newarg = build_string_literal (len, newstr);
13124 if (fn_puts)
13125 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13127 else
13128 /* We'd like to arrange to call fputs(string,stdout) here,
13129 but we need stdout and don't have a way to get it yet. */
13130 return NULL_TREE;
13134 /* The other optimizations can be done only on the non-va_list variants. */
13135 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13136 return NULL_TREE;
13138 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13139 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13141 if (!arg || !validate_arg (arg, POINTER_TYPE))
13142 return NULL_TREE;
13143 if (fn_puts)
13144 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13147 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13148 else if (strcmp (fmt_str, target_percent_c) == 0)
13150 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13151 return NULL_TREE;
13152 if (fn_putchar)
13153 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13156 if (!call)
13157 return NULL_TREE;
13159 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
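/* Editor's sketch (compiled out, not part of GCC): the printf rewrites
   performed above, shown on ordinary calls whose return values are ignored.  */
#if 0
#include <stdio.h>

static void
demo_printf_folds (void)
{
  printf ("x");           /* one-character literal: putchar ('x')        */
  printf ("hello\n");     /* literal ending in '\n': puts ("hello")      */
  printf ("%s\n", "hi");  /* "%s\n" with a string argument: puts ("hi")  */
  printf ("%c", 'q');     /* "%c": putchar ('q')                         */
}
#endif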
13162 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
13163 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13164 more than 3 arguments, and ARG may be null in the 2-argument case.
13166 Return NULL_TREE if no simplification was possible, otherwise return the
13167 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13168 code of the function to be simplified. */
13170 static tree
13171 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13172 tree fmt, tree arg, bool ignore,
13173 enum built_in_function fcode)
13175 tree fn_fputc, fn_fputs, call = NULL_TREE;
13176 const char *fmt_str = NULL;
13178 /* If the return value is used, don't do the transformation. */
13179 if (! ignore)
13180 return NULL_TREE;
13182 /* Verify the required arguments in the original call. */
13183 if (!validate_arg (fp, POINTER_TYPE))
13184 return NULL_TREE;
13185 if (!validate_arg (fmt, POINTER_TYPE))
13186 return NULL_TREE;
13188 /* Check whether the format is a literal string constant. */
13189 fmt_str = c_getstr (fmt);
13190 if (fmt_str == NULL)
13191 return NULL_TREE;
13193 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13195 /* If we're using an unlocked function, assume the other
13196 unlocked functions exist explicitly. */
13197 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
13198 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
13200 else
13202 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
13203 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
13206 if (!init_target_chars ())
13207 return NULL_TREE;
13209 /* If the format doesn't contain % args or %%, use strcpy. */
13210 if (strchr (fmt_str, target_percent) == NULL)
13212 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13213 && arg)
13214 return NULL_TREE;
13216 /* If the format specifier was "", fprintf does nothing. */
13217 if (fmt_str[0] == '\0')
13219 /* If FP has side-effects, just wait until gimplification is
13220 done. */
13221 if (TREE_SIDE_EFFECTS (fp))
13222 return NULL_TREE;
13224 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13227 /* When "string" doesn't contain %, replace all cases of
13228 fprintf (fp, string) with fputs (string, fp). The fputs
13229 builtin will take care of special cases like length == 1. */
13230 if (fn_fputs)
13231 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13234 /* The other optimizations can be done only on the non-va_list variants. */
13235 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13236 return NULL_TREE;
13238 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13239 else if (strcmp (fmt_str, target_percent_s) == 0)
13241 if (!arg || !validate_arg (arg, POINTER_TYPE))
13242 return NULL_TREE;
13243 if (fn_fputs)
13244 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13247 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13248 else if (strcmp (fmt_str, target_percent_c) == 0)
13250 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13251 return NULL_TREE;
13252 if (fn_fputc)
13253 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13256 if (!call)
13257 return NULL_TREE;
13258 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
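/* Editor's sketch (compiled out, not part of GCC): the corresponding fprintf
   rewrites, again only valid because the return values are unused.  */
#if 0
#include <stdio.h>

static void
demo_fprintf_folds (FILE *fp)
{
  fprintf (fp, "hello");    /* no '%': fputs ("hello", fp)  */
  fprintf (fp, "%s", "hi"); /* "%s":   fputs ("hi", fp)     */
  fprintf (fp, "%c", 'q');  /* "%c":   fputc ('q', fp)      */
}
#endif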
13261 /* Initialize format string characters in the target charset. */
13263 static bool
13264 init_target_chars (void)
13266 static bool init;
13267 if (!init)
13269 target_newline = lang_hooks.to_target_charset ('\n');
13270 target_percent = lang_hooks.to_target_charset ('%');
13271 target_c = lang_hooks.to_target_charset ('c');
13272 target_s = lang_hooks.to_target_charset ('s');
13273 if (target_newline == 0 || target_percent == 0 || target_c == 0
13274 || target_s == 0)
13275 return false;
13277 target_percent_c[0] = target_percent;
13278 target_percent_c[1] = target_c;
13279 target_percent_c[2] = '\0';
13281 target_percent_s[0] = target_percent;
13282 target_percent_s[1] = target_s;
13283 target_percent_s[2] = '\0';
13285 target_percent_s_newline[0] = target_percent;
13286 target_percent_s_newline[1] = target_s;
13287 target_percent_s_newline[2] = target_newline;
13288 target_percent_s_newline[3] = '\0';
13290 init = true;
13292 return true;
13295 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13296 and no overflow/underflow occurred. INEXACT is true if M was not
13297 exactly calculated. TYPE is the tree type for the result. This
13298 function assumes that you cleared the MPFR flags and then
13299 calculated M to see if anything subsequently set a flag prior to
13300 entering this function. Return NULL_TREE if any checks fail. */
13302 static tree
13303 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13305 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13306 overflow/underflow occurred. If -frounding-math, proceed iff the
13307 result of calling FUNC was exact. */
13308 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13309 && (!flag_rounding_math || !inexact))
13311 REAL_VALUE_TYPE rr;
13313 real_from_mpfr (&rr, m, type, GMP_RNDN);
13314 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13315 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13316 but the mpfr_t is not, then we underflowed in the
13317 conversion. */
13318 if (real_isfinite (&rr)
13319 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13321 REAL_VALUE_TYPE rmode;
13323 real_convert (&rmode, TYPE_MODE (type), &rr);
13324 /* Proceed iff the specified mode can hold the value. */
13325 if (real_identical (&rmode, &rr))
13326 return build_real (type, rmode);
13329 return NULL_TREE;
13332 #ifdef HAVE_mpc
13333 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13334 number and no overflow/underflow occurred. INEXACT is true if M
13335 was not exactly calculated. TYPE is the tree type for the result.
13336 This function assumes that you cleared the MPFR flags and then
13337 calculated M to see if anything subsequently set a flag prior to
13338 entering this function. Return NULL_TREE if any checks fail. */
13340 static tree
13341 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact)
13343 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13344 overflow/underflow occurred. If -frounding-math, proceed iff the
13345 result of calling FUNC was exact. */
13346 if (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13347 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13348 && (!flag_rounding_math || !inexact))
13350 REAL_VALUE_TYPE re, im;
13352 real_from_mpfr (&re, mpc_realref (m), type, GMP_RNDN);
13353 real_from_mpfr (&im, mpc_imagref (m), type, GMP_RNDN);
13354 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13355 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13356 but the mpfr_t is not, then we underflowed in the
13357 conversion. */
13358 if (real_isfinite (&re) && real_isfinite (&im)
13359 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13360 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0))
13362 REAL_VALUE_TYPE re_mode, im_mode;
13364 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13365 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13366 /* Proceed iff the specified mode can hold the value. */
13367 if (real_identical (&re_mode, &re) && real_identical (&im_mode, &im))
13368 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13369 build_real (TREE_TYPE (type), im_mode));
13372 return NULL_TREE;
13374 #endif /* HAVE_mpc */
13376 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13377 FUNC on it and return the resulting value as a tree with type TYPE.
13378 If MIN and/or MAX are not NULL, then the supplied ARG must be
13379 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13380 acceptable values, otherwise they are not. The mpfr precision is
13381 set to the precision of TYPE. We assume that function FUNC returns
13382 zero if the result could be calculated exactly within the requested
13383 precision. */
13385 static tree
13386 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13387 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13388 bool inclusive)
13390 tree result = NULL_TREE;
13392 STRIP_NOPS (arg);
13394 /* To proceed, MPFR must exactly represent the target floating point
13395 format, which only happens when the target base equals two. */
13396 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13397 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13399 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13401 if (real_isfinite (ra)
13402 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13403 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13405 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13406 const int prec = fmt->p;
13407 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13408 int inexact;
13409 mpfr_t m;
13411 mpfr_init2 (m, prec);
13412 mpfr_from_real (m, ra, GMP_RNDN);
13413 mpfr_clear_flags ();
13414 inexact = func (m, m, rnd);
13415 result = do_mpfr_ckconv (m, type, inexact);
13416 mpfr_clear (m);
13420 return result;
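/* Editor's sketch (compiled out, not part of GCC): the clear-flags /
   evaluate / validate pattern used by do_mpfr_arg1, written as a standalone
   MPFR program.  The 53-bit precision stands in for IEEE double; the checks
   mirror those in do_mpfr_ckconv.  */
#if 0
#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t m;
  int inexact;

  mpfr_init2 (m, 53);
  mpfr_set_d (m, 0.5, GMP_RNDN);
  mpfr_clear_flags ();
  inexact = mpfr_sin (m, m, GMP_RNDN);
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ())
    printf ("sin(0.5) ~= %.17g (inexact=%d)\n",
            mpfr_get_d (m, GMP_RNDN), inexact);
  mpfr_clear (m);
  return 0;
}
#endif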
13423 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13424 FUNC on it and return the resulting value as a tree with type TYPE.
13425 The mpfr precision is set to the precision of TYPE. We assume that
13426 function FUNC returns zero if the result could be calculated
13427 exactly within the requested precision. */
13429 static tree
13430 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13431 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13433 tree result = NULL_TREE;
13435 STRIP_NOPS (arg1);
13436 STRIP_NOPS (arg2);
13438 /* To proceed, MPFR must exactly represent the target floating point
13439 format, which only happens when the target base equals two. */
13440 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13441 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13442 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13444 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13445 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13447 if (real_isfinite (ra1) && real_isfinite (ra2))
13449 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13450 const int prec = fmt->p;
13451 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13452 int inexact;
13453 mpfr_t m1, m2;
13455 mpfr_inits2 (prec, m1, m2, NULL);
13456 mpfr_from_real (m1, ra1, GMP_RNDN);
13457 mpfr_from_real (m2, ra2, GMP_RNDN);
13458 mpfr_clear_flags ();
13459 inexact = func (m1, m1, m2, rnd);
13460 result = do_mpfr_ckconv (m1, type, inexact);
13461 mpfr_clears (m1, m2, NULL);
13465 return result;
13468 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13469 FUNC on it and return the resulting value as a tree with type TYPE.
13470 The mpfr precision is set to the precision of TYPE. We assume that
13471 function FUNC returns zero if the result could be calculated
13472 exactly within the requested precision. */
13474 static tree
13475 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13476 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13478 tree result = NULL_TREE;
13480 STRIP_NOPS (arg1);
13481 STRIP_NOPS (arg2);
13482 STRIP_NOPS (arg3);
13484 /* To proceed, MPFR must exactly represent the target floating point
13485 format, which only happens when the target base equals two. */
13486 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13487 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13488 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13489 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13491 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13492 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13493 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13495 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13497 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13498 const int prec = fmt->p;
13499 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13500 int inexact;
13501 mpfr_t m1, m2, m3;
13503 mpfr_inits2 (prec, m1, m2, m3, NULL);
13504 mpfr_from_real (m1, ra1, GMP_RNDN);
13505 mpfr_from_real (m2, ra2, GMP_RNDN);
13506 mpfr_from_real (m3, ra3, GMP_RNDN);
13507 mpfr_clear_flags ();
13508 inexact = func (m1, m1, m2, m3, rnd);
13509 result = do_mpfr_ckconv (m1, type, inexact);
13510 mpfr_clears (m1, m2, m3, NULL);
13514 return result;
13517 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13518 the objects pointed to by ARG_SINP and ARG_COSP to the resulting values.
13519 If ARG_SINP and ARG_COSP are NULL then the result is returned
13520 as a complex value.
13521 The type is taken from the type of ARG and is used for setting the
13522 precision of the calculation and results. */
13524 static tree
13525 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13527 tree const type = TREE_TYPE (arg);
13528 tree result = NULL_TREE;
13530 STRIP_NOPS (arg);
13532 /* To proceed, MPFR must exactly represent the target floating point
13533 format, which only happens when the target base equals two. */
13534 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13535 && TREE_CODE (arg) == REAL_CST
13536 && !TREE_OVERFLOW (arg))
13538 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13540 if (real_isfinite (ra))
13542 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13543 const int prec = fmt->p;
13544 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13545 tree result_s, result_c;
13546 int inexact;
13547 mpfr_t m, ms, mc;
13549 mpfr_inits2 (prec, m, ms, mc, NULL);
13550 mpfr_from_real (m, ra, GMP_RNDN);
13551 mpfr_clear_flags ();
13552 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13553 result_s = do_mpfr_ckconv (ms, type, inexact);
13554 result_c = do_mpfr_ckconv (mc, type, inexact);
13555 mpfr_clears (m, ms, mc, NULL);
13556 if (result_s && result_c)
13558 /* If we are to return in a complex value do so. */
13559 if (!arg_sinp && !arg_cosp)
13560 return build_complex (build_complex_type (type),
13561 result_c, result_s);
13563 /* Dereference the sin/cos pointer arguments. */
13564 arg_sinp = build_fold_indirect_ref (arg_sinp);
13565 arg_cosp = build_fold_indirect_ref (arg_cosp);
13566 /* Proceed iff valid pointer types were passed in. */
13567 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13568 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13570 /* Set the values. */
13571 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13572 result_s);
13573 TREE_SIDE_EFFECTS (result_s) = 1;
13574 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13575 result_c);
13576 TREE_SIDE_EFFECTS (result_c) = 1;
13577 /* Combine the assignments into a compound expr. */
13578 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13579 result_s, result_c));
13584 return result;
13587 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13588 two-argument mpfr order N Bessel function FUNC on them and return
13589 the resulting value as a tree with type TYPE. The mpfr precision
13590 is set to the precision of TYPE. We assume that function FUNC
13591 returns zero if the result could be calculated exactly within the
13592 requested precision. */
13593 static tree
13594 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13595 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13596 const REAL_VALUE_TYPE *min, bool inclusive)
13598 tree result = NULL_TREE;
13600 STRIP_NOPS (arg1);
13601 STRIP_NOPS (arg2);
13603 /* To proceed, MPFR must exactly represent the target floating point
13604 format, which only happens when the target base equals two. */
13605 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13606 && host_integerp (arg1, 0)
13607 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13609 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13610 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13612 if (n == (long)n
13613 && real_isfinite (ra)
13614 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13616 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13617 const int prec = fmt->p;
13618 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13619 int inexact;
13620 mpfr_t m;
13622 mpfr_init2 (m, prec);
13623 mpfr_from_real (m, ra, GMP_RNDN);
13624 mpfr_clear_flags ();
13625 inexact = func (m, n, m, rnd);
13626 result = do_mpfr_ckconv (m, type, inexact);
13627 mpfr_clear (m);
13631 return result;
13634 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13635 the value pointed to by ARG_QUO and return the remainder. The type is taken
13636 from the type of ARG0 and is used for setting the precision of the
13637 calculation and results. */
13639 static tree
13640 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13642 tree const type = TREE_TYPE (arg0);
13643 tree result = NULL_TREE;
13645 STRIP_NOPS (arg0);
13646 STRIP_NOPS (arg1);
13648 /* To proceed, MPFR must exactly represent the target floating point
13649 format, which only happens when the target base equals two. */
13650 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13651 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13652 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13654 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13655 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13657 if (real_isfinite (ra0) && real_isfinite (ra1))
13659 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13660 const int prec = fmt->p;
13661 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13662 tree result_rem;
13663 long integer_quo;
13664 mpfr_t m0, m1;
13666 mpfr_inits2 (prec, m0, m1, NULL);
13667 mpfr_from_real (m0, ra0, GMP_RNDN);
13668 mpfr_from_real (m1, ra1, GMP_RNDN);
13669 mpfr_clear_flags ();
13670 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13671 /* Remquo is independent of the rounding mode, so pass
13672 inexact=0 to do_mpfr_ckconv(). */
13673 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13674 mpfr_clears (m0, m1, NULL);
13675 if (result_rem)
13677 /* MPFR calculates quo in the host's long so it may
13678 return more bits in quo than the target int can hold
13679 if sizeof(host long) > sizeof(target int). This can
13680 happen even for native compilers in LP64 mode. In
13681 these cases, modulo the quo value with the largest
13682 number that the target int can hold while leaving one
13683 bit for the sign. */
13684 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13685 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13687 /* Dereference the quo pointer argument. */
13688 arg_quo = build_fold_indirect_ref (arg_quo);
13689 /* Proceed iff a valid pointer type was passed in. */
13690 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13692 /* Set the value. */
13693 tree result_quo = fold_build2 (MODIFY_EXPR,
13694 TREE_TYPE (arg_quo), arg_quo,
13695 build_int_cst (NULL, integer_quo));
13696 TREE_SIDE_EFFECTS (result_quo) = 1;
13697 /* Combine the quo assignment with the rem. */
13698 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13699 result_quo, result_rem));
13704 return result;
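/* Editor's sketch (compiled out, not part of GCC): the user-level remquo
   call that the fold above evaluates at compile time; *QUO is assigned and
   the remainder becomes the value of the folded expression.  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  int quo;
  double rem = remquo (5.0, 3.0, &quo);
  printf ("rem=%g quo=%d\n", rem, quo);
  return 0;
}
#endif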
13707 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13708 resulting value as a tree with type TYPE. The mpfr precision is
13709 set to the precision of TYPE. We assume that this mpfr function
13710 returns zero if the result could be calculated exactly within the
13711 requested precision. In addition, the integer pointer represented
13712 by ARG_SG will be dereferenced and set to the appropriate signgam
13713 (-1,1) value. */
13715 static tree
13716 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13718 tree result = NULL_TREE;
13720 STRIP_NOPS (arg);
13722 /* To proceed, MPFR must exactly represent the target floating point
13723 format, which only happens when the target base equals two. Also
13724 verify ARG is a constant and that ARG_SG is an int pointer. */
13725 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13726 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13727 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13728 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13730 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13732 /* In addition to NaN and Inf, the argument cannot be zero or a
13733 negative integer. */
13734 if (real_isfinite (ra)
13735 && ra->cl != rvc_zero
13736 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13738 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13739 const int prec = fmt->p;
13740 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13741 int inexact, sg;
13742 mpfr_t m;
13743 tree result_lg;
13745 mpfr_init2 (m, prec);
13746 mpfr_from_real (m, ra, GMP_RNDN);
13747 mpfr_clear_flags ();
13748 inexact = mpfr_lgamma (m, &sg, m, rnd);
13749 result_lg = do_mpfr_ckconv (m, type, inexact);
13750 mpfr_clear (m);
13751 if (result_lg)
13753 tree result_sg;
13755 /* Dereference the arg_sg pointer argument. */
13756 arg_sg = build_fold_indirect_ref (arg_sg);
13757 /* Assign the signgam value into *arg_sg. */
13758 result_sg = fold_build2 (MODIFY_EXPR,
13759 TREE_TYPE (arg_sg), arg_sg,
13760 build_int_cst (NULL, sg));
13761 TREE_SIDE_EFFECTS (result_sg) = 1;
13762 /* Combine the signgam assignment with the lgamma result. */
13763 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13764 result_sg, result_lg));
13769 return result;
13772 #ifdef HAVE_mpc
13773 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13774 function FUNC on it and return the resulting value as a tree with
13775 type TYPE. The mpfr precision is set to the precision of TYPE. We
13776 assume that function FUNC returns zero if the result could be
13777 calculated exactly within the requested precision. */
13779 static tree
13780 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13782 tree result = NULL_TREE;
13784 STRIP_NOPS (arg);
13786 /* To proceed, MPFR must exactly represent the target floating point
13787 format, which only happens when the target base equals two. */
13788 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13789 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13790 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13792 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13793 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13795 if (real_isfinite (re) && real_isfinite (im))
13797 const struct real_format *const fmt =
13798 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13799 const int prec = fmt->p;
13800 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13801 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13802 int inexact;
13803 mpc_t m;
13805 mpc_init2 (m, prec);
13806 mpfr_from_real (mpc_realref(m), re, rnd);
13807 mpfr_from_real (mpc_imagref(m), im, rnd);
13808 mpfr_clear_flags ();
13809 inexact = func (m, m, crnd);
13810 result = do_mpc_ckconv (m, type, inexact);
13811 mpc_clear (m);
13815 return result;
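/* Editor's sketch (compiled out, not part of GCC): an MPC evaluation in the
   same style as do_mpc_arg1, here computing csqrt(-1) at double precision.
   It assumes the standard MPC interface (mpc_set_d_d, mpc_sqrt).  */
#if 0
#include <stdio.h>
#include <mpc.h>

int
main (void)
{
  mpc_t m;
  mpc_init2 (m, 53);
  mpc_set_d_d (m, -1.0, 0.0, MPC_RNDNN);
  mpc_sqrt (m, m, MPC_RNDNN);
  printf ("csqrt(-1) ~= %.17g + %.17gi\n",
          mpfr_get_d (mpc_realref (m), GMP_RNDN),
          mpfr_get_d (mpc_imagref (m), GMP_RNDN));
  mpc_clear (m);
  return 0;
}
#endif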
13818 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
13819 mpc function FUNC on it and return the resulting value as a tree
13820 with type TYPE. The mpfr precision is set to the precision of
13821 TYPE. We assume that function FUNC returns zero if the result
13822 could be calculated exactly within the requested precision. */
13824 #ifdef HAVE_mpc
13825 tree
13826 do_mpc_arg2 (tree arg0, tree arg1, tree type,
13827 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13829 tree result = NULL_TREE;
13831 STRIP_NOPS (arg0);
13832 STRIP_NOPS (arg1);
13834 /* To proceed, MPFR must exactly represent the target floating point
13835 format, which only happens when the target base equals two. */
13836 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13837 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13838 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13839 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13840 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13842 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13843 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13844 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13845 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13847 if (real_isfinite (re0) && real_isfinite (im0)
13848 && real_isfinite (re1) && real_isfinite (im1))
13850 const struct real_format *const fmt =
13851 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13852 const int prec = fmt->p;
13853 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13854 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13855 int inexact;
13856 mpc_t m0, m1;
13858 mpc_init2 (m0, prec);
13859 mpc_init2 (m1, prec);
13860 mpfr_from_real (mpc_realref(m0), re0, rnd);
13861 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13862 mpfr_from_real (mpc_realref(m1), re1, rnd);
13863 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13864 mpfr_clear_flags ();
13865 inexact = func (m0, m0, m1, crnd);
13866 result = do_mpc_ckconv (m0, type, inexact);
13867 mpc_clear (m0);
13868 mpc_clear (m1);
13872 return result;
13874 # endif
13875 #endif /* HAVE_mpc */
13877 /* FIXME tuples.
13878 The functions below provide an alternate interface for folding
13879 builtin function calls presented as GIMPLE_CALL statements rather
13880 than as CALL_EXPRs. The folded result is still expressed as a
13881 tree. There is too much code duplication in the handling of
13882 varargs functions, and a more intrusive re-factoring would permit
13883 better sharing of code between the tree and statement-based
13884 versions of these functions. */
13886 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13887 along with N new arguments specified as the "..." parameters. SKIP
13888 is the number of arguments in STMT to be omitted. This function is used
13889 to do varargs-to-varargs transformations. */
13891 static tree
13892 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13894 int oldnargs = gimple_call_num_args (stmt);
13895 int nargs = oldnargs - skip + n;
13896 tree fntype = TREE_TYPE (fndecl);
13897 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13898 tree *buffer;
13899 int i, j;
13900 va_list ap;
13901 location_t loc = gimple_location (stmt);
13903 buffer = XALLOCAVEC (tree, nargs);
13904 va_start (ap, n);
13905 for (i = 0; i < n; i++)
13906 buffer[i] = va_arg (ap, tree);
13907 va_end (ap);
13908 for (j = skip; j < oldnargs; j++, i++)
13909 buffer[i] = gimple_call_arg (stmt, j);
13911 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
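/* Editor's sketch (compiled out, not part of GCC): the argument-splicing
   idea used by gimple_rewrite_call_expr above, reduced to plain pointer
   arrays.  All names here are hypothetical.  */
#if 0
#include <stdarg.h>

/* Fill BUFFER with N pointers taken from "..." followed by
   OLDARGS[SKIP .. OLDNARGS-1], mirroring the loop structure above.  */
static void
demo_splice_args (void **buffer, void **oldargs, int oldnargs,
                  int skip, int n, ...)
{
  va_list ap;
  int i, j;

  va_start (ap, n);
  for (i = 0; i < n; i++)
    buffer[i] = va_arg (ap, void *);
  va_end (ap);

  for (j = skip; j < oldnargs; j++, i++)
    buffer[i] = oldargs[j];
}
#endif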
13914 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13915 a normal call should be emitted rather than expanding the function
13916 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13918 static tree
13919 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13921 tree dest, size, len, fn, fmt, flag;
13922 const char *fmt_str;
13923 int nargs = gimple_call_num_args (stmt);
13925 /* Verify the required arguments in the original call. */
13926 if (nargs < 4)
13927 return NULL_TREE;
13928 dest = gimple_call_arg (stmt, 0);
13929 if (!validate_arg (dest, POINTER_TYPE))
13930 return NULL_TREE;
13931 flag = gimple_call_arg (stmt, 1);
13932 if (!validate_arg (flag, INTEGER_TYPE))
13933 return NULL_TREE;
13934 size = gimple_call_arg (stmt, 2);
13935 if (!validate_arg (size, INTEGER_TYPE))
13936 return NULL_TREE;
13937 fmt = gimple_call_arg (stmt, 3);
13938 if (!validate_arg (fmt, POINTER_TYPE))
13939 return NULL_TREE;
13941 if (! host_integerp (size, 1))
13942 return NULL_TREE;
13944 len = NULL_TREE;
13946 if (!init_target_chars ())
13947 return NULL_TREE;
13949 /* Check whether the format is a literal string constant. */
13950 fmt_str = c_getstr (fmt);
13951 if (fmt_str != NULL)
13953 /* If the format doesn't contain % args or %%, we know the size. */
13954 if (strchr (fmt_str, target_percent) == 0)
13956 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13957 len = build_int_cstu (size_type_node, strlen (fmt_str));
13959 /* If the format is "%s" and the first ... argument is a string literal,
13960 we know the size too. */
13961 else if (fcode == BUILT_IN_SPRINTF_CHK
13962 && strcmp (fmt_str, target_percent_s) == 0)
13964 tree arg;
13966 if (nargs == 5)
13968 arg = gimple_call_arg (stmt, 4);
13969 if (validate_arg (arg, POINTER_TYPE))
13971 len = c_strlen (arg, 1);
13972 if (! len || ! host_integerp (len, 1))
13973 len = NULL_TREE;
13979 if (! integer_all_onesp (size))
13981 if (! len || ! tree_int_cst_lt (len, size))
13982 return NULL_TREE;
13985 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13986 or if format doesn't contain % chars or is "%s". */
13987 if (! integer_zerop (flag))
13989 if (fmt_str == NULL)
13990 return NULL_TREE;
13991 if (strchr (fmt_str, target_percent) != NULL
13992 && strcmp (fmt_str, target_percent_s))
13993 return NULL_TREE;
13996 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13997 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13998 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13999 if (!fn)
14000 return NULL_TREE;
14002 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
14005 /* Fold a call STMT to __{,v}snprintf_chk. Return NULL_TREE if
14006 a normal call should be emitted rather than expanding the function
14007 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14008 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14009 passed as second argument. */
14011 tree
14012 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14013 enum built_in_function fcode)
14015 tree dest, size, len, fn, fmt, flag;
14016 const char *fmt_str;
14018 /* Verify the required arguments in the original call. */
14019 if (gimple_call_num_args (stmt) < 5)
14020 return NULL_TREE;
14021 dest = gimple_call_arg (stmt, 0);
14022 if (!validate_arg (dest, POINTER_TYPE))
14023 return NULL_TREE;
14024 len = gimple_call_arg (stmt, 1);
14025 if (!validate_arg (len, INTEGER_TYPE))
14026 return NULL_TREE;
14027 flag = gimple_call_arg (stmt, 2);
14028 if (!validate_arg (flag, INTEGER_TYPE))
14029 return NULL_TREE;
14030 size = gimple_call_arg (stmt, 3);
14031 if (!validate_arg (size, INTEGER_TYPE))
14032 return NULL_TREE;
14033 fmt = gimple_call_arg (stmt, 4);
14034 if (!validate_arg (fmt, POINTER_TYPE))
14035 return NULL_TREE;
14037 if (! host_integerp (size, 1))
14038 return NULL_TREE;
14040 if (! integer_all_onesp (size))
14042 if (! host_integerp (len, 1))
14044 /* If LEN is not constant, try MAXLEN too.
14045 For MAXLEN only allow optimizing into non-_ocs function
14046 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
14047 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
14048 return NULL_TREE;
14050 else
14051 maxlen = len;
14053 if (tree_int_cst_lt (size, maxlen))
14054 return NULL_TREE;
14057 if (!init_target_chars ())
14058 return NULL_TREE;
14060 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
14061 or if format doesn't contain % chars or is "%s". */
14062 if (! integer_zerop (flag))
14064 fmt_str = c_getstr (fmt);
14065 if (fmt_str == NULL)
14066 return NULL_TREE;
14067 if (strchr (fmt_str, target_percent) != NULL
14068 && strcmp (fmt_str, target_percent_s))
14069 return NULL_TREE;
14072 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
14073 available. */
14074 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
14075 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
14076 if (!fn)
14077 return NULL_TREE;
14079 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
14082 /* Builtins with folding operations that operate on "..." arguments
14083 need special handling; we need to store the arguments in a convenient
14084 data structure before attempting any folding. Fortunately there are
14085 only a few builtins that fall into this category. FNDECL is the
14086 function, STMT is the GIMPLE_CALL for the call, and IGNORE is true if the
14087 result of the function call is ignored. */
14089 static tree
14090 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14091 bool ignore ATTRIBUTE_UNUSED)
14093 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14094 tree ret = NULL_TREE;
14096 switch (fcode)
14098 case BUILT_IN_SPRINTF_CHK:
14099 case BUILT_IN_VSPRINTF_CHK:
14100 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14101 break;
14103 case BUILT_IN_SNPRINTF_CHK:
14104 case BUILT_IN_VSNPRINTF_CHK:
14105 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14107 default:
14108 break;
14110 if (ret)
14112 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14113 TREE_NO_WARNING (ret) = 1;
14114 return ret;
14116 return NULL_TREE;
14119 /* A wrapper function for builtin folding that prevents warnings for
14120 "statement without effect" and the like, caused by removing the
14121 call node earlier than the warning is generated. */
14123 tree
14124 fold_call_stmt (gimple stmt, bool ignore)
14126 tree ret = NULL_TREE;
14127 tree fndecl = gimple_call_fndecl (stmt);
14128 location_t loc = gimple_location (stmt);
14129 if (fndecl
14130 && TREE_CODE (fndecl) == FUNCTION_DECL
14131 && DECL_BUILT_IN (fndecl)
14132 && !gimple_call_va_arg_pack_p (stmt))
14134 int nargs = gimple_call_num_args (stmt);
14136 if (avoid_folding_inline_builtin (fndecl))
14137 return NULL_TREE;
14138 /* FIXME: Don't use a list in this interface. */
14139 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14141 tree arglist = NULL_TREE;
14142 int i;
14143 for (i = nargs - 1; i >= 0; i--)
14144 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
14145 return targetm.fold_builtin (fndecl, arglist, ignore);
14147 else
14149 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
14151 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
14152 int i;
14153 for (i = 0; i < nargs; i++)
14154 args[i] = gimple_call_arg (stmt, i);
14155 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
14157 if (!ret)
14158 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
14159 if (ret)
14161 /* Propagate location information from original call to
14162 expansion of builtin. Otherwise things like
14163 maybe_emit_chk_warning, that operate on the expansion
14164 of a builtin, will use the wrong location information. */
14165 if (gimple_has_location (stmt))
14167 tree realret = ret;
14168 if (TREE_CODE (ret) == NOP_EXPR)
14169 realret = TREE_OPERAND (ret, 0);
14170 if (CAN_HAVE_LOCATION_P (realret)
14171 && !EXPR_HAS_LOCATION (realret))
14172 SET_EXPR_LOCATION (realret, loc);
14173 return realret;
14175 return ret;
14179 return NULL_TREE;