[official-gcc.git] / gcc / builtins.c
blob 57c0c1bce59a75079f0bd6ba1cd83d26fad9fc86 (last change 2009-08-05, Paul Thomas <pault@gcc.gnu.org>)
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
56 #endif
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 #endif
61 #ifdef HAVE_mpc
62 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
63 #ifdef HAVE_mpc_pow
64 static tree do_mpc_arg2 (tree, tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t));
65 #endif
66 #endif
68 /* Define the names of the builtin function types and codes. */
69 const char *const built_in_class_names[4]
70 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
72 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
 73 const char * built_in_names[(int) END_BUILTINS] =
 74 {
 75 #include "builtins.def"
 76 };
 77 #undef DEF_BUILTIN
 79 /* Set up an array of _DECL trees; make sure each element is
 80 initialized to NULL_TREE. */
81 tree built_in_decls[(int) END_BUILTINS];
82 /* Declarations used when constructing the builtin implicitly in the compiler.
 83 It may be NULL_TREE when this is invalid (for instance the runtime is not
84 required to implement the function call in all cases). */
85 tree implicit_built_in_decls[(int) END_BUILTINS];
87 static const char *c_getstr (tree);
88 static rtx c_readstr (const char *, enum machine_mode);
89 static int target_char_cast (tree, char *);
90 static rtx get_memory_rtx (tree, tree);
91 static int apply_args_size (void);
92 static int apply_result_size (void);
93 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
94 static rtx result_vector (int, rtx);
95 #endif
96 static void expand_builtin_update_setjmp_buf (rtx);
97 static void expand_builtin_prefetch (tree);
98 static rtx expand_builtin_apply_args (void);
99 static rtx expand_builtin_apply_args_1 (void);
100 static rtx expand_builtin_apply (rtx, rtx, rtx);
101 static void expand_builtin_return (rtx);
102 static enum type_class type_to_class (tree);
103 static rtx expand_builtin_classify_type (tree);
104 static void expand_errno_check (tree, rtx);
105 static rtx expand_builtin_mathfn (tree, rtx, rtx);
106 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
107 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
108 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
109 static rtx expand_builtin_sincos (tree);
110 static rtx expand_builtin_cexpi (tree, rtx, rtx);
111 static rtx expand_builtin_int_roundingfn (tree, rtx);
112 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
113 static rtx expand_builtin_args_info (tree);
114 static rtx expand_builtin_next_arg (void);
115 static rtx expand_builtin_va_start (tree);
116 static rtx expand_builtin_va_end (tree);
117 static rtx expand_builtin_va_copy (tree);
118 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
122 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
123 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
124 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
130 enum machine_mode, int);
131 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
132 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
133 enum machine_mode, int);
134 static rtx expand_builtin_bcopy (tree, int);
135 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
137 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
139 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
140 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
141 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
142 static rtx expand_builtin_bzero (tree);
143 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
144 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
145 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
146 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
147 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
148 static rtx expand_builtin_alloca (tree, rtx);
149 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
150 static rtx expand_builtin_frame_address (tree, tree);
151 static rtx expand_builtin_fputs (tree, rtx, bool);
152 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
153 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
154 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
155 static tree stabilize_va_list_loc (location_t, tree, int);
156 static rtx expand_builtin_expect (tree, rtx);
157 static tree fold_builtin_constant_p (tree);
158 static tree fold_builtin_expect (location_t, tree, tree);
159 static tree fold_builtin_classify_type (tree);
160 static tree fold_builtin_strlen (location_t, tree);
161 static tree fold_builtin_inf (location_t, tree, int);
162 static tree fold_builtin_nan (tree, tree, int);
163 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
164 static bool validate_arg (const_tree, enum tree_code code);
165 static bool integer_valued_real_p (tree);
166 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
167 static bool readonly_data_expr (tree);
168 static rtx expand_builtin_fabs (tree, rtx, rtx);
169 static rtx expand_builtin_signbit (tree, rtx);
170 static tree fold_builtin_sqrt (location_t, tree, tree);
171 static tree fold_builtin_cbrt (location_t, tree, tree);
172 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
173 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
174 static tree fold_builtin_cos (location_t, tree, tree, tree);
175 static tree fold_builtin_cosh (location_t, tree, tree, tree);
176 static tree fold_builtin_tan (tree, tree);
177 static tree fold_builtin_trunc (location_t, tree, tree);
178 static tree fold_builtin_floor (location_t, tree, tree);
179 static tree fold_builtin_ceil (location_t, tree, tree);
180 static tree fold_builtin_round (location_t, tree, tree);
181 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
182 static tree fold_builtin_bitop (tree, tree);
183 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
184 static tree fold_builtin_strchr (location_t, tree, tree, tree);
185 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
186 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
187 static tree fold_builtin_strcmp (location_t, tree, tree);
188 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
189 static tree fold_builtin_signbit (location_t, tree, tree);
190 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
191 static tree fold_builtin_isascii (location_t, tree);
192 static tree fold_builtin_toascii (location_t, tree);
193 static tree fold_builtin_isdigit (location_t, tree);
194 static tree fold_builtin_fabs (location_t, tree, tree);
195 static tree fold_builtin_abs (location_t, tree, tree);
196 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
197 enum tree_code);
198 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
199 static tree fold_builtin_0 (location_t, tree, bool);
200 static tree fold_builtin_1 (location_t, tree, tree, bool);
201 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
202 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
203 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
204 static tree fold_builtin_varargs (location_t, tree, tree, bool);
206 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
207 static tree fold_builtin_strstr (location_t, tree, tree, tree);
208 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
209 static tree fold_builtin_strcat (location_t, tree, tree);
210 static tree fold_builtin_strncat (location_t, tree, tree, tree);
211 static tree fold_builtin_strspn (location_t, tree, tree);
212 static tree fold_builtin_strcspn (location_t, tree, tree);
213 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
215 static rtx expand_builtin_object_size (tree);
216 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
217 enum built_in_function);
218 static void maybe_emit_chk_warning (tree, enum built_in_function);
219 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
220 static void maybe_emit_free_warning (tree);
221 static tree fold_builtin_object_size (tree, tree);
222 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
223 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
224 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
225 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
226 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
227 enum built_in_function);
228 static bool init_target_chars (void);
230 static unsigned HOST_WIDE_INT target_newline;
231 static unsigned HOST_WIDE_INT target_percent;
232 static unsigned HOST_WIDE_INT target_c;
233 static unsigned HOST_WIDE_INT target_s;
234 static char target_percent_c[3];
235 static char target_percent_s[3];
236 static char target_percent_s_newline[4];
237 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
238 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
239 static tree do_mpfr_arg2 (tree, tree, tree,
240 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
241 static tree do_mpfr_arg3 (tree, tree, tree, tree,
242 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
243 static tree do_mpfr_sincos (tree, tree, tree);
244 static tree do_mpfr_bessel_n (tree, tree, tree,
245 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
246 const REAL_VALUE_TYPE *, bool);
247 static tree do_mpfr_remquo (tree, tree, tree);
248 static tree do_mpfr_lgamma_r (tree, tree, tree);
250 bool
251 is_builtin_name (const char *name)
253 if (strncmp (name, "__builtin_", 10) == 0)
254 return true;
255 if (strncmp (name, "__sync_", 7) == 0)
256 return true;
257 return false;
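
/* A minimal usage sketch (illustrative only, not exercised by GCC itself)
   of which spellings the predicate above accepts; `assert' merely stands
   in for a test harness.  */
#if 0
#include <assert.h>
static void
is_builtin_name_example (void)
{
  assert (is_builtin_name ("__builtin_memcpy"));      /* "__builtin_" prefix.  */
  assert (is_builtin_name ("__sync_fetch_and_add"));  /* "__sync_" prefix.  */
  assert (!is_builtin_name ("memcpy"));               /* Plain library name.  */
}
#endif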
260 /* Return true if NODE should be considered for inline expansion regardless
261 of the optimization level. This means whenever a function is invoked with
262 its "internal" name, which normally contains the prefix "__builtin". */
264 static bool
265 called_as_built_in (tree node)
267 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
268 we want the name used to call the function, not the name it
269 will have. */
270 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
271 return is_builtin_name (name);
 274 /* Return the alignment in bits of EXP, an object.
 275 Don't return more than MAX_ALIGN no matter what; ALIGN is the initial
 276 guessed alignment, e.g. from type alignment. */
 278 unsigned int
 279 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
281 unsigned int inner;
283 inner = max_align;
284 if (handled_component_p (exp))
286 HOST_WIDE_INT bitsize, bitpos;
287 tree offset;
288 enum machine_mode mode;
289 int unsignedp, volatilep;
291 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
292 &mode, &unsignedp, &volatilep, true);
293 if (bitpos)
294 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
295 while (offset)
297 tree next_offset;
299 if (TREE_CODE (offset) == PLUS_EXPR)
301 next_offset = TREE_OPERAND (offset, 0);
302 offset = TREE_OPERAND (offset, 1);
304 else
305 next_offset = NULL;
306 if (host_integerp (offset, 1))
308 /* Any overflow in calculating offset_bits won't change
309 the alignment. */
310 unsigned offset_bits
311 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
313 if (offset_bits)
314 inner = MIN (inner, (offset_bits & -offset_bits));
316 else if (TREE_CODE (offset) == MULT_EXPR
317 && host_integerp (TREE_OPERAND (offset, 1), 1))
319 /* Any overflow in calculating offset_factor won't change
320 the alignment. */
321 unsigned offset_factor
322 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
323 * BITS_PER_UNIT);
325 if (offset_factor)
326 inner = MIN (inner, (offset_factor & -offset_factor));
328 else
330 inner = MIN (inner, BITS_PER_UNIT);
331 break;
333 offset = next_offset;
336 if (DECL_P (exp))
337 align = MIN (inner, DECL_ALIGN (exp));
338 #ifdef CONSTANT_ALIGNMENT
339 else if (CONSTANT_CLASS_P (exp))
340 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
341 #endif
342 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
343 || TREE_CODE (exp) == INDIRECT_REF)
344 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
345 else
346 align = MIN (align, inner);
347 return MIN (align, max_align);
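
/* Illustration (assumed standalone helper, not used elsewhere in this
   file) of the `x & -x' idiom above: it isolates the lowest set bit,
   which is the strongest alignment guaranteed by an arbitrary multiple
   of X.  */
#if 0
static unsigned int
lowest_set_bit_example (unsigned int x)
{
  /* E.g. x == 24 (binary 11000) yields 8: an offset known only to be a
     multiple of 24 bits can be trusted for 8-bit alignment, no more.  */
  return x & -x;
}
#endif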
350 /* Returns true iff we can trust that alignment information has been
351 calculated properly. */
353 bool
354 can_trust_pointer_alignment (void)
356 /* We rely on TER to compute accurate alignment information. */
357 return (optimize && flag_tree_ter);
360 /* Return the alignment in bits of EXP, a pointer valued expression.
361 But don't return more than MAX_ALIGN no matter what.
362 The alignment returned is, by default, the alignment of the thing that
363 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
365 Otherwise, look at the expression to see if we can do better, i.e., if the
366 expression is actually pointing at an object whose alignment is tighter. */
 368 unsigned int
 369 get_pointer_alignment (tree exp, unsigned int max_align)
371 unsigned int align, inner;
373 if (!can_trust_pointer_alignment ())
374 return 0;
376 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
377 return 0;
379 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
380 align = MIN (align, max_align);
382 while (1)
384 switch (TREE_CODE (exp))
386 CASE_CONVERT:
387 exp = TREE_OPERAND (exp, 0);
388 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
389 return align;
391 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
392 align = MIN (inner, max_align);
393 break;
395 case POINTER_PLUS_EXPR:
396 /* If sum of pointer + int, restrict our maximum alignment to that
397 imposed by the integer. If not, we can't do any better than
398 ALIGN. */
399 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
400 return align;
402 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
403 & (max_align / BITS_PER_UNIT - 1))
404 != 0)
405 max_align >>= 1;
407 exp = TREE_OPERAND (exp, 0);
408 break;
410 case ADDR_EXPR:
411 /* See what we are pointing at and look at its alignment. */
412 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
414 default:
415 return align;
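
/* Worked example (illustrative) for the POINTER_PLUS_EXPR case above:
   with an initial MAX_ALIGN of 64 bits (8 bytes) and a byte offset of 6,
   6 & 7 and 6 & 3 are nonzero but 6 & 1 is zero, so MAX_ALIGN is reduced
   to 16 bits before the base pointer is examined.  */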
420 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
421 way, because it could contain a zero byte in the middle.
422 TREE_STRING_LENGTH is the size of the character array, not the string.
424 ONLY_VALUE should be nonzero if the result is not going to be emitted
425 into the instruction stream and zero if it is going to be expanded.
426 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
427 is returned, otherwise NULL, since
428 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
429 evaluate the side-effects.
431 The value returned is of type `ssizetype'.
433 Unfortunately, string_constant can't access the values of const char
434 arrays with initializers, so neither can we do so here. */
436 tree
437 c_strlen (tree src, int only_value)
439 tree offset_node;
440 HOST_WIDE_INT offset;
441 int max;
442 const char *ptr;
444 STRIP_NOPS (src);
445 if (TREE_CODE (src) == COND_EXPR
446 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
448 tree len1, len2;
450 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
451 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
452 if (tree_int_cst_equal (len1, len2))
453 return len1;
456 if (TREE_CODE (src) == COMPOUND_EXPR
457 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
458 return c_strlen (TREE_OPERAND (src, 1), only_value);
460 src = string_constant (src, &offset_node);
461 if (src == 0)
462 return NULL_TREE;
464 max = TREE_STRING_LENGTH (src) - 1;
465 ptr = TREE_STRING_POINTER (src);
467 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
469 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
470 compute the offset to the following null if we don't know where to
471 start searching for it. */
472 int i;
474 for (i = 0; i < max; i++)
475 if (ptr[i] == 0)
476 return NULL_TREE;
478 /* We don't know the starting offset, but we do know that the string
479 has no internal zero bytes. We can assume that the offset falls
480 within the bounds of the string; otherwise, the programmer deserves
481 what he gets. Subtract the offset from the length of the string,
482 and return that. This would perhaps not be valid if we were dealing
483 with named arrays in addition to literal string constants. */
485 return size_diffop_loc (input_location, size_int (max), offset_node);
488 /* We have a known offset into the string. Start searching there for
489 a null character if we can represent it as a single HOST_WIDE_INT. */
490 if (offset_node == 0)
491 offset = 0;
492 else if (! host_integerp (offset_node, 0))
493 offset = -1;
494 else
495 offset = tree_low_cst (offset_node, 0);
497 /* If the offset is known to be out of bounds, warn, and call strlen at
498 runtime. */
499 if (offset < 0 || offset > max)
501 /* Suppress multiple warnings for propagated constant strings. */
502 if (! TREE_NO_WARNING (src))
504 warning (0, "offset outside bounds of constant string");
505 TREE_NO_WARNING (src) = 1;
507 return NULL_TREE;
510 /* Use strlen to search for the first zero byte. Since any strings
511 constructed with build_string will have nulls appended, we win even
512 if we get handed something like (char[4])"abcd".
514 Since OFFSET is our starting index into the string, no further
515 calculation is needed. */
516 return ssize_int (strlen (ptr + offset));
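
/* A user-level sketch (illustrative only) of what c_strlen enables: a
   string constant plus a constant in-bounds offset folds to a
   compile-time length, while an out-of-bounds offset triggers the
   warning above and falls back to a runtime strlen call.  */
#if 0
static int
c_strlen_example (void)
{
  return (int) __builtin_strlen ("hello" + 2);  /* Folds to 3.  */
}
#endif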
519 /* Return a char pointer for a C string if it is a string constant
520 or sum of string constant and integer constant. */
522 static const char *
523 c_getstr (tree src)
525 tree offset_node;
527 src = string_constant (src, &offset_node);
528 if (src == 0)
529 return 0;
531 if (offset_node == 0)
532 return TREE_STRING_POINTER (src);
533 else if (!host_integerp (offset_node, 1)
534 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
535 return 0;
537 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
540 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
541 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
543 static rtx
544 c_readstr (const char *str, enum machine_mode mode)
546 HOST_WIDE_INT c[2];
547 HOST_WIDE_INT ch;
548 unsigned int i, j;
550 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
552 c[0] = 0;
553 c[1] = 0;
554 ch = 1;
555 for (i = 0; i < GET_MODE_SIZE (mode); i++)
557 j = i;
558 if (WORDS_BIG_ENDIAN)
559 j = GET_MODE_SIZE (mode) - i - 1;
560 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
561 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
562 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
563 j *= BITS_PER_UNIT;
564 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
566 if (ch)
567 ch = (unsigned char) str[i];
568 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
570 return immed_double_const (c[0], c[1], mode);
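
/* Worked example (illustrative) for the index arithmetic above: reading
   "abcd" into a 4-byte integer mode on a big-endian target maps i == 0
   to j == 3, so 'a' lands in the most significant byte and the constant
   is 0x61626364; on a little-endian target j == i and the constant is
   0x64636261.  Either way the constant matches what the target would
   load from memory holding "abcd".  */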
573 /* Cast a target constant CST to target CHAR and if that value fits into
574 host char type, return zero and put that value into variable pointed to by
575 P. */
577 static int
578 target_char_cast (tree cst, char *p)
580 unsigned HOST_WIDE_INT val, hostval;
582 if (!host_integerp (cst, 1)
583 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
584 return 1;
586 val = tree_low_cst (cst, 1);
587 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
588 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
590 hostval = val;
591 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
592 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
594 if (val != hostval)
595 return 1;
597 *p = hostval;
598 return 0;
601 /* Similar to save_expr, but assumes that arbitrary code is not executed
602 in between the multiple evaluations. In particular, we assume that a
603 non-addressable local variable will not be modified. */
605 static tree
606 builtin_save_expr (tree exp)
608 if (TREE_ADDRESSABLE (exp) == 0
609 && (TREE_CODE (exp) == PARM_DECL
610 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
611 return exp;
613 return save_expr (exp);
616 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
617 times to get the address of either a higher stack frame, or a return
618 address located within it (depending on FNDECL_CODE). */
620 static rtx
621 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
623 int i;
625 #ifdef INITIAL_FRAME_ADDRESS_RTX
626 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
627 #else
628 rtx tem;
630 /* For a zero count with __builtin_return_address, we don't care what
631 frame address we return, because target-specific definitions will
632 override us. Therefore frame pointer elimination is OK, and using
633 the soft frame pointer is OK.
635 For a nonzero count, or a zero count with __builtin_frame_address,
636 we require a stable offset from the current frame pointer to the
637 previous one, so we must use the hard frame pointer, and
638 we must disable frame pointer elimination. */
639 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
640 tem = frame_pointer_rtx;
641 else
643 tem = hard_frame_pointer_rtx;
645 /* Tell reload not to eliminate the frame pointer. */
646 crtl->accesses_prior_frames = 1;
648 #endif
650 /* Some machines need special handling before we can access
651 arbitrary frames. For example, on the SPARC, we must first flush
652 all register windows to the stack. */
653 #ifdef SETUP_FRAME_ADDRESSES
654 if (count > 0)
655 SETUP_FRAME_ADDRESSES ();
656 #endif
658 /* On the SPARC, the return address is not in the frame, it is in a
659 register. There is no way to access it off of the current frame
660 pointer, but it can be accessed off the previous frame pointer by
661 reading the value from the register window save area. */
662 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
663 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
664 count--;
665 #endif
667 /* Scan back COUNT frames to the specified frame. */
668 for (i = 0; i < count; i++)
670 /* Assume the dynamic chain pointer is in the word that the
671 frame address points to, unless otherwise specified. */
672 #ifdef DYNAMIC_CHAIN_ADDRESS
673 tem = DYNAMIC_CHAIN_ADDRESS (tem);
674 #endif
675 tem = memory_address (Pmode, tem);
676 tem = gen_frame_mem (Pmode, tem);
677 tem = copy_to_reg (tem);
680 /* For __builtin_frame_address, return what we've got. But, on
681 the SPARC for example, we may have to add a bias. */
682 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
683 #ifdef FRAME_ADDR_RTX
684 return FRAME_ADDR_RTX (tem);
685 #else
686 return tem;
687 #endif
689 /* For __builtin_return_address, get the return address from that frame. */
690 #ifdef RETURN_ADDR_RTX
691 tem = RETURN_ADDR_RTX (count, tem);
692 #else
693 tem = memory_address (Pmode,
694 plus_constant (tem, GET_MODE_SIZE (Pmode)));
695 tem = gen_frame_mem (Pmode, tem);
696 #endif
697 return tem;
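
/* User-level sketch (illustrative only; printf is just a stand-in
   consumer) of the two builtins expanded above.  The argument must be a
   constant; level 0 names the current frame.  */
#if 0
#include <stdio.h>
static void
return_addr_example (void)
{
  void *ra = __builtin_return_address (0);  /* Address we will return to.  */
  void *fp = __builtin_frame_address (0);   /* This function's frame.  */
  printf ("return address %p, frame %p\n", ra, fp);
}
#endif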
700 /* Alias set used for setjmp buffer. */
701 static alias_set_type setjmp_alias_set = -1;
703 /* Construct the leading half of a __builtin_setjmp call. Control will
704 return to RECEIVER_LABEL. This is also called directly by the SJLJ
705 exception handling code. */
707 void
708 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
710 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
711 rtx stack_save;
712 rtx mem;
714 if (setjmp_alias_set == -1)
715 setjmp_alias_set = new_alias_set ();
717 buf_addr = convert_memory_address (Pmode, buf_addr);
719 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
721 /* We store the frame pointer and the address of receiver_label in
722 the buffer and use the rest of it for the stack save area, which
723 is machine-dependent. */
725 mem = gen_rtx_MEM (Pmode, buf_addr);
726 set_mem_alias_set (mem, setjmp_alias_set);
727 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
729 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
730 set_mem_alias_set (mem, setjmp_alias_set);
732 emit_move_insn (validize_mem (mem),
733 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
735 stack_save = gen_rtx_MEM (sa_mode,
736 plus_constant (buf_addr,
737 2 * GET_MODE_SIZE (Pmode)));
738 set_mem_alias_set (stack_save, setjmp_alias_set);
739 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
741 /* If there is further processing to do, do it. */
742 #ifdef HAVE_builtin_setjmp_setup
743 if (HAVE_builtin_setjmp_setup)
744 emit_insn (gen_builtin_setjmp_setup (buf_addr));
745 #endif
747 /* Tell optimize_save_area_alloca that extra work is going to
748 need to go on during alloca. */
749 cfun->calls_setjmp = 1;
751 /* We have a nonlocal label. */
752 cfun->has_nonlocal_label = 1;
755 /* Construct the trailing part of a __builtin_setjmp call. This is
756 also called directly by the SJLJ exception handling code. */
758 void
759 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
761 /* Clobber the FP when we get here, so we have to make sure it's
762 marked as used by this function. */
763 emit_use (hard_frame_pointer_rtx);
765 /* Mark the static chain as clobbered here so life information
766 doesn't get messed up for it. */
767 emit_clobber (static_chain_rtx);
769 /* Now put in the code to restore the frame pointer, and argument
770 pointer, if needed. */
771 #ifdef HAVE_nonlocal_goto
772 if (! HAVE_nonlocal_goto)
773 #endif
775 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
776 /* This might change the hard frame pointer in ways that aren't
777 apparent to early optimization passes, so force a clobber. */
778 emit_clobber (hard_frame_pointer_rtx);
781 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
782 if (fixed_regs[ARG_POINTER_REGNUM])
784 #ifdef ELIMINABLE_REGS
785 size_t i;
786 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
788 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
789 if (elim_regs[i].from == ARG_POINTER_REGNUM
790 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
791 break;
793 if (i == ARRAY_SIZE (elim_regs))
794 #endif
796 /* Now restore our arg pointer from the address at which it
797 was saved in our stack frame. */
798 emit_move_insn (crtl->args.internal_arg_pointer,
799 copy_to_reg (get_arg_pointer_save_area ()));
802 #endif
804 #ifdef HAVE_builtin_setjmp_receiver
805 if (HAVE_builtin_setjmp_receiver)
806 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
807 else
808 #endif
809 #ifdef HAVE_nonlocal_goto_receiver
810 if (HAVE_nonlocal_goto_receiver)
811 emit_insn (gen_nonlocal_goto_receiver ());
812 else
813 #endif
814 { /* Nothing */ }
816 /* We must not allow the code we just generated to be reordered by
817 scheduling. Specifically, the update of the frame pointer must
818 happen immediately, not later. */
819 emit_insn (gen_blockage ());
822 /* __builtin_longjmp is passed a pointer to an array of five words (not
823 all will be used on all machines). It operates similarly to the C
824 library function of the same name, but is more efficient. Much of
825 the code below is copied from the handling of non-local gotos. */
827 static void
828 expand_builtin_longjmp (rtx buf_addr, rtx value)
830 rtx fp, lab, stack, insn, last;
831 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
833 /* DRAP is needed for stack realign if longjmp is expanded to current
834 function */
835 if (SUPPORTS_STACK_ALIGNMENT)
836 crtl->need_drap = true;
838 if (setjmp_alias_set == -1)
839 setjmp_alias_set = new_alias_set ();
841 buf_addr = convert_memory_address (Pmode, buf_addr);
843 buf_addr = force_reg (Pmode, buf_addr);
845 /* We used to store value in static_chain_rtx, but that fails if pointers
846 are smaller than integers. We instead require that the user must pass
847 a second argument of 1, because that is what builtin_setjmp will
848 return. This also makes EH slightly more efficient, since we are no
849 longer copying around a value that we don't care about. */
850 gcc_assert (value == const1_rtx);
852 last = get_last_insn ();
853 #ifdef HAVE_builtin_longjmp
854 if (HAVE_builtin_longjmp)
855 emit_insn (gen_builtin_longjmp (buf_addr));
856 else
857 #endif
859 fp = gen_rtx_MEM (Pmode, buf_addr);
860 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
861 GET_MODE_SIZE (Pmode)));
863 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
864 2 * GET_MODE_SIZE (Pmode)));
865 set_mem_alias_set (fp, setjmp_alias_set);
866 set_mem_alias_set (lab, setjmp_alias_set);
867 set_mem_alias_set (stack, setjmp_alias_set);
869 /* Pick up FP, label, and SP from the block and jump. This code is
870 from expand_goto in stmt.c; see there for detailed comments. */
871 #ifdef HAVE_nonlocal_goto
872 if (HAVE_nonlocal_goto)
873 /* We have to pass a value to the nonlocal_goto pattern that will
874 get copied into the static_chain pointer, but it does not matter
875 what that value is, because builtin_setjmp does not use it. */
876 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
877 else
878 #endif
880 lab = copy_to_reg (lab);
882 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
883 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
885 emit_move_insn (hard_frame_pointer_rtx, fp);
886 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
888 emit_use (hard_frame_pointer_rtx);
889 emit_use (stack_pointer_rtx);
890 emit_indirect_jump (lab);
894 /* Search backwards and mark the jump insn as a non-local goto.
895 Note that this precludes the use of __builtin_longjmp to a
896 __builtin_setjmp target in the same function. However, we've
897 already cautioned the user that these functions are for
898 internal exception handling use only. */
899 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
901 gcc_assert (insn != last);
903 if (JUMP_P (insn))
905 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
906 break;
908 else if (CALL_P (insn))
909 break;
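
/* User-level sketch (illustrative only) of the builtin setjmp/longjmp
   pair handled above.  The buffer is five words and the second argument
   to __builtin_longjmp must be the constant 1, matching the assertion
   above; as cautioned in the comments, these are intended for internal
   exception handling use.  */
#if 0
static void *jmp_buffer[5];

static void
do_jump (void)
{
  __builtin_longjmp (jmp_buffer, 1);
}

static int
setjmp_example (void)
{
  if (__builtin_setjmp (jmp_buffer))
    return 1;          /* Reached via __builtin_longjmp.  */
  do_jump ();
  return 0;            /* Not reached.  */
}
#endif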
913 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
914 and the address of the save area. */
916 static rtx
917 expand_builtin_nonlocal_goto (tree exp)
919 tree t_label, t_save_area;
920 rtx r_label, r_save_area, r_fp, r_sp, insn;
922 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
923 return NULL_RTX;
925 t_label = CALL_EXPR_ARG (exp, 0);
926 t_save_area = CALL_EXPR_ARG (exp, 1);
928 r_label = expand_normal (t_label);
929 r_label = convert_memory_address (Pmode, r_label);
930 r_save_area = expand_normal (t_save_area);
931 r_save_area = convert_memory_address (Pmode, r_save_area);
932 /* Copy the address of the save location to a register just in case it was based
933 on the frame pointer. */
934 r_save_area = copy_to_reg (r_save_area);
935 r_fp = gen_rtx_MEM (Pmode, r_save_area);
936 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
937 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
939 crtl->has_nonlocal_goto = 1;
941 #ifdef HAVE_nonlocal_goto
942 /* ??? We no longer need to pass the static chain value, afaik. */
943 if (HAVE_nonlocal_goto)
944 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
945 else
946 #endif
948 r_label = copy_to_reg (r_label);
950 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
951 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
953 /* Restore frame pointer for containing function.
954 This sets the actual hard register used for the frame pointer
955 to the location of the function's incoming static chain info.
956 The non-local goto handler will then adjust it to contain the
957 proper value and reload the argument pointer, if needed. */
958 emit_move_insn (hard_frame_pointer_rtx, r_fp);
959 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
961 /* USE of hard_frame_pointer_rtx added for consistency;
962 not clear if really needed. */
963 emit_use (hard_frame_pointer_rtx);
964 emit_use (stack_pointer_rtx);
966 /* If the architecture is using a GP register, we must
967 conservatively assume that the target function makes use of it.
968 The prologue of functions with nonlocal gotos must therefore
969 initialize the GP register to the appropriate value, and we
970 must then make sure that this value is live at the point
971 of the jump. (Note that this doesn't necessarily apply
972 to targets with a nonlocal_goto pattern; they are free
973 to implement it in their own way. Note also that this is
974 a no-op if the GP register is a global invariant.) */
975 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
976 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
977 emit_use (pic_offset_table_rtx);
979 emit_indirect_jump (r_label);
982 /* Search backwards to the jump insn and mark it as a
983 non-local goto. */
984 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
986 if (JUMP_P (insn))
988 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
989 break;
991 else if (CALL_P (insn))
992 break;
995 return const0_rtx;
998 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
999 (not all will be used on all machines) that was passed to __builtin_setjmp.
1000 It updates the stack pointer in that block to correspond to the current
1001 stack pointer. */
1003 static void
1004 expand_builtin_update_setjmp_buf (rtx buf_addr)
1006 enum machine_mode sa_mode = Pmode;
1007 rtx stack_save;
1010 #ifdef HAVE_save_stack_nonlocal
1011 if (HAVE_save_stack_nonlocal)
1012 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1013 #endif
1014 #ifdef STACK_SAVEAREA_MODE
1015 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1016 #endif
1018 stack_save
1019 = gen_rtx_MEM (sa_mode,
1020 memory_address
1021 (sa_mode,
1022 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1024 #ifdef HAVE_setjmp
1025 if (HAVE_setjmp)
1026 emit_insn (gen_setjmp ());
1027 #endif
1029 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1032 /* Expand a call to __builtin_prefetch. For a target that does not support
1033 data prefetch, evaluate the memory address argument in case it has side
1034 effects. */
1036 static void
1037 expand_builtin_prefetch (tree exp)
1039 tree arg0, arg1, arg2;
1040 int nargs;
1041 rtx op0, op1, op2;
1043 if (!validate_arglist (exp, POINTER_TYPE, 0))
1044 return;
1046 arg0 = CALL_EXPR_ARG (exp, 0);
1048 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1049 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1050 locality). */
1051 nargs = call_expr_nargs (exp);
1052 if (nargs > 1)
1053 arg1 = CALL_EXPR_ARG (exp, 1);
1054 else
1055 arg1 = integer_zero_node;
1056 if (nargs > 2)
1057 arg2 = CALL_EXPR_ARG (exp, 2);
1058 else
1059 arg2 = build_int_cst (NULL_TREE, 3);
1061 /* Argument 0 is an address. */
1062 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1064 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1065 if (TREE_CODE (arg1) != INTEGER_CST)
1067 error ("second argument to %<__builtin_prefetch%> must be a constant");
1068 arg1 = integer_zero_node;
1070 op1 = expand_normal (arg1);
1071 /* Argument 1 must be either zero or one. */
1072 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1074 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1075 " using zero");
1076 op1 = const0_rtx;
1079 /* Argument 2 (locality) must be a compile-time constant int. */
1080 if (TREE_CODE (arg2) != INTEGER_CST)
1082 error ("third argument to %<__builtin_prefetch%> must be a constant");
1083 arg2 = integer_zero_node;
1085 op2 = expand_normal (arg2);
1086 /* Argument 2 must be 0, 1, 2, or 3. */
1087 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1089 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1090 op2 = const0_rtx;
1093 #ifdef HAVE_prefetch
1094 if (HAVE_prefetch)
1096 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1097 (op0,
1098 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1099 || (GET_MODE (op0) != Pmode))
1101 op0 = convert_memory_address (Pmode, op0);
1102 op0 = force_reg (Pmode, op0);
1104 emit_insn (gen_prefetch (op0, op1, op2));
1106 #endif
1108 /* Don't do anything with direct references to volatile memory, but
1109 generate code to handle other side effects. */
1110 if (!MEM_P (op0) && side_effects_p (op0))
1111 emit_insn (op0);
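
/* User-level sketch (illustrative only; the loop body is hypothetical)
   of the builtin expanded above.  The second argument selects read (0)
   or write (1) access and the third the degree of locality (0-3),
   matching the argument checks above.  */
#if 0
static void
prefetch_example (const int *a, int *out, int n)
{
  int i;
  for (i = 0; i < n; i++)
    {
      __builtin_prefetch (&a[i + 16], 0, 3);  /* Read, high locality.  */
      out[i] = a[i] * 2;
    }
}
#endif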
1114 /* Get a MEM rtx for expression EXP which is the address of an operand
1115 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1116 the maximum length of the block of memory that might be accessed or
1117 NULL if unknown. */
1119 static rtx
1120 get_memory_rtx (tree exp, tree len)
1122 tree orig_exp = exp;
1123 rtx addr, mem;
1124 HOST_WIDE_INT off;
 1126 /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
 1127 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1128 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1129 exp = TREE_OPERAND (exp, 0);
1131 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1132 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1134 /* Get an expression we can use to find the attributes to assign to MEM.
1135 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1136 we can. First remove any nops. */
1137 while (CONVERT_EXPR_P (exp)
1138 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1139 exp = TREE_OPERAND (exp, 0);
1141 off = 0;
1142 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1143 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1144 && host_integerp (TREE_OPERAND (exp, 1), 0)
1145 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1146 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1147 else if (TREE_CODE (exp) == ADDR_EXPR)
1148 exp = TREE_OPERAND (exp, 0);
1149 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1150 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1151 else
1152 exp = NULL;
1154 /* Honor attributes derived from exp, except for the alias set
1155 (as builtin stringops may alias with anything) and the size
1156 (as stringops may access multiple array elements). */
1157 if (exp)
1159 set_mem_attributes (mem, exp, 0);
1161 if (off)
1162 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1164 /* Allow the string and memory builtins to overflow from one
1165 field into another, see http://gcc.gnu.org/PR23561.
1166 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1167 memory accessed by the string or memory builtin will fit
1168 within the field. */
1169 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1171 tree mem_expr = MEM_EXPR (mem);
1172 HOST_WIDE_INT offset = -1, length = -1;
1173 tree inner = exp;
1175 while (TREE_CODE (inner) == ARRAY_REF
1176 || CONVERT_EXPR_P (inner)
1177 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1178 || TREE_CODE (inner) == SAVE_EXPR)
1179 inner = TREE_OPERAND (inner, 0);
1181 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1183 if (MEM_OFFSET (mem)
1184 && CONST_INT_P (MEM_OFFSET (mem)))
1185 offset = INTVAL (MEM_OFFSET (mem));
1187 if (offset >= 0 && len && host_integerp (len, 0))
1188 length = tree_low_cst (len, 0);
1190 while (TREE_CODE (inner) == COMPONENT_REF)
1192 tree field = TREE_OPERAND (inner, 1);
1193 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1194 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1196 /* Bitfields are generally not byte-addressable. */
1197 gcc_assert (!DECL_BIT_FIELD (field)
1198 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1199 % BITS_PER_UNIT) == 0
1200 && host_integerp (DECL_SIZE (field), 0)
1201 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1202 % BITS_PER_UNIT) == 0));
1204 /* If we can prove that the memory starting at XEXP (mem, 0) and
1205 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1206 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1207 fields without DECL_SIZE_UNIT like flexible array members. */
1208 if (length >= 0
1209 && DECL_SIZE_UNIT (field)
1210 && host_integerp (DECL_SIZE_UNIT (field), 0))
1212 HOST_WIDE_INT size
1213 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1214 if (offset <= size
1215 && length <= size
1216 && offset + length <= size)
1217 break;
1220 if (offset >= 0
1221 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1222 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1223 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1224 / BITS_PER_UNIT;
1225 else
1227 offset = -1;
1228 length = -1;
1231 mem_expr = TREE_OPERAND (mem_expr, 0);
1232 inner = TREE_OPERAND (inner, 0);
1235 if (mem_expr == NULL)
1236 offset = -1;
1237 if (mem_expr != MEM_EXPR (mem))
1239 set_mem_expr (mem, mem_expr);
1240 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1243 set_mem_alias_set (mem, 0);
1244 set_mem_size (mem, NULL_RTX);
1247 return mem;
1250 /* Built-in functions to perform an untyped call and return. */
1252 /* For each register that may be used for calling a function, this
1253 gives a mode used to copy the register's value. VOIDmode indicates
1254 the register is not used for calling a function. If the machine
1255 has register windows, this gives only the outbound registers.
1256 INCOMING_REGNO gives the corresponding inbound register. */
1257 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1259 /* For each register that may be used for returning values, this gives
1260 a mode used to copy the register's value. VOIDmode indicates the
1261 register is not used for returning values. If the machine has
1262 register windows, this gives only the outbound registers.
1263 INCOMING_REGNO gives the corresponding inbound register. */
1264 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1266 /* For each register that may be used for calling a function, this
1267 gives the offset of that register into the block returned by
1268 __builtin_apply_args. 0 indicates that the register is not
1269 used for calling a function. */
1270 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1272 /* Return the size required for the block returned by __builtin_apply_args,
1273 and initialize apply_args_mode. */
1275 static int
1276 apply_args_size (void)
1278 static int size = -1;
1279 int align;
1280 unsigned int regno;
1281 enum machine_mode mode;
1283 /* The values computed by this function never change. */
1284 if (size < 0)
1286 /* The first value is the incoming arg-pointer. */
1287 size = GET_MODE_SIZE (Pmode);
1289 /* The second value is the structure value address unless this is
1290 passed as an "invisible" first argument. */
1291 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1292 size += GET_MODE_SIZE (Pmode);
1294 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1295 if (FUNCTION_ARG_REGNO_P (regno))
1297 mode = reg_raw_mode[regno];
1299 gcc_assert (mode != VOIDmode);
1301 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1302 if (size % align != 0)
1303 size = CEIL (size, align) * align;
1304 apply_args_reg_offset[regno] = size;
1305 size += GET_MODE_SIZE (mode);
1306 apply_args_mode[regno] = mode;
1308 else
1310 apply_args_mode[regno] = VOIDmode;
1311 apply_args_reg_offset[regno] = 0;
1314 return size;
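
/* Worked example (illustrative) of the rounding above: with SIZE
   currently 20 and an 8-byte register mode, CEIL (20, 8) * 8 == 24, so
   the register is stored at offset 24 and SIZE advances to 32.  */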
1317 /* Return the size required for the block returned by __builtin_apply,
1318 and initialize apply_result_mode. */
1320 static int
1321 apply_result_size (void)
1323 static int size = -1;
1324 int align, regno;
1325 enum machine_mode mode;
1327 /* The values computed by this function never change. */
1328 if (size < 0)
1330 size = 0;
1332 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1333 if (FUNCTION_VALUE_REGNO_P (regno))
1335 mode = reg_raw_mode[regno];
1337 gcc_assert (mode != VOIDmode);
1339 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1340 if (size % align != 0)
1341 size = CEIL (size, align) * align;
1342 size += GET_MODE_SIZE (mode);
1343 apply_result_mode[regno] = mode;
1345 else
1346 apply_result_mode[regno] = VOIDmode;
1348 /* Allow targets that use untyped_call and untyped_return to override
1349 the size so that machine-specific information can be stored here. */
1350 #ifdef APPLY_RESULT_SIZE
1351 size = APPLY_RESULT_SIZE;
1352 #endif
1354 return size;
1357 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1358 /* Create a vector describing the result block RESULT. If SAVEP is true,
1359 the result block is used to save the values; otherwise it is used to
1360 restore the values. */
1362 static rtx
1363 result_vector (int savep, rtx result)
1365 int regno, size, align, nelts;
1366 enum machine_mode mode;
1367 rtx reg, mem;
1368 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1370 size = nelts = 0;
1371 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1372 if ((mode = apply_result_mode[regno]) != VOIDmode)
1374 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1375 if (size % align != 0)
1376 size = CEIL (size, align) * align;
1377 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1378 mem = adjust_address (result, mode, size);
1379 savevec[nelts++] = (savep
1380 ? gen_rtx_SET (VOIDmode, mem, reg)
1381 : gen_rtx_SET (VOIDmode, reg, mem));
1382 size += GET_MODE_SIZE (mode);
1384 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1386 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1388 /* Save the state required to perform an untyped call with the same
1389 arguments as were passed to the current function. */
1391 static rtx
1392 expand_builtin_apply_args_1 (void)
1394 rtx registers, tem;
1395 int size, align, regno;
1396 enum machine_mode mode;
1397 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1399 /* Create a block where the arg-pointer, structure value address,
1400 and argument registers can be saved. */
1401 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1403 /* Walk past the arg-pointer and structure value address. */
1404 size = GET_MODE_SIZE (Pmode);
1405 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1406 size += GET_MODE_SIZE (Pmode);
1408 /* Save each register used in calling a function to the block. */
1409 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1410 if ((mode = apply_args_mode[regno]) != VOIDmode)
1412 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1413 if (size % align != 0)
1414 size = CEIL (size, align) * align;
1416 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1418 emit_move_insn (adjust_address (registers, mode, size), tem);
1419 size += GET_MODE_SIZE (mode);
1422 /* Save the arg pointer to the block. */
1423 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1424 #ifdef STACK_GROWS_DOWNWARD
1425 /* We need the pointer as the caller actually passed them to us, not
1426 as we might have pretended they were passed. Make sure it's a valid
1427 operand, as emit_move_insn isn't expected to handle a PLUS. */
 1428 tem
 1429 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1430 NULL_RTX);
1431 #endif
1432 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1434 size = GET_MODE_SIZE (Pmode);
1436 /* Save the structure value address unless this is passed as an
1437 "invisible" first argument. */
1438 if (struct_incoming_value)
1440 emit_move_insn (adjust_address (registers, Pmode, size),
1441 copy_to_reg (struct_incoming_value));
1442 size += GET_MODE_SIZE (Pmode);
1445 /* Return the address of the block. */
1446 return copy_addr_to_reg (XEXP (registers, 0));
1449 /* __builtin_apply_args returns block of memory allocated on
1450 the stack into which is stored the arg pointer, structure
1451 value address, static chain, and all the registers that might
1452 possibly be used in performing a function call. The code is
1453 moved to the start of the function so the incoming values are
1454 saved. */
1456 static rtx
1457 expand_builtin_apply_args (void)
1459 /* Don't do __builtin_apply_args more than once in a function.
1460 Save the result of the first call and reuse it. */
1461 if (apply_args_value != 0)
1462 return apply_args_value;
1464 /* When this function is called, it means that registers must be
1465 saved on entry to this function. So we migrate the
1466 call to the first insn of this function. */
1467 rtx temp;
1468 rtx seq;
1470 start_sequence ();
1471 temp = expand_builtin_apply_args_1 ();
1472 seq = get_insns ();
1473 end_sequence ();
1475 apply_args_value = temp;
1477 /* Put the insns after the NOTE that starts the function.
1478 If this is inside a start_sequence, make the outer-level insn
1479 chain current, so the code is placed at the start of the
1480 function. If internal_arg_pointer is a non-virtual pseudo,
1481 it needs to be placed after the function that initializes
1482 that pseudo. */
1483 push_topmost_sequence ();
1484 if (REG_P (crtl->args.internal_arg_pointer)
1485 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1486 emit_insn_before (seq, parm_birth_insn);
1487 else
1488 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1489 pop_topmost_sequence ();
1490 return temp;
1494 /* Perform an untyped call and save the state required to perform an
1495 untyped return of whatever value was returned by the given function. */
1497 static rtx
1498 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1500 int size, align, regno;
1501 enum machine_mode mode;
1502 rtx incoming_args, result, reg, dest, src, call_insn;
1503 rtx old_stack_level = 0;
1504 rtx call_fusage = 0;
1505 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1507 arguments = convert_memory_address (Pmode, arguments);
1509 /* Create a block where the return registers can be saved. */
1510 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1512 /* Fetch the arg pointer from the ARGUMENTS block. */
1513 incoming_args = gen_reg_rtx (Pmode);
1514 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1515 #ifndef STACK_GROWS_DOWNWARD
1516 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1517 incoming_args, 0, OPTAB_LIB_WIDEN);
1518 #endif
1520 /* Push a new argument block and copy the arguments. Do not allow
1521 the (potential) memcpy call below to interfere with our stack
1522 manipulations. */
1523 do_pending_stack_adjust ();
1524 NO_DEFER_POP;
1526 /* Save the stack with nonlocal if available. */
1527 #ifdef HAVE_save_stack_nonlocal
1528 if (HAVE_save_stack_nonlocal)
1529 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1530 else
1531 #endif
1532 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1534 /* Allocate a block of memory onto the stack and copy the memory
1535 arguments to the outgoing arguments address. */
1536 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1538 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1539 may have already set current_function_calls_alloca to true.
1540 current_function_calls_alloca won't be set if argsize is zero,
1541 so we have to guarantee need_drap is true here. */
1542 if (SUPPORTS_STACK_ALIGNMENT)
1543 crtl->need_drap = true;
1545 dest = virtual_outgoing_args_rtx;
1546 #ifndef STACK_GROWS_DOWNWARD
1547 if (CONST_INT_P (argsize))
1548 dest = plus_constant (dest, -INTVAL (argsize));
1549 else
1550 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1551 #endif
1552 dest = gen_rtx_MEM (BLKmode, dest);
1553 set_mem_align (dest, PARM_BOUNDARY);
1554 src = gen_rtx_MEM (BLKmode, incoming_args);
1555 set_mem_align (src, PARM_BOUNDARY);
1556 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1558 /* Refer to the argument block. */
1559 apply_args_size ();
1560 arguments = gen_rtx_MEM (BLKmode, arguments);
1561 set_mem_align (arguments, PARM_BOUNDARY);
1563 /* Walk past the arg-pointer and structure value address. */
1564 size = GET_MODE_SIZE (Pmode);
1565 if (struct_value)
1566 size += GET_MODE_SIZE (Pmode);
1568 /* Restore each of the registers previously saved. Make USE insns
1569 for each of these registers for use in making the call. */
1570 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1571 if ((mode = apply_args_mode[regno]) != VOIDmode)
1573 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1574 if (size % align != 0)
1575 size = CEIL (size, align) * align;
1576 reg = gen_rtx_REG (mode, regno);
1577 emit_move_insn (reg, adjust_address (arguments, mode, size));
1578 use_reg (&call_fusage, reg);
1579 size += GET_MODE_SIZE (mode);
1582 /* Restore the structure value address unless this is passed as an
1583 "invisible" first argument. */
1584 size = GET_MODE_SIZE (Pmode);
1585 if (struct_value)
1587 rtx value = gen_reg_rtx (Pmode);
1588 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1589 emit_move_insn (struct_value, value);
1590 if (REG_P (struct_value))
1591 use_reg (&call_fusage, struct_value);
1592 size += GET_MODE_SIZE (Pmode);
1595 /* All arguments and registers used for the call are set up by now! */
1596 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1598 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1599 and we don't want to load it into a register as an optimization,
1600 because prepare_call_address already did it if it should be done. */
1601 if (GET_CODE (function) != SYMBOL_REF)
1602 function = memory_address (FUNCTION_MODE, function);
1604 /* Generate the actual call instruction and save the return value. */
1605 #ifdef HAVE_untyped_call
1606 if (HAVE_untyped_call)
1607 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1608 result, result_vector (1, result)));
1609 else
1610 #endif
1611 #ifdef HAVE_call_value
1612 if (HAVE_call_value)
1614 rtx valreg = 0;
1616 /* Locate the unique return register. It is not possible to
1617 express a call that sets more than one return register using
1618 call_value; use untyped_call for that. In fact, untyped_call
1619 only needs to save the return registers in the given block. */
1620 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1621 if ((mode = apply_result_mode[regno]) != VOIDmode)
1623 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1625 valreg = gen_rtx_REG (mode, regno);
1628 emit_call_insn (GEN_CALL_VALUE (valreg,
1629 gen_rtx_MEM (FUNCTION_MODE, function),
1630 const0_rtx, NULL_RTX, const0_rtx));
1632 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1634 else
1635 #endif
1636 gcc_unreachable ();
1638 /* Find the CALL insn we just emitted, and attach the register usage
1639 information. */
1640 call_insn = last_call_insn ();
1641 add_function_usage_to (call_insn, call_fusage);
1643 /* Restore the stack. */
1644 #ifdef HAVE_save_stack_nonlocal
1645 if (HAVE_save_stack_nonlocal)
1646 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1647 else
1648 #endif
1649 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1651 OK_DEFER_POP;
1653 /* Return the address of the result block. */
1654 result = copy_addr_to_reg (XEXP (result, 0));
1655 return convert_memory_address (ptr_mode, result);
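/* These expanders back the GCC call-forwarding extensions.  A typical
   (hypothetical) use, forwarding the current arguments to some other
   function OTHER_FN and returning whatever it returned, looks roughly like

     void *args = __builtin_apply_args ();
     void *ret  = __builtin_apply ((void (*) ()) other_fn, args, 64);
     __builtin_return (ret);

   where 64 is a caller-chosen upper bound, in bytes, on the size of the
   pushed argument data.  */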
1658 /* Perform an untyped return. */
1660 static void
1661 expand_builtin_return (rtx result)
1663 int size, align, regno;
1664 enum machine_mode mode;
1665 rtx reg;
1666 rtx call_fusage = 0;
1668 result = convert_memory_address (Pmode, result);
1670 apply_result_size ();
1671 result = gen_rtx_MEM (BLKmode, result);
1673 #ifdef HAVE_untyped_return
1674 if (HAVE_untyped_return)
1676 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1677 emit_barrier ();
1678 return;
1680 #endif
1682 /* Restore the return value and note that each value is used. */
1683 size = 0;
1684 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1685 if ((mode = apply_result_mode[regno]) != VOIDmode)
1687 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1688 if (size % align != 0)
1689 size = CEIL (size, align) * align;
1690 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1691 emit_move_insn (reg, adjust_address (result, mode, size));
1693 push_to_sequence (call_fusage);
1694 emit_use (reg);
1695 call_fusage = get_insns ();
1696 end_sequence ();
1697 size += GET_MODE_SIZE (mode);
1700 /* Put the USE insns before the return. */
1701 emit_insn (call_fusage);
1703 /* Return whatever values were restored by jumping directly to the end
1704 of the function. */
1705 expand_naked_return ();
1708 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1710 static enum type_class
1711 type_to_class (tree type)
1713 switch (TREE_CODE (type))
1715 case VOID_TYPE: return void_type_class;
1716 case INTEGER_TYPE: return integer_type_class;
1717 case ENUMERAL_TYPE: return enumeral_type_class;
1718 case BOOLEAN_TYPE: return boolean_type_class;
1719 case POINTER_TYPE: return pointer_type_class;
1720 case REFERENCE_TYPE: return reference_type_class;
1721 case OFFSET_TYPE: return offset_type_class;
1722 case REAL_TYPE: return real_type_class;
1723 case COMPLEX_TYPE: return complex_type_class;
1724 case FUNCTION_TYPE: return function_type_class;
1725 case METHOD_TYPE: return method_type_class;
1726 case RECORD_TYPE: return record_type_class;
1727 case UNION_TYPE:
1728 case QUAL_UNION_TYPE: return union_type_class;
1729 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1730 ? string_type_class : array_type_class);
1731 case LANG_TYPE: return lang_type_class;
1732 default: return no_type_class;
1736 /* Expand a call EXP to __builtin_classify_type. */
1738 static rtx
1739 expand_builtin_classify_type (tree exp)
1741 if (call_expr_nargs (exp))
1742 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1743 return GEN_INT (no_type_class);
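/* For instance, a call such as __builtin_classify_type (3.14) has a single
   argument of REAL_TYPE and therefore expands to the constant
   real_type_class, while a call with no argument expands to
   no_type_class.  */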
1746 /* This helper macro, meant to be used in mathfn_built_in below,
1747 determines which among a set of three builtin math functions is
1748 appropriate for a given type mode. The `F' and `L' cases are
1749 automatically generated from the `double' case. */
1750 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1751 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1752 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1753 fcodel = BUILT_IN_MATHFN##L ; break;
1754 /* Similar to above, but appends _R after any F/L suffix. */
1755 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1756 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1757 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1758 fcodel = BUILT_IN_MATHFN##L_R ; break;
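/* As an illustration, CASE_MATHFN (BUILT_IN_SQRT) expands to

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   covering the double, float and long double variants with one macro use.  */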
1760 /* Return the mathematical function equivalent to FN but operating directly
1761 on TYPE, if available. If IMPLICIT is true find the function in
1762 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1763 can't do the conversion, return zero. */
1765 static tree
1766 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1768 tree const *const fn_arr
1769 = implicit ? implicit_built_in_decls : built_in_decls;
1770 enum built_in_function fcode, fcodef, fcodel;
1772 switch (fn)
1774 CASE_MATHFN (BUILT_IN_ACOS)
1775 CASE_MATHFN (BUILT_IN_ACOSH)
1776 CASE_MATHFN (BUILT_IN_ASIN)
1777 CASE_MATHFN (BUILT_IN_ASINH)
1778 CASE_MATHFN (BUILT_IN_ATAN)
1779 CASE_MATHFN (BUILT_IN_ATAN2)
1780 CASE_MATHFN (BUILT_IN_ATANH)
1781 CASE_MATHFN (BUILT_IN_CBRT)
1782 CASE_MATHFN (BUILT_IN_CEIL)
1783 CASE_MATHFN (BUILT_IN_CEXPI)
1784 CASE_MATHFN (BUILT_IN_COPYSIGN)
1785 CASE_MATHFN (BUILT_IN_COS)
1786 CASE_MATHFN (BUILT_IN_COSH)
1787 CASE_MATHFN (BUILT_IN_DREM)
1788 CASE_MATHFN (BUILT_IN_ERF)
1789 CASE_MATHFN (BUILT_IN_ERFC)
1790 CASE_MATHFN (BUILT_IN_EXP)
1791 CASE_MATHFN (BUILT_IN_EXP10)
1792 CASE_MATHFN (BUILT_IN_EXP2)
1793 CASE_MATHFN (BUILT_IN_EXPM1)
1794 CASE_MATHFN (BUILT_IN_FABS)
1795 CASE_MATHFN (BUILT_IN_FDIM)
1796 CASE_MATHFN (BUILT_IN_FLOOR)
1797 CASE_MATHFN (BUILT_IN_FMA)
1798 CASE_MATHFN (BUILT_IN_FMAX)
1799 CASE_MATHFN (BUILT_IN_FMIN)
1800 CASE_MATHFN (BUILT_IN_FMOD)
1801 CASE_MATHFN (BUILT_IN_FREXP)
1802 CASE_MATHFN (BUILT_IN_GAMMA)
1803 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1804 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1805 CASE_MATHFN (BUILT_IN_HYPOT)
1806 CASE_MATHFN (BUILT_IN_ILOGB)
1807 CASE_MATHFN (BUILT_IN_INF)
1808 CASE_MATHFN (BUILT_IN_ISINF)
1809 CASE_MATHFN (BUILT_IN_J0)
1810 CASE_MATHFN (BUILT_IN_J1)
1811 CASE_MATHFN (BUILT_IN_JN)
1812 CASE_MATHFN (BUILT_IN_LCEIL)
1813 CASE_MATHFN (BUILT_IN_LDEXP)
1814 CASE_MATHFN (BUILT_IN_LFLOOR)
1815 CASE_MATHFN (BUILT_IN_LGAMMA)
1816 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1817 CASE_MATHFN (BUILT_IN_LLCEIL)
1818 CASE_MATHFN (BUILT_IN_LLFLOOR)
1819 CASE_MATHFN (BUILT_IN_LLRINT)
1820 CASE_MATHFN (BUILT_IN_LLROUND)
1821 CASE_MATHFN (BUILT_IN_LOG)
1822 CASE_MATHFN (BUILT_IN_LOG10)
1823 CASE_MATHFN (BUILT_IN_LOG1P)
1824 CASE_MATHFN (BUILT_IN_LOG2)
1825 CASE_MATHFN (BUILT_IN_LOGB)
1826 CASE_MATHFN (BUILT_IN_LRINT)
1827 CASE_MATHFN (BUILT_IN_LROUND)
1828 CASE_MATHFN (BUILT_IN_MODF)
1829 CASE_MATHFN (BUILT_IN_NAN)
1830 CASE_MATHFN (BUILT_IN_NANS)
1831 CASE_MATHFN (BUILT_IN_NEARBYINT)
1832 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1833 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1834 CASE_MATHFN (BUILT_IN_POW)
1835 CASE_MATHFN (BUILT_IN_POWI)
1836 CASE_MATHFN (BUILT_IN_POW10)
1837 CASE_MATHFN (BUILT_IN_REMAINDER)
1838 CASE_MATHFN (BUILT_IN_REMQUO)
1839 CASE_MATHFN (BUILT_IN_RINT)
1840 CASE_MATHFN (BUILT_IN_ROUND)
1841 CASE_MATHFN (BUILT_IN_SCALB)
1842 CASE_MATHFN (BUILT_IN_SCALBLN)
1843 CASE_MATHFN (BUILT_IN_SCALBN)
1844 CASE_MATHFN (BUILT_IN_SIGNBIT)
1845 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1846 CASE_MATHFN (BUILT_IN_SIN)
1847 CASE_MATHFN (BUILT_IN_SINCOS)
1848 CASE_MATHFN (BUILT_IN_SINH)
1849 CASE_MATHFN (BUILT_IN_SQRT)
1850 CASE_MATHFN (BUILT_IN_TAN)
1851 CASE_MATHFN (BUILT_IN_TANH)
1852 CASE_MATHFN (BUILT_IN_TGAMMA)
1853 CASE_MATHFN (BUILT_IN_TRUNC)
1854 CASE_MATHFN (BUILT_IN_Y0)
1855 CASE_MATHFN (BUILT_IN_Y1)
1856 CASE_MATHFN (BUILT_IN_YN)
1858 default:
1859 return NULL_TREE;
1862 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1863 return fn_arr[fcode];
1864 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1865 return fn_arr[fcodef];
1866 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1867 return fn_arr[fcodel];
1868 else
1869 return NULL_TREE;
1872 /* Like mathfn_built_in_1(), but always use the implicit array. */
1874 tree
1875 mathfn_built_in (tree type, enum built_in_function fn)
1877 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
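/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) yields the
   implicit declaration of sinf, or NULL_TREE when no such declaration has
   been recorded for the target.  */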
1880 /* If errno must be maintained, expand the RTL to check if the result,
1881 TARGET, of a built-in function call, EXP, is NaN, and if so set
1882 errno to EDOM. */
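/* For example, with -fmath-errno in effect, sqrt (-1.0) produces a NaN, and
   the code emitted here either stores EDOM into errno directly or re-issues
   the library call so that the library sets it.  */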
1884 static void
1885 expand_errno_check (tree exp, rtx target)
1887 rtx lab = gen_label_rtx ();
1889 /* Test the result; if it is NaN, set errno=EDOM because
1890 the argument was not in the domain. */
1891 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1892 NULL_RTX, NULL_RTX, lab);
1894 #ifdef TARGET_EDOM
1895 /* If this built-in doesn't throw an exception, set errno directly. */
1896 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1898 #ifdef GEN_ERRNO_RTX
1899 rtx errno_rtx = GEN_ERRNO_RTX;
1900 #else
1901 rtx errno_rtx
1902 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1903 #endif
1904 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1905 emit_label (lab);
1906 return;
1908 #endif
1910 /* Make sure the library call isn't expanded as a tail call. */
1911 CALL_EXPR_TAILCALL (exp) = 0;
1913 /* We can't set errno=EDOM directly; let the library call do it.
1914 Pop the arguments right away in case the call gets deleted. */
1915 NO_DEFER_POP;
1916 expand_call (exp, target, 0);
1917 OK_DEFER_POP;
1918 emit_label (lab);
1921 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1922 Return NULL_RTX if a normal call should be emitted rather than expanding
1923 the function in-line. EXP is the expression that is a call to the builtin
1924 function; if convenient, the result should be placed in TARGET.
1925 SUBTARGET may be used as the target for computing one of EXP's operands. */
1927 static rtx
1928 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1930 optab builtin_optab;
1931 rtx op0, insns, before_call;
1932 tree fndecl = get_callee_fndecl (exp);
1933 enum machine_mode mode;
1934 bool errno_set = false;
1935 tree arg;
1937 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1938 return NULL_RTX;
1940 arg = CALL_EXPR_ARG (exp, 0);
1942 switch (DECL_FUNCTION_CODE (fndecl))
1944 CASE_FLT_FN (BUILT_IN_SQRT):
1945 errno_set = ! tree_expr_nonnegative_p (arg);
1946 builtin_optab = sqrt_optab;
1947 break;
1948 CASE_FLT_FN (BUILT_IN_EXP):
1949 errno_set = true; builtin_optab = exp_optab; break;
1950 CASE_FLT_FN (BUILT_IN_EXP10):
1951 CASE_FLT_FN (BUILT_IN_POW10):
1952 errno_set = true; builtin_optab = exp10_optab; break;
1953 CASE_FLT_FN (BUILT_IN_EXP2):
1954 errno_set = true; builtin_optab = exp2_optab; break;
1955 CASE_FLT_FN (BUILT_IN_EXPM1):
1956 errno_set = true; builtin_optab = expm1_optab; break;
1957 CASE_FLT_FN (BUILT_IN_LOGB):
1958 errno_set = true; builtin_optab = logb_optab; break;
1959 CASE_FLT_FN (BUILT_IN_LOG):
1960 errno_set = true; builtin_optab = log_optab; break;
1961 CASE_FLT_FN (BUILT_IN_LOG10):
1962 errno_set = true; builtin_optab = log10_optab; break;
1963 CASE_FLT_FN (BUILT_IN_LOG2):
1964 errno_set = true; builtin_optab = log2_optab; break;
1965 CASE_FLT_FN (BUILT_IN_LOG1P):
1966 errno_set = true; builtin_optab = log1p_optab; break;
1967 CASE_FLT_FN (BUILT_IN_ASIN):
1968 builtin_optab = asin_optab; break;
1969 CASE_FLT_FN (BUILT_IN_ACOS):
1970 builtin_optab = acos_optab; break;
1971 CASE_FLT_FN (BUILT_IN_TAN):
1972 builtin_optab = tan_optab; break;
1973 CASE_FLT_FN (BUILT_IN_ATAN):
1974 builtin_optab = atan_optab; break;
1975 CASE_FLT_FN (BUILT_IN_FLOOR):
1976 builtin_optab = floor_optab; break;
1977 CASE_FLT_FN (BUILT_IN_CEIL):
1978 builtin_optab = ceil_optab; break;
1979 CASE_FLT_FN (BUILT_IN_TRUNC):
1980 builtin_optab = btrunc_optab; break;
1981 CASE_FLT_FN (BUILT_IN_ROUND):
1982 builtin_optab = round_optab; break;
1983 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1984 builtin_optab = nearbyint_optab;
1985 if (flag_trapping_math)
1986 break;
1987 /* Else fall through and expand as rint. */
1988 CASE_FLT_FN (BUILT_IN_RINT):
1989 builtin_optab = rint_optab; break;
1990 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1991 builtin_optab = significand_optab; break;
1992 default:
1993 gcc_unreachable ();
1996 /* Make a suitable register to place result in. */
1997 mode = TYPE_MODE (TREE_TYPE (exp));
1999 if (! flag_errno_math || ! HONOR_NANS (mode))
2000 errno_set = false;
2002 /* Before working hard, check whether the instruction is available. */
2003 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2005 target = gen_reg_rtx (mode);
2007 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2008 need to expand the argument again. This way, we will not perform
2009 side-effects more than once. */
2010 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2012 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2014 start_sequence ();
2016 /* Compute into TARGET.
2017 Set TARGET to wherever the result comes back. */
2018 target = expand_unop (mode, builtin_optab, op0, target, 0);
2020 if (target != 0)
2022 if (errno_set)
2023 expand_errno_check (exp, target);
2025 /* Output the entire sequence. */
2026 insns = get_insns ();
2027 end_sequence ();
2028 emit_insn (insns);
2029 return target;
2032 /* If we were unable to expand via the builtin, stop the sequence
2033 (without outputting the insns) and call the library function
2034 with the stabilized argument list. */
2035 end_sequence ();
2038 before_call = get_last_insn ();
2040 return expand_call (exp, target, target == const0_rtx);
2043 /* Expand a call to the builtin binary math functions (pow and atan2).
2044 Return NULL_RTX if a normal call should be emitted rather than expanding the
2045 function in-line. EXP is the expression that is a call to the builtin
2046 function; if convenient, the result should be placed in TARGET.
2047 SUBTARGET may be used as the target for computing one of EXP's
2048 operands. */
2050 static rtx
2051 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2053 optab builtin_optab;
2054 rtx op0, op1, insns;
2055 int op1_type = REAL_TYPE;
2056 tree fndecl = get_callee_fndecl (exp);
2057 tree arg0, arg1;
2058 enum machine_mode mode;
2059 bool errno_set = true;
2061 switch (DECL_FUNCTION_CODE (fndecl))
2063 CASE_FLT_FN (BUILT_IN_SCALBN):
2064 CASE_FLT_FN (BUILT_IN_SCALBLN):
2065 CASE_FLT_FN (BUILT_IN_LDEXP):
2066 op1_type = INTEGER_TYPE;
2067 default:
2068 break;
2071 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2072 return NULL_RTX;
2074 arg0 = CALL_EXPR_ARG (exp, 0);
2075 arg1 = CALL_EXPR_ARG (exp, 1);
2077 switch (DECL_FUNCTION_CODE (fndecl))
2079 CASE_FLT_FN (BUILT_IN_POW):
2080 builtin_optab = pow_optab; break;
2081 CASE_FLT_FN (BUILT_IN_ATAN2):
2082 builtin_optab = atan2_optab; break;
2083 CASE_FLT_FN (BUILT_IN_SCALB):
2084 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2085 return 0;
2086 builtin_optab = scalb_optab; break;
2087 CASE_FLT_FN (BUILT_IN_SCALBN):
2088 CASE_FLT_FN (BUILT_IN_SCALBLN):
2089 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2090 return 0;
2091 /* Fall through... */
2092 CASE_FLT_FN (BUILT_IN_LDEXP):
2093 builtin_optab = ldexp_optab; break;
2094 CASE_FLT_FN (BUILT_IN_FMOD):
2095 builtin_optab = fmod_optab; break;
2096 CASE_FLT_FN (BUILT_IN_REMAINDER):
2097 CASE_FLT_FN (BUILT_IN_DREM):
2098 builtin_optab = remainder_optab; break;
2099 default:
2100 gcc_unreachable ();
2103 /* Make a suitable register to place result in. */
2104 mode = TYPE_MODE (TREE_TYPE (exp));
2106 /* Before working hard, check whether the instruction is available. */
2107 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2108 return NULL_RTX;
2110 target = gen_reg_rtx (mode);
2112 if (! flag_errno_math || ! HONOR_NANS (mode))
2113 errno_set = false;
2115 /* Always stabilize the argument list. */
2116 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2117 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2119 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2120 op1 = expand_normal (arg1);
2122 start_sequence ();
2124 /* Compute into TARGET.
2125 Set TARGET to wherever the result comes back. */
2126 target = expand_binop (mode, builtin_optab, op0, op1,
2127 target, 0, OPTAB_DIRECT);
2129 /* If we were unable to expand via the builtin, stop the sequence
2130 (without outputting the insns) and call the library function
2131 with the stabilized argument list. */
2132 if (target == 0)
2134 end_sequence ();
2135 return expand_call (exp, target, target == const0_rtx);
2138 if (errno_set)
2139 expand_errno_check (exp, target);
2141 /* Output the entire sequence. */
2142 insns = get_insns ();
2143 end_sequence ();
2144 emit_insn (insns);
2146 return target;
2149 /* Expand a call to the builtin sin and cos math functions.
2150 Return NULL_RTX if a normal call should be emitted rather than expanding the
2151 function in-line. EXP is the expression that is a call to the builtin
2152 function; if convenient, the result should be placed in TARGET.
2153 SUBTARGET may be used as the target for computing one of EXP's
2154 operands. */
2156 static rtx
2157 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2159 optab builtin_optab;
2160 rtx op0, insns;
2161 tree fndecl = get_callee_fndecl (exp);
2162 enum machine_mode mode;
2163 tree arg;
2165 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2166 return NULL_RTX;
2168 arg = CALL_EXPR_ARG (exp, 0);
2170 switch (DECL_FUNCTION_CODE (fndecl))
2172 CASE_FLT_FN (BUILT_IN_SIN):
2173 CASE_FLT_FN (BUILT_IN_COS):
2174 builtin_optab = sincos_optab; break;
2175 default:
2176 gcc_unreachable ();
2179 /* Make a suitable register to place result in. */
2180 mode = TYPE_MODE (TREE_TYPE (exp));
2182 /* Check whether the sincos insn is available; otherwise fall back
2183 to the sin or cos insn. */
2184 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2185 switch (DECL_FUNCTION_CODE (fndecl))
2187 CASE_FLT_FN (BUILT_IN_SIN):
2188 builtin_optab = sin_optab; break;
2189 CASE_FLT_FN (BUILT_IN_COS):
2190 builtin_optab = cos_optab; break;
2191 default:
2192 gcc_unreachable ();
2195 /* Before working hard, check whether the instruction is available. */
2196 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2198 target = gen_reg_rtx (mode);
2200 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2201 need to expand the argument again. This way, we will not perform
2202 side-effects more than once. */
2203 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2205 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2207 start_sequence ();
2209 /* Compute into TARGET.
2210 Set TARGET to wherever the result comes back. */
2211 if (builtin_optab == sincos_optab)
2213 int result;
2215 switch (DECL_FUNCTION_CODE (fndecl))
2217 CASE_FLT_FN (BUILT_IN_SIN):
2218 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2219 break;
2220 CASE_FLT_FN (BUILT_IN_COS):
2221 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2222 break;
2223 default:
2224 gcc_unreachable ();
2226 gcc_assert (result);
2228 else
2230 target = expand_unop (mode, builtin_optab, op0, target, 0);
2233 if (target != 0)
2235 /* Output the entire sequence. */
2236 insns = get_insns ();
2237 end_sequence ();
2238 emit_insn (insns);
2239 return target;
2242 /* If we were unable to expand via the builtin, stop the sequence
2243 (without outputting the insns) and call the library function
2244 with the stabilized argument list. */
2245 end_sequence ();
2248 target = expand_call (exp, target, target == const0_rtx);
2250 return target;
2253 /* Expand a call to one of the builtin math functions that operate on
2254 a floating point argument and output an integer result (ilogb, isinf,
2255 isnan, etc).
2256 Return 0 if a normal call should be emitted rather than expanding the
2257 function in-line. EXP is the expression that is a call to the builtin
2258 function; if convenient, the result should be placed in TARGET.
2259 SUBTARGET may be used as the target for computing one of EXP's operands. */
2261 static rtx
2262 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2264 optab builtin_optab = 0;
2265 enum insn_code icode = CODE_FOR_nothing;
2266 rtx op0;
2267 tree fndecl = get_callee_fndecl (exp);
2268 enum machine_mode mode;
2269 bool errno_set = false;
2270 tree arg;
2271 location_t loc = EXPR_LOCATION (exp);
2273 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2274 return NULL_RTX;
2276 arg = CALL_EXPR_ARG (exp, 0);
2278 switch (DECL_FUNCTION_CODE (fndecl))
2280 CASE_FLT_FN (BUILT_IN_ILOGB):
2281 errno_set = true; builtin_optab = ilogb_optab; break;
2282 CASE_FLT_FN (BUILT_IN_ISINF):
2283 builtin_optab = isinf_optab; break;
2284 case BUILT_IN_ISNORMAL:
2285 case BUILT_IN_ISFINITE:
2286 CASE_FLT_FN (BUILT_IN_FINITE):
2287 /* These builtins have no optabs (yet). */
2288 break;
2289 default:
2290 gcc_unreachable ();
2293 /* There's no easy way to detect the case we need to set EDOM. */
2294 if (flag_errno_math && errno_set)
2295 return NULL_RTX;
2297 /* Optab mode depends on the mode of the input argument. */
2298 mode = TYPE_MODE (TREE_TYPE (arg));
2300 if (builtin_optab)
2301 icode = optab_handler (builtin_optab, mode)->insn_code;
2303 /* Before working hard, check whether the instruction is available. */
2304 if (icode != CODE_FOR_nothing)
2306 /* Make a suitable register to place result in. */
2307 if (!target
2308 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2309 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2311 gcc_assert (insn_data[icode].operand[0].predicate
2312 (target, GET_MODE (target)));
2314 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2315 need to expand the argument again. This way, we will not perform
2316 side-effects more than once. */
2317 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2319 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2321 if (mode != GET_MODE (op0))
2322 op0 = convert_to_mode (mode, op0, 0);
2324 /* Compute into TARGET.
2325 Set TARGET to wherever the result comes back. */
2326 emit_unop_insn (icode, target, op0, UNKNOWN);
2327 return target;
2330 /* If there is no optab, try generic code. */
2331 switch (DECL_FUNCTION_CODE (fndecl))
2333 tree result;
2335 CASE_FLT_FN (BUILT_IN_ISINF):
2337 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2338 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2339 tree const type = TREE_TYPE (arg);
2340 REAL_VALUE_TYPE r;
2341 char buf[128];
2343 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2344 real_from_string (&r, buf);
2345 result = build_call_expr (isgr_fn, 2,
2346 fold_build1_loc (loc, ABS_EXPR, type, arg),
2347 build_real (type, r));
2348 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2350 CASE_FLT_FN (BUILT_IN_FINITE):
2351 case BUILT_IN_ISFINITE:
2353 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2354 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2355 tree const type = TREE_TYPE (arg);
2356 REAL_VALUE_TYPE r;
2357 char buf[128];
2359 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2360 real_from_string (&r, buf);
2361 result = build_call_expr (isle_fn, 2,
2362 fold_build1_loc (loc, ABS_EXPR, type, arg),
2363 build_real (type, r));
2364 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2366 case BUILT_IN_ISNORMAL:
2368 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2369 islessequal(fabs(x),DBL_MAX). */
2370 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2371 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2372 tree const type = TREE_TYPE (arg);
2373 REAL_VALUE_TYPE rmax, rmin;
2374 char buf[128];
2376 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2377 real_from_string (&rmax, buf);
2378 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2379 real_from_string (&rmin, buf);
2380 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
2381 result = build_call_expr (isle_fn, 2, arg,
2382 build_real (type, rmax));
2383 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2384 build_call_expr (isge_fn, 2, arg,
2385 build_real (type, rmin)));
2386 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2388 default:
2389 break;
2392 target = expand_call (exp, target, target == const0_rtx);
2394 return target;
2397 /* Expand a call to the builtin sincos math function.
2398 Return NULL_RTX if a normal call should be emitted rather than expanding the
2399 function in-line. EXP is the expression that is a call to the builtin
2400 function. */
2402 static rtx
2403 expand_builtin_sincos (tree exp)
2405 rtx op0, op1, op2, target1, target2;
2406 enum machine_mode mode;
2407 tree arg, sinp, cosp;
2408 int result;
2409 location_t loc = EXPR_LOCATION (exp);
2411 if (!validate_arglist (exp, REAL_TYPE,
2412 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2413 return NULL_RTX;
2415 arg = CALL_EXPR_ARG (exp, 0);
2416 sinp = CALL_EXPR_ARG (exp, 1);
2417 cosp = CALL_EXPR_ARG (exp, 2);
2419 /* Make a suitable register to place result in. */
2420 mode = TYPE_MODE (TREE_TYPE (arg));
2422 /* Check if sincos insn is available, otherwise emit the call. */
2423 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2424 return NULL_RTX;
2426 target1 = gen_reg_rtx (mode);
2427 target2 = gen_reg_rtx (mode);
2429 op0 = expand_normal (arg);
2430 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2431 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2433 /* Compute into target1 and target2.
2434 Set TARGET to wherever the result comes back. */
2435 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2436 gcc_assert (result);
2438 /* Move target1 and target2 to the memory locations indicated
2439 by op1 and op2. */
2440 emit_move_insn (op1, target1);
2441 emit_move_insn (op2, target2);
2443 return const0_rtx;
2446 /* Expand a call to the internal cexpi builtin to the sincos math function.
2447 EXP is the expression that is a call to the builtin function; if convenient,
2448 the result should be placed in TARGET. SUBTARGET may be used as the target
2449 for computing one of EXP's operands. */
2451 static rtx
2452 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2454 tree fndecl = get_callee_fndecl (exp);
2455 tree arg, type;
2456 enum machine_mode mode;
2457 rtx op0, op1, op2;
2458 location_t loc = EXPR_LOCATION (exp);
2460 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2461 return NULL_RTX;
2463 arg = CALL_EXPR_ARG (exp, 0);
2464 type = TREE_TYPE (arg);
2465 mode = TYPE_MODE (TREE_TYPE (arg));
2467 /* Try expanding via a sincos optab, fall back to emitting a libcall
2468 to sincos or cexp. We are sure one of them is available because cexpi
2469 is only generated from sincos or cexp, or when either of them is available. */
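  /* For instance, __builtin_cexpif (x) is expanded, in order of preference,
     through a sincos insn, through a call to sincosf (x, &s, &c) whose two
     results are recombined as COMPLEX_EXPR <c, s>, or through a call to
     cexpf on the complex value 0 + x*i.  */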
2470 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2472 op1 = gen_reg_rtx (mode);
2473 op2 = gen_reg_rtx (mode);
2475 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2477 /* Compute into op1 and op2. */
2478 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2480 else if (TARGET_HAS_SINCOS)
2482 tree call, fn = NULL_TREE;
2483 tree top1, top2;
2484 rtx op1a, op2a;
2486 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2487 fn = built_in_decls[BUILT_IN_SINCOSF];
2488 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2489 fn = built_in_decls[BUILT_IN_SINCOS];
2490 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2491 fn = built_in_decls[BUILT_IN_SINCOSL];
2492 else
2493 gcc_unreachable ();
2495 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2496 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2497 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2498 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2499 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2500 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2502 /* Make sure not to fold the sincos call again. */
2503 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2504 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2505 call, 3, arg, top1, top2));
2507 else
2509 tree call, fn = NULL_TREE, narg;
2510 tree ctype = build_complex_type (type);
2512 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2513 fn = built_in_decls[BUILT_IN_CEXPF];
2514 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2515 fn = built_in_decls[BUILT_IN_CEXP];
2516 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2517 fn = built_in_decls[BUILT_IN_CEXPL];
2518 else
2519 gcc_unreachable ();
2521 /* If we don't have a decl for cexp, create one. This is the
2522 friendliest fallback if the user calls __builtin_cexpi
2523 on a target without full C99 function support. */
2524 if (fn == NULL_TREE)
2526 tree fntype;
2527 const char *name = NULL;
2529 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2530 name = "cexpf";
2531 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2532 name = "cexp";
2533 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2534 name = "cexpl";
2536 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2537 fn = build_fn_decl (name, fntype);
2540 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2541 build_real (type, dconst0), arg);
2543 /* Make sure not to fold the cexp call again. */
2544 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2545 return expand_expr (build_call_nary (ctype, call, 1, narg),
2546 target, VOIDmode, EXPAND_NORMAL);
2549 /* Now build the proper return type. */
2550 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2551 make_tree (TREE_TYPE (arg), op2),
2552 make_tree (TREE_TYPE (arg), op1)),
2553 target, VOIDmode, EXPAND_NORMAL);
2556 /* Expand a call to one of the builtin rounding functions gcc defines
2557 as an extension (lfloor and lceil). As these are gcc extensions we
2558 do not need to worry about setting errno to EDOM.
2559 If expanding via optab fails, lower expression to (int)(floor(x)).
2560 EXP is the expression that is a call to the builtin function;
2561 if convenient, the result should be placed in TARGET. */
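/* As a concrete example, __builtin_lfloor (x) with a double argument goes
   through the lfloor optab when the target provides one; otherwise a call
   to floor is emitted and its result is converted with expand_fix, i.e.
   effectively (long) floor (x).  */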
2563 static rtx
2564 expand_builtin_int_roundingfn (tree exp, rtx target)
2566 convert_optab builtin_optab;
2567 rtx op0, insns, tmp;
2568 tree fndecl = get_callee_fndecl (exp);
2569 enum built_in_function fallback_fn;
2570 tree fallback_fndecl;
2571 enum machine_mode mode;
2572 tree arg;
2574 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2575 gcc_unreachable ();
2577 arg = CALL_EXPR_ARG (exp, 0);
2579 switch (DECL_FUNCTION_CODE (fndecl))
2581 CASE_FLT_FN (BUILT_IN_LCEIL):
2582 CASE_FLT_FN (BUILT_IN_LLCEIL):
2583 builtin_optab = lceil_optab;
2584 fallback_fn = BUILT_IN_CEIL;
2585 break;
2587 CASE_FLT_FN (BUILT_IN_LFLOOR):
2588 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2589 builtin_optab = lfloor_optab;
2590 fallback_fn = BUILT_IN_FLOOR;
2591 break;
2593 default:
2594 gcc_unreachable ();
2597 /* Make a suitable register to place result in. */
2598 mode = TYPE_MODE (TREE_TYPE (exp));
2600 target = gen_reg_rtx (mode);
2602 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2603 need to expand the argument again. This way, we will not perform
2604 side-effects more than once. */
2605 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2607 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2609 start_sequence ();
2611 /* Compute into TARGET. */
2612 if (expand_sfix_optab (target, op0, builtin_optab))
2614 /* Output the entire sequence. */
2615 insns = get_insns ();
2616 end_sequence ();
2617 emit_insn (insns);
2618 return target;
2621 /* If we were unable to expand via the builtin, stop the sequence
2622 (without outputting the insns). */
2623 end_sequence ();
2625 /* Fall back to floating point rounding optab. */
2626 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2628 /* For non-C99 targets we may end up without a fallback fndecl here
2629 if the user called __builtin_lfloor directly. In this case emit
2630 a call to the floor/ceil variants nevertheless. This should result
2631 in the best user experience for targets without full C99 support. */
2632 if (fallback_fndecl == NULL_TREE)
2634 tree fntype;
2635 const char *name = NULL;
2637 switch (DECL_FUNCTION_CODE (fndecl))
2639 case BUILT_IN_LCEIL:
2640 case BUILT_IN_LLCEIL:
2641 name = "ceil";
2642 break;
2643 case BUILT_IN_LCEILF:
2644 case BUILT_IN_LLCEILF:
2645 name = "ceilf";
2646 break;
2647 case BUILT_IN_LCEILL:
2648 case BUILT_IN_LLCEILL:
2649 name = "ceill";
2650 break;
2651 case BUILT_IN_LFLOOR:
2652 case BUILT_IN_LLFLOOR:
2653 name = "floor";
2654 break;
2655 case BUILT_IN_LFLOORF:
2656 case BUILT_IN_LLFLOORF:
2657 name = "floorf";
2658 break;
2659 case BUILT_IN_LFLOORL:
2660 case BUILT_IN_LLFLOORL:
2661 name = "floorl";
2662 break;
2663 default:
2664 gcc_unreachable ();
2667 fntype = build_function_type_list (TREE_TYPE (arg),
2668 TREE_TYPE (arg), NULL_TREE);
2669 fallback_fndecl = build_fn_decl (name, fntype);
2672 exp = build_call_expr (fallback_fndecl, 1, arg);
2674 tmp = expand_normal (exp);
2676 /* Truncate the result of the floating point optab to an integer
2677 via expand_fix (). */
2678 target = gen_reg_rtx (mode);
2679 expand_fix (target, tmp, 0);
2681 return target;
2684 /* Expand a call to one of the builtin math functions doing integer
2685 conversion (lrint).
2686 Return 0 if a normal call should be emitted rather than expanding the
2687 function in-line. EXP is the expression that is a call to the builtin
2688 function; if convenient, the result should be placed in TARGET. */
2690 static rtx
2691 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2693 convert_optab builtin_optab;
2694 rtx op0, insns;
2695 tree fndecl = get_callee_fndecl (exp);
2696 tree arg;
2697 enum machine_mode mode;
2699 /* There's no easy way to detect the case we need to set EDOM. */
2700 if (flag_errno_math)
2701 return NULL_RTX;
2703 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2704 gcc_unreachable ();
2706 arg = CALL_EXPR_ARG (exp, 0);
2708 switch (DECL_FUNCTION_CODE (fndecl))
2710 CASE_FLT_FN (BUILT_IN_LRINT):
2711 CASE_FLT_FN (BUILT_IN_LLRINT):
2712 builtin_optab = lrint_optab; break;
2713 CASE_FLT_FN (BUILT_IN_LROUND):
2714 CASE_FLT_FN (BUILT_IN_LLROUND):
2715 builtin_optab = lround_optab; break;
2716 default:
2717 gcc_unreachable ();
2720 /* Make a suitable register to place result in. */
2721 mode = TYPE_MODE (TREE_TYPE (exp));
2723 target = gen_reg_rtx (mode);
2725 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2726 need to expand the argument again. This way, we will not perform
2727 side-effects more than once. */
2728 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2730 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2732 start_sequence ();
2734 if (expand_sfix_optab (target, op0, builtin_optab))
2736 /* Output the entire sequence. */
2737 insns = get_insns ();
2738 end_sequence ();
2739 emit_insn (insns);
2740 return target;
2743 /* If we were unable to expand via the builtin, stop the sequence
2744 (without outputting the insns) and call the library function
2745 with the stabilized argument list. */
2746 end_sequence ();
2748 target = expand_call (exp, target, target == const0_rtx);
2750 return target;
2753 /* To evaluate powi(x,n), the floating point value x raised to the
2754 constant integer exponent n, we use a hybrid algorithm that
2755 combines the "window method" with look-up tables. For an
2756 introduction to exponentiation algorithms and "addition chains",
2757 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2758 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2759 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2760 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2762 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2763 multiplications to inline before calling the system library's pow
2764 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2765 so this default never requires calling pow, powf or powl. */
2767 #ifndef POWI_MAX_MULTS
2768 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2769 #endif
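/* With a 64-bit HOST_WIDE_INT this default evaluates to 2*64-2 = 126
   multiplications.  */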
2771 /* The size of the "optimal power tree" lookup table. All
2772 exponents less than this value are simply looked up in the
2773 powi_table below. This threshold is also used to size the
2774 cache of pseudo registers that hold intermediate results. */
2775 #define POWI_TABLE_SIZE 256
2777 /* The size, in bits of the window, used in the "window method"
2778 exponentiation algorithm. This is equivalent to a radix of
2779 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2780 #define POWI_WINDOW_SIZE 3
2782 /* The following table is an efficient representation of an
2783 "optimal power tree". For each value, i, the corresponding
2784 value, j, in the table states that an optimal evaluation
2785 sequence for calculating pow(x,i) can be found by evaluating
2786 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2787 100 integers is given in Knuth's "Seminumerical algorithms". */
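/* As a worked example, powi_table[15] is 9, so x**15 is evaluated as
   x**9 * x**6; recursively, powi_table[9] == 6, powi_table[6] == 3,
   powi_table[3] == 2 and powi_table[2] == 1, giving the chain
   x**2, x**3, x**6, x**9, x**15 -- five multiplications in total, one
   fewer than plain binary (square-and-multiply) exponentiation needs.  */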
2789 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2791 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2792 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2793 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2794 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2795 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2796 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2797 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2798 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2799 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2800 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2801 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2802 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2803 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2804 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2805 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2806 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2807 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2808 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2809 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2810 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2811 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2812 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2813 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2814 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2815 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2816 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2817 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2818 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2819 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2820 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2821 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2822 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2826 /* Return the number of multiplications required to calculate
2827 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2828 subroutine of powi_cost. CACHE is an array indicating
2829 which exponents have already been calculated. */
2831 static int
2832 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2834 /* If we've already calculated this exponent, then this evaluation
2835 doesn't require any additional multiplications. */
2836 if (cache[n])
2837 return 0;
2839 cache[n] = true;
2840 return powi_lookup_cost (n - powi_table[n], cache)
2841 + powi_lookup_cost (powi_table[n], cache) + 1;
2844 /* Return the number of multiplications required to calculate
2845 powi(x,n) for an arbitrary x, given the exponent N. This
2846 function needs to be kept in sync with expand_powi below. */
2848 static int
2849 powi_cost (HOST_WIDE_INT n)
2851 bool cache[POWI_TABLE_SIZE];
2852 unsigned HOST_WIDE_INT digit;
2853 unsigned HOST_WIDE_INT val;
2854 int result;
2856 if (n == 0)
2857 return 0;
2859 /* Ignore the reciprocal when calculating the cost. */
2860 val = (n < 0) ? -n : n;
2862 /* Initialize the exponent cache. */
2863 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2864 cache[1] = true;
2866 result = 0;
2868 while (val >= POWI_TABLE_SIZE)
2870 if (val & 1)
2872 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2873 result += powi_lookup_cost (digit, cache)
2874 + POWI_WINDOW_SIZE + 1;
2875 val >>= POWI_WINDOW_SIZE;
2877 else
2879 val >>= 1;
2880 result++;
2884 return result + powi_lookup_cost (val, cache);
2887 /* Recursive subroutine of expand_powi. This function takes the array,
2888 CACHE, of already calculated exponents and an exponent N and returns
2889 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2891 static rtx
2892 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2894 unsigned HOST_WIDE_INT digit;
2895 rtx target, result;
2896 rtx op0, op1;
2898 if (n < POWI_TABLE_SIZE)
2900 if (cache[n])
2901 return cache[n];
2903 target = gen_reg_rtx (mode);
2904 cache[n] = target;
2906 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2907 op1 = expand_powi_1 (mode, powi_table[n], cache);
2909 else if (n & 1)
2911 target = gen_reg_rtx (mode);
2912 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2913 op0 = expand_powi_1 (mode, n - digit, cache);
2914 op1 = expand_powi_1 (mode, digit, cache);
2916 else
2918 target = gen_reg_rtx (mode);
2919 op0 = expand_powi_1 (mode, n >> 1, cache);
2920 op1 = op0;
2923 result = expand_mult (mode, op0, op1, target, 0);
2924 if (result != target)
2925 emit_move_insn (target, result);
2926 return target;
2929 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2930 floating point operand in mode MODE, and N is the exponent. This
2931 function needs to be kept in sync with powi_cost above. */
2933 static rtx
2934 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2936 unsigned HOST_WIDE_INT val;
2937 rtx cache[POWI_TABLE_SIZE];
2938 rtx result;
2940 if (n == 0)
2941 return CONST1_RTX (mode);
2943 val = (n < 0) ? -n : n;
2945 memset (cache, 0, sizeof (cache));
2946 cache[1] = x;
2948 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2950 /* If the original exponent was negative, reciprocate the result. */
2951 if (n < 0)
2952 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2953 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2955 return result;
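/* For example, expand_powi (x, mode, -5) builds x**5 with three
   multiplications (x**2 = x*x, x**3 = x**2*x, x**5 = x**3*x**2) and then
   emits one division to form 1 / x**5.  */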
2958 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2959 a normal call should be emitted rather than expanding the function
2960 in-line. EXP is the expression that is a call to the builtin
2961 function; if convenient, the result should be placed in TARGET. */
2963 static rtx
2964 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2966 tree arg0, arg1;
2967 tree fn, narg0;
2968 tree type = TREE_TYPE (exp);
2969 REAL_VALUE_TYPE cint, c, c2;
2970 HOST_WIDE_INT n;
2971 rtx op, op2;
2972 enum machine_mode mode = TYPE_MODE (type);
2974 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2975 return NULL_RTX;
2977 arg0 = CALL_EXPR_ARG (exp, 0);
2978 arg1 = CALL_EXPR_ARG (exp, 1);
2980 if (TREE_CODE (arg1) != REAL_CST
2981 || TREE_OVERFLOW (arg1))
2982 return expand_builtin_mathfn_2 (exp, target, subtarget);
2984 /* Handle constant exponents. */
2986 /* For integer valued exponents we can expand to an optimal multiplication
2987 sequence using expand_powi. */
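  /* For instance, pow (x, 2.0) is always turned into x * x here, whereas
     pow (x, 16.0) is expanded this way only when -funsafe-math-optimizations
     is enabled, the insn is optimized for speed, and powi_cost (16) does not
     exceed POWI_MAX_MULTS.  */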
2988 c = TREE_REAL_CST (arg1);
2989 n = real_to_integer (&c);
2990 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2991 if (real_identical (&c, &cint)
2992 && ((n >= -1 && n <= 2)
2993 || (flag_unsafe_math_optimizations
2994 && optimize_insn_for_speed_p ()
2995 && powi_cost (n) <= POWI_MAX_MULTS)))
2997 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2998 if (n != 1)
3000 op = force_reg (mode, op);
3001 op = expand_powi (op, mode, n);
3003 return op;
3006 narg0 = builtin_save_expr (arg0);
3008 /* If the exponent is not integer valued, check if it is half of an integer.
3009 In this case we can expand to sqrt (x) * x**(n/2). */
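  /* E.g. pow (x, 3.5) can become sqrt (x) * x*x*x, and pow (x, -2.5) can
     become 1 / (sqrt (x) * x*x).  */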
3010 fn = mathfn_built_in (type, BUILT_IN_SQRT);
3011 if (fn != NULL_TREE)
3013 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
3014 n = real_to_integer (&c2);
3015 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3016 if (real_identical (&c2, &cint)
3017 && ((flag_unsafe_math_optimizations
3018 && optimize_insn_for_speed_p ()
3019 && powi_cost (n/2) <= POWI_MAX_MULTS)
3020 || n == 1))
3022 tree call_expr = build_call_expr (fn, 1, narg0);
3023 /* Use expand_expr in case the newly built call expression
3024 was folded to a non-call. */
3025 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3026 if (n != 1)
3028 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3029 op2 = force_reg (mode, op2);
3030 op2 = expand_powi (op2, mode, abs (n / 2));
3031 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3032 0, OPTAB_LIB_WIDEN);
3033 /* If the original exponent was negative, reciprocate the
3034 result. */
3035 if (n < 0)
3036 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3037 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3039 return op;
3043 /* Check whether the exponent is a third of an integer. In this case
3044 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) differs
3045 from pow (x, 1./3.) due to rounding and its behavior with negative x,
3046 we constrain this transformation to unsafe math combined with either
3047 nonnegative x or finite math (no NaNs). */
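  /* E.g. under those constraints pow (x, 5./3.) can become
     x * cbrt (x) * cbrt (x), i.e. x**(5/3) * cbrt(x)**(5%3) with n == 5.  */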
3048 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3049 if (fn != NULL_TREE
3050 && flag_unsafe_math_optimizations
3051 && (tree_expr_nonnegative_p (arg0)
3052 || !HONOR_NANS (mode)))
3054 REAL_VALUE_TYPE dconst3;
3055 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3056 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3057 real_round (&c2, mode, &c2);
3058 n = real_to_integer (&c2);
3059 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3060 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3061 real_convert (&c2, mode, &c2);
3062 if (real_identical (&c2, &c)
3063 && ((optimize_insn_for_speed_p ()
3064 && powi_cost (n/3) <= POWI_MAX_MULTS)
3065 || n == 1))
3067 tree call_expr = build_call_expr (fn, 1, narg0);
3068 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3069 if (abs (n) % 3 == 2)
3070 op = expand_simple_binop (mode, MULT, op, op, op,
3071 0, OPTAB_LIB_WIDEN);
3072 if (n != 1)
3074 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3075 op2 = force_reg (mode, op2);
3076 op2 = expand_powi (op2, mode, abs (n / 3));
3077 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3078 0, OPTAB_LIB_WIDEN);
3079 /* If the original exponent was negative, reciprocate the
3080 result. */
3081 if (n < 0)
3082 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3083 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3085 return op;
3089 /* Fall back to optab expansion. */
3090 return expand_builtin_mathfn_2 (exp, target, subtarget);
3093 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3094 a normal call should be emitted rather than expanding the function
3095 in-line. EXP is the expression that is a call to the builtin
3096 function; if convenient, the result should be placed in TARGET. */
3098 static rtx
3099 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3101 tree arg0, arg1;
3102 rtx op0, op1;
3103 enum machine_mode mode;
3104 enum machine_mode mode2;
3106 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3107 return NULL_RTX;
3109 arg0 = CALL_EXPR_ARG (exp, 0);
3110 arg1 = CALL_EXPR_ARG (exp, 1);
3111 mode = TYPE_MODE (TREE_TYPE (exp));
3113 /* Handle constant power. */
3115 if (TREE_CODE (arg1) == INTEGER_CST
3116 && !TREE_OVERFLOW (arg1))
3118 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3120 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3121 Otherwise, check the number of multiplications required. */
3122 if ((TREE_INT_CST_HIGH (arg1) == 0
3123 || TREE_INT_CST_HIGH (arg1) == -1)
3124 && ((n >= -1 && n <= 2)
3125 || (optimize_insn_for_speed_p ()
3126 && powi_cost (n) <= POWI_MAX_MULTS)))
3128 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3129 op0 = force_reg (mode, op0);
3130 return expand_powi (op0, mode, n);
3134 /* Emit a libcall to libgcc. */
3136 /* Mode of the 2nd argument must match that of an int. */
3137 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3139 if (target == NULL_RTX)
3140 target = gen_reg_rtx (mode);
3142 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3143 if (GET_MODE (op0) != mode)
3144 op0 = convert_to_mode (mode, op0, 0);
3145 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3146 if (GET_MODE (op1) != mode2)
3147 op1 = convert_to_mode (mode2, op1, 0);
3149 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3150 target, LCT_CONST, mode, 2,
3151 op0, mode, op1, mode2);
3153 return target;
3156 /* Expand expression EXP which is a call to the strlen builtin. Return
3157 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3158 try to get the result in TARGET, if convenient. */
3160 static rtx
3161 expand_builtin_strlen (tree exp, rtx target,
3162 enum machine_mode target_mode)
3164 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3165 return NULL_RTX;
3166 else
3168 rtx pat;
3169 tree len;
3170 tree src = CALL_EXPR_ARG (exp, 0);
3171 rtx result, src_reg, char_rtx, before_strlen;
3172 enum machine_mode insn_mode = target_mode, char_mode;
3173 enum insn_code icode = CODE_FOR_nothing;
3174 int align;
3176 /* If the length can be computed at compile-time, return it. */
3177 len = c_strlen (src, 0);
3178 if (len)
3179 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3181 /* If the length can be computed at compile-time and is a constant
3182 integer, but there are side-effects in src, evaluate
3183 src for side-effects, then return len.
3184 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3185 can be optimized into: i++; x = 3; */
3186 len = c_strlen (src, 1);
3187 if (len && TREE_CODE (len) == INTEGER_CST)
3189 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3190 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3193 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3195 /* If SRC is not a pointer type, don't do this operation inline. */
3196 if (align == 0)
3197 return NULL_RTX;
3199 /* Bail out if we can't compute strlen in the right mode. */
3200 while (insn_mode != VOIDmode)
3202 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3203 if (icode != CODE_FOR_nothing)
3204 break;
3206 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3208 if (insn_mode == VOIDmode)
3209 return NULL_RTX;
3211 /* Make a place to write the result of the instruction. */
3212 result = target;
3213 if (! (result != 0
3214 && REG_P (result)
3215 && GET_MODE (result) == insn_mode
3216 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3217 result = gen_reg_rtx (insn_mode);
3219 /* Make a place to hold the source address. We will not expand
3220 the actual source until we are sure that the expansion will
3221 not fail -- there are trees that cannot be expanded twice. */
3222 src_reg = gen_reg_rtx (Pmode);
3224 /* Mark the beginning of the strlen sequence so we can emit the
3225 source operand later. */
3226 before_strlen = get_last_insn ();
3228 char_rtx = const0_rtx;
3229 char_mode = insn_data[(int) icode].operand[2].mode;
3230 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3231 char_mode))
3232 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3234 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3235 char_rtx, GEN_INT (align));
3236 if (! pat)
3237 return NULL_RTX;
3238 emit_insn (pat);
3240 /* Now that we are assured of success, expand the source. */
3241 start_sequence ();
3242 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3243 if (pat != src_reg)
3244 emit_move_insn (src_reg, pat);
3245 pat = get_insns ();
3246 end_sequence ();
3248 if (before_strlen)
3249 emit_insn_after (pat, before_strlen);
3250 else
3251 emit_insn_before (pat, get_insns ());
3253 /* Return the value in the proper mode for this function. */
3254 if (GET_MODE (result) == target_mode)
3255 target = result;
3256 else if (target != 0)
3257 convert_move (target, result, 0);
3258 else
3259 target = convert_to_mode (target_mode, result, 0);
3261 return target;
3265 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed; the
3266 caller should emit a normal call, otherwise try to get the result
3267 in TARGET, if convenient (and in mode MODE if that's convenient). */
3269 static rtx
3270 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3272 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3274 tree type = TREE_TYPE (exp);
3275 tree result = fold_builtin_strstr (EXPR_LOCATION (exp),
3276 CALL_EXPR_ARG (exp, 0),
3277 CALL_EXPR_ARG (exp, 1), type);
3278 if (result)
3279 return expand_expr (result, target, mode, EXPAND_NORMAL);
3281 return NULL_RTX;
3284 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed; the
3285 caller should emit a normal call, otherwise try to get the result
3286 in TARGET, if convenient (and in mode MODE if that's convenient). */
3288 static rtx
3289 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3291 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3293 tree type = TREE_TYPE (exp);
3294 tree result = fold_builtin_strchr (EXPR_LOCATION (exp),
3295 CALL_EXPR_ARG (exp, 0),
3296 CALL_EXPR_ARG (exp, 1), type);
3297 if (result)
3298 return expand_expr (result, target, mode, EXPAND_NORMAL);
3300 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3302 return NULL_RTX;
3305 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed; the
3306 caller should emit a normal call, otherwise try to get the result
3307 in TARGET, if convenient (and in mode MODE if that's convenient). */
3309 static rtx
3310 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3312 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3314 tree type = TREE_TYPE (exp);
3315 tree result = fold_builtin_strrchr (EXPR_LOCATION (exp),
3316 CALL_EXPR_ARG (exp, 0),
3317 CALL_EXPR_ARG (exp, 1), type);
3318 if (result)
3319 return expand_expr (result, target, mode, EXPAND_NORMAL);
3321 return NULL_RTX;
3324 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed; the
3325 caller should emit a normal call, otherwise try to get the result
3326 in TARGET, if convenient (and in mode MODE if that's convenient). */
3328 static rtx
3329 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3331 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3333 tree type = TREE_TYPE (exp);
3334 tree result = fold_builtin_strpbrk (EXPR_LOCATION (exp),
3335 CALL_EXPR_ARG (exp, 0),
3336 CALL_EXPR_ARG (exp, 1), type);
3337 if (result)
3338 return expand_expr (result, target, mode, EXPAND_NORMAL);
3340 return NULL_RTX;
3343 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3344 bytes from constant string DATA + OFFSET and return it as target
3345 constant. */
3347 static rtx
3348 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3349 enum machine_mode mode)
3351 const char *str = (const char *) data;
3353 gcc_assert (offset >= 0
3354 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3355 <= strlen (str) + 1));
3357 return c_readstr (str + offset, mode);
3360 /* Expand a call EXP to the memcpy builtin.
3361 Return NULL_RTX if we failed; the caller should emit a normal call,
3362 otherwise try to get the result in TARGET, if convenient (and in
3363 mode MODE if that's convenient). */
3365 static rtx
3366 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3368 tree fndecl = get_callee_fndecl (exp);
3370 if (!validate_arglist (exp,
3371 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3372 return NULL_RTX;
3373 else
3375 tree dest = CALL_EXPR_ARG (exp, 0);
3376 tree src = CALL_EXPR_ARG (exp, 1);
3377 tree len = CALL_EXPR_ARG (exp, 2);
3378 const char *src_str;
3379 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3380 unsigned int dest_align
3381 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3382 rtx dest_mem, src_mem, dest_addr, len_rtx;
3383 tree result = fold_builtin_memory_op (EXPR_LOCATION (exp),
3384 dest, src, len,
3385 TREE_TYPE (TREE_TYPE (fndecl)),
3386 false, /*endp=*/0);
3387 HOST_WIDE_INT expected_size = -1;
3388 unsigned int expected_align = 0;
3389 tree_ann_common_t ann;
3391 if (result)
3393 while (TREE_CODE (result) == COMPOUND_EXPR)
3395 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3396 EXPAND_NORMAL);
3397 result = TREE_OPERAND (result, 1);
3399 return expand_expr (result, target, mode, EXPAND_NORMAL);
3402 /* If DEST is not a pointer type, call the normal function. */
3403 if (dest_align == 0)
3404 return NULL_RTX;
3406 /* If SRC is not a pointer type, don't do this
3407 operation in-line. */
3408 if (src_align == 0)
3409 return NULL_RTX;
3411 ann = tree_common_ann (exp);
3412 if (ann)
3413 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3415 if (expected_align < dest_align)
3416 expected_align = dest_align;
3417 dest_mem = get_memory_rtx (dest, len);
3418 set_mem_align (dest_mem, dest_align);
3419 len_rtx = expand_normal (len);
3420 src_str = c_getstr (src);
3422 /* If SRC is a string constant and block move would be done
3423 by pieces, we can avoid loading the string from memory
3424 and only store the computed constants. */
3425 if (src_str
3426 && CONST_INT_P (len_rtx)
3427 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3428 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3429 CONST_CAST (char *, src_str),
3430 dest_align, false))
3432 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3433 builtin_memcpy_read_str,
3434 CONST_CAST (char *, src_str),
3435 dest_align, false, 0);
3436 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3437 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3438 return dest_mem;
3441 src_mem = get_memory_rtx (src, len);
3442 set_mem_align (src_mem, src_align);
3444 /* Copy word part most expediently. */
3445 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3446 CALL_EXPR_TAILCALL (exp)
3447 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3448 expected_align, expected_size);
3450 if (dest_addr == 0)
3452 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3453 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3455 return dest_addr;
3459 /* Expand a call EXP to the mempcpy builtin.
3460 Return NULL_RTX if we failed; the caller should emit a normal call,
3461 otherwise try to get the result in TARGET, if convenient (and in
3462 mode MODE if that's convenient). If ENDP is 0 return the
3463 destination pointer, if ENDP is 1 return the end pointer ala
3464 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3465 stpcpy. */
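/* For example, when the 4 bytes of "abc" (including the NUL) are copied
   into BUF, ENDP == 0 yields BUF, ENDP == 1 yields BUF + 4 (mempcpy) and
   ENDP == 2 yields BUF + 3, the address of the copied NUL (stpcpy).  */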
3467 static rtx
3468 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3470 if (!validate_arglist (exp,
3471 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3472 return NULL_RTX;
3473 else
3475 tree dest = CALL_EXPR_ARG (exp, 0);
3476 tree src = CALL_EXPR_ARG (exp, 1);
3477 tree len = CALL_EXPR_ARG (exp, 2);
3478 return expand_builtin_mempcpy_args (dest, src, len,
3479 TREE_TYPE (exp),
3480 target, mode, /*endp=*/ 1);
3484 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3485 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3486 so that this can also be called without constructing an actual CALL_EXPR.
3487 TYPE is the return type of the call. The other arguments and return value
3488 are the same as for expand_builtin_mempcpy. */
3490 static rtx
3491 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3492 rtx target, enum machine_mode mode, int endp)
3494 /* If return value is ignored, transform mempcpy into memcpy. */
3495 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3497 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3498 tree result = build_call_expr (fn, 3, dest, src, len);
3500 while (TREE_CODE (result) == COMPOUND_EXPR)
3502 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3503 EXPAND_NORMAL);
3504 result = TREE_OPERAND (result, 1);
3506 return expand_expr (result, target, mode, EXPAND_NORMAL);
3508 else
3510 const char *src_str;
3511 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3512 unsigned int dest_align
3513 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3514 rtx dest_mem, src_mem, len_rtx;
3515 tree result = fold_builtin_memory_op (UNKNOWN_LOCATION,
3516 dest, src, len, type, false, endp);
3518 if (result)
3520 while (TREE_CODE (result) == COMPOUND_EXPR)
3522 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3523 EXPAND_NORMAL);
3524 result = TREE_OPERAND (result, 1);
3526 return expand_expr (result, target, mode, EXPAND_NORMAL);
3529 /* If either SRC or DEST is not a pointer type, don't do this
3530 operation in-line. */
3531 if (dest_align == 0 || src_align == 0)
3532 return NULL_RTX;
3534 /* If LEN is not constant, call the normal function. */
3535 if (! host_integerp (len, 1))
3536 return NULL_RTX;
3538 len_rtx = expand_normal (len);
3539 src_str = c_getstr (src);
3541 /* If SRC is a string constant and block move would be done
3542 by pieces, we can avoid loading the string from memory
3543 and only store the computed constants. */
3544 if (src_str
3545 && CONST_INT_P (len_rtx)
3546 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3547 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3548 CONST_CAST (char *, src_str),
3549 dest_align, false))
3551 dest_mem = get_memory_rtx (dest, len);
3552 set_mem_align (dest_mem, dest_align);
3553 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3554 builtin_memcpy_read_str,
3555 CONST_CAST (char *, src_str),
3556 dest_align, false, endp);
3557 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3558 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3559 return dest_mem;
3562 if (CONST_INT_P (len_rtx)
3563 && can_move_by_pieces (INTVAL (len_rtx),
3564 MIN (dest_align, src_align)))
3566 dest_mem = get_memory_rtx (dest, len);
3567 set_mem_align (dest_mem, dest_align);
3568 src_mem = get_memory_rtx (src, len);
3569 set_mem_align (src_mem, src_align);
3570 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3571 MIN (dest_align, src_align), endp);
3572 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3573 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3574 return dest_mem;
3577 return NULL_RTX;
3581 /* Expand expression EXP, which is a call to the memmove builtin. Return
3582 NULL_RTX if we failed; the caller should emit a normal call. */
3584 static rtx
3585 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3587 if (!validate_arglist (exp,
3588 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3589 return NULL_RTX;
3590 else
3592 tree dest = CALL_EXPR_ARG (exp, 0);
3593 tree src = CALL_EXPR_ARG (exp, 1);
3594 tree len = CALL_EXPR_ARG (exp, 2);
3595 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3596 target, mode, ignore);
3600 /* Helper function to do the actual work for expand_builtin_memmove. The
3601 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3602 so that this can also be called without constructing an actual CALL_EXPR.
3603 TYPE is the return type of the call. The other arguments and return value
3604 are the same as for expand_builtin_memmove. */
3606 static rtx
3607 expand_builtin_memmove_args (tree dest, tree src, tree len,
3608 tree type, rtx target, enum machine_mode mode,
3609 int ignore)
3611 tree result = fold_builtin_memory_op (UNKNOWN_LOCATION,
3612 dest, src, len, type, ignore, /*endp=*/3);
3614 if (result)
3616 STRIP_TYPE_NOPS (result);
3617 while (TREE_CODE (result) == COMPOUND_EXPR)
3619 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3620 EXPAND_NORMAL);
3621 result = TREE_OPERAND (result, 1);
3623 return expand_expr (result, target, mode, EXPAND_NORMAL);
3626 /* Otherwise, call the normal function. */
3627 return NULL_RTX;
3630 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3631 NULL_RTX if we failed; the caller should emit a normal call. */
3633 static rtx
3634 expand_builtin_bcopy (tree exp, int ignore)
3636 tree type = TREE_TYPE (exp);
3637 tree src, dest, size;
3638 location_t loc = EXPR_LOCATION (exp);
3640 if (!validate_arglist (exp,
3641 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3642 return NULL_RTX;
3644 src = CALL_EXPR_ARG (exp, 0);
3645 dest = CALL_EXPR_ARG (exp, 1);
3646 size = CALL_EXPR_ARG (exp, 2);
3648 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3649 This is done this way so that if it isn't expanded inline, we fall
3650 back to calling bcopy instead of memmove. */
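/* Concretely, bcopy (s, d, n) becomes memmove (d, s, (size_t) n): the two
   pointer arguments are swapped and the length is converted to sizetype.  */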
3651 return expand_builtin_memmove_args (dest, src,
3652 fold_convert_loc (loc, sizetype, size),
3653 type, const0_rtx, VOIDmode,
3654 ignore);
3657 #ifndef HAVE_movstr
3658 # define HAVE_movstr 0
3659 # define CODE_FOR_movstr CODE_FOR_nothing
3660 #endif
3662 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3663 we failed; the caller should emit a normal call, otherwise try to
3664 get the result in TARGET, if convenient. If ENDP is 0 return the
3665 destination pointer, if ENDP is 1 return the end pointer ala
3666 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3667 stpcpy. */
3669 static rtx
3670 expand_movstr (tree dest, tree src, rtx target, int endp)
3672 rtx end;
3673 rtx dest_mem;
3674 rtx src_mem;
3675 rtx insn;
3676 const struct insn_data * data;
3678 if (!HAVE_movstr)
3679 return NULL_RTX;
3681 dest_mem = get_memory_rtx (dest, NULL);
3682 src_mem = get_memory_rtx (src, NULL);
3683 if (!endp)
3685 target = force_reg (Pmode, XEXP (dest_mem, 0));
3686 dest_mem = replace_equiv_address (dest_mem, target);
3687 end = gen_reg_rtx (Pmode);
3689 else
3691 if (target == 0 || target == const0_rtx)
3693 end = gen_reg_rtx (Pmode);
3694 if (target == 0)
3695 target = end;
3697 else
3698 end = target;
3701 data = insn_data + CODE_FOR_movstr;
3703 if (data->operand[0].mode != VOIDmode)
3704 end = gen_lowpart (data->operand[0].mode, end);
3706 insn = data->genfun (end, dest_mem, src_mem);
3708 gcc_assert (insn);
3710 emit_insn (insn);
3712 /* movstr is supposed to set end to the address of the NUL
3713 terminator. If the caller requested a mempcpy-like return value,
3714 adjust it. */
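/* For example, after copying "abc" to D the instruction leaves END at D + 3,
   the address of the NUL; that is already the stpcpy-style (ENDP == 2)
   result, while the mempcpy-style (ENDP == 1) result needs the extra 1
   added below.  */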
3715 if (endp == 1 && target != const0_rtx)
3717 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3718 emit_move_insn (target, force_operand (tem, NULL_RTX));
3721 return target;
3724 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3725 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3726 try to get the result in TARGET, if convenient (and in mode MODE if that's
3727 convenient). */
3729 static rtx
3730 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3732 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3734 tree dest = CALL_EXPR_ARG (exp, 0);
3735 tree src = CALL_EXPR_ARG (exp, 1);
3736 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3738 return NULL_RTX;
3741 /* Helper function to do the actual work for expand_builtin_strcpy. The
3742 arguments to the builtin_strcpy call DEST and SRC are broken out
3743 so that this can also be called without constructing an actual CALL_EXPR.
3744 The other arguments and return value are the same as for
3745 expand_builtin_strcpy. */
3747 static rtx
3748 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3749 rtx target, enum machine_mode mode)
3751 tree result = fold_builtin_strcpy (UNKNOWN_LOCATION,
3752 fndecl, dest, src, 0);
3753 if (result)
3754 return expand_expr (result, target, mode, EXPAND_NORMAL);
3755 return expand_movstr (dest, src, target, /*endp=*/0);
3759 /* Expand a call EXP to the stpcpy builtin.
3760 Return NULL_RTX if we failed; the caller should emit a normal call,
3761 otherwise try to get the result in TARGET, if convenient (and in
3762 mode MODE if that's convenient). */
3764 static rtx
3765 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3767 tree dst, src;
3768 location_t loc = EXPR_LOCATION (exp);
3770 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3771 return NULL_RTX;
3773 dst = CALL_EXPR_ARG (exp, 0);
3774 src = CALL_EXPR_ARG (exp, 1);
3776 /* If return value is ignored, transform stpcpy into strcpy. */
3777 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3779 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3780 tree result = build_call_expr (fn, 2, dst, src);
3782 STRIP_NOPS (result);
3783 while (TREE_CODE (result) == COMPOUND_EXPR)
3785 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3786 EXPAND_NORMAL);
3787 result = TREE_OPERAND (result, 1);
3789 return expand_expr (result, target, mode, EXPAND_NORMAL);
3791 else
3793 tree len, lenp1;
3794 rtx ret;
3796 /* Ensure we get an actual string whose length can be evaluated at
3797 compile-time, not an expression containing a string. This is
3798 because the latter will potentially produce pessimized code
3799 when used to produce the return value. */
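/* For example, for stpcpy (d, "abc") the length 3 is known at compile time,
   so 4 bytes are copied and the return value is simply computed as d + 3.  */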
3800 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3801 return expand_movstr (dst, src, target, /*endp=*/2);
3803 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3804 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3805 target, mode, /*endp=*/2);
3807 if (ret)
3808 return ret;
3810 if (TREE_CODE (len) == INTEGER_CST)
3812 rtx len_rtx = expand_normal (len);
3814 if (CONST_INT_P (len_rtx))
3816 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3817 dst, src, target, mode);
3819 if (ret)
3821 if (! target)
3823 if (mode != VOIDmode)
3824 target = gen_reg_rtx (mode);
3825 else
3826 target = gen_reg_rtx (GET_MODE (ret));
3828 if (GET_MODE (target) != GET_MODE (ret))
3829 ret = gen_lowpart (GET_MODE (target), ret);
3831 ret = plus_constant (ret, INTVAL (len_rtx));
3832 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3833 gcc_assert (ret);
3835 return target;
3840 return expand_movstr (dst, src, target, /*endp=*/2);
3844 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3845 bytes from constant string DATA + OFFSET and return it as target
3846 constant. */
3849 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3850 enum machine_mode mode)
3852 const char *str = (const char *) data;
3854 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3855 return const0_rtx;
3857 return c_readstr (str + offset, mode);
3860 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3861 NULL_RTX if we failed; the caller should emit a normal call. */
3863 static rtx
3864 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3866 tree fndecl = get_callee_fndecl (exp);
3867 location_t loc = EXPR_LOCATION (exp);
3869 if (validate_arglist (exp,
3870 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3872 tree dest = CALL_EXPR_ARG (exp, 0);
3873 tree src = CALL_EXPR_ARG (exp, 1);
3874 tree len = CALL_EXPR_ARG (exp, 2);
3875 tree slen = c_strlen (src, 1);
3876 tree result = fold_builtin_strncpy (EXPR_LOCATION (exp),
3877 fndecl, dest, src, len, slen);
3879 if (result)
3881 while (TREE_CODE (result) == COMPOUND_EXPR)
3883 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3884 EXPAND_NORMAL);
3885 result = TREE_OPERAND (result, 1);
3887 return expand_expr (result, target, mode, EXPAND_NORMAL);
3890 /* We must be passed a constant len and src parameter. */
3891 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3892 return NULL_RTX;
3894 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3896 /* We're required to pad with trailing zeros if the requested
3897 len is greater than strlen(s2)+1. In that case try to
3898 use store_by_pieces; if that fails, punt. */
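/* For example, strncpy (d, "ab", 5) must store 'a', 'b' and then three '\0'
   bytes; builtin_strncpy_read_str supplies the trailing zeros by returning
   const0_rtx for offsets past the end of the source string.  */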
3899 if (tree_int_cst_lt (slen, len))
3901 unsigned int dest_align
3902 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3903 const char *p = c_getstr (src);
3904 rtx dest_mem;
3906 if (!p || dest_align == 0 || !host_integerp (len, 1)
3907 || !can_store_by_pieces (tree_low_cst (len, 1),
3908 builtin_strncpy_read_str,
3909 CONST_CAST (char *, p),
3910 dest_align, false))
3911 return NULL_RTX;
3913 dest_mem = get_memory_rtx (dest, len);
3914 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3915 builtin_strncpy_read_str,
3916 CONST_CAST (char *, p), dest_align, false, 0);
3917 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3918 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3919 return dest_mem;
3922 return NULL_RTX;
3925 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3926 bytes from constant string DATA + OFFSET and return it as target
3927 constant. */
3930 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3931 enum machine_mode mode)
3933 const char *c = (const char *) data;
3934 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3936 memset (p, *c, GET_MODE_SIZE (mode));
3938 return c_readstr (p, mode);
3941 /* Callback routine for store_by_pieces. Return the RTL of a register
3942 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3943 char value given in the RTL register data. For example, if mode is
3944 4 bytes wide, return the RTL for 0x01010101*data. */
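/* For example, in SImode with the fill byte 0xab in DATA, the multiplication
   below computes 0xab * 0x01010101 == 0xabababab, i.e. one copy of the byte
   in each byte of the mode.  */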
3946 static rtx
3947 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3948 enum machine_mode mode)
3950 rtx target, coeff;
3951 size_t size;
3952 char *p;
3954 size = GET_MODE_SIZE (mode);
3955 if (size == 1)
3956 return (rtx) data;
3958 p = XALLOCAVEC (char, size);
3959 memset (p, 1, size);
3960 coeff = c_readstr (p, mode);
3962 target = convert_to_mode (mode, (rtx) data, 1);
3963 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3964 return force_reg (mode, target);
3967 /* Expand expression EXP, which is a call to the memset builtin. Return
3968 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3969 try to get the result in TARGET, if convenient (and in mode MODE if that's
3970 convenient). */
3972 static rtx
3973 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3975 if (!validate_arglist (exp,
3976 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3977 return NULL_RTX;
3978 else
3980 tree dest = CALL_EXPR_ARG (exp, 0);
3981 tree val = CALL_EXPR_ARG (exp, 1);
3982 tree len = CALL_EXPR_ARG (exp, 2);
3983 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3987 /* Helper function to do the actual work for expand_builtin_memset. The
3988 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3989 so that this can also be called without constructing an actual CALL_EXPR.
3990 The other arguments and return value are the same as for
3991 expand_builtin_memset. */
3993 static rtx
3994 expand_builtin_memset_args (tree dest, tree val, tree len,
3995 rtx target, enum machine_mode mode, tree orig_exp)
3997 tree fndecl, fn;
3998 enum built_in_function fcode;
3999 char c;
4000 unsigned int dest_align;
4001 rtx dest_mem, dest_addr, len_rtx;
4002 HOST_WIDE_INT expected_size = -1;
4003 unsigned int expected_align = 0;
4004 tree_ann_common_t ann;
4006 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
4008 /* If DEST is not a pointer type, don't do this operation in-line. */
4009 if (dest_align == 0)
4010 return NULL_RTX;
4012 ann = tree_common_ann (orig_exp);
4013 if (ann)
4014 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
4016 if (expected_align < dest_align)
4017 expected_align = dest_align;
4019 /* If the LEN parameter is zero, return DEST. */
4020 if (integer_zerop (len))
4022 /* Evaluate and ignore VAL in case it has side-effects. */
4023 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4024 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4027 /* Stabilize the arguments in case we fail. */
4028 dest = builtin_save_expr (dest);
4029 val = builtin_save_expr (val);
4030 len = builtin_save_expr (len);
4032 len_rtx = expand_normal (len);
4033 dest_mem = get_memory_rtx (dest, len);
4035 if (TREE_CODE (val) != INTEGER_CST)
4037 rtx val_rtx;
4039 val_rtx = expand_normal (val);
4040 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
4041 val_rtx, 0);
4043 /* Assume that we can memset by pieces if we can store
4044 * the coefficients by pieces (in the required modes).
4045 * We can't pass builtin_memset_gen_str as that emits RTL. */
4046 c = 1;
4047 if (host_integerp (len, 1)
4048 && can_store_by_pieces (tree_low_cst (len, 1),
4049 builtin_memset_read_str, &c, dest_align,
4050 true))
4052 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
4053 val_rtx);
4054 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4055 builtin_memset_gen_str, val_rtx, dest_align,
4056 true, 0);
4058 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4059 dest_align, expected_align,
4060 expected_size))
4061 goto do_libcall;
4063 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4064 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4065 return dest_mem;
4068 if (target_char_cast (val, &c))
4069 goto do_libcall;
4071 if (c)
4073 if (host_integerp (len, 1)
4074 && can_store_by_pieces (tree_low_cst (len, 1),
4075 builtin_memset_read_str, &c, dest_align,
4076 true))
4077 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4078 builtin_memset_read_str, &c, dest_align, true, 0);
4079 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4080 dest_align, expected_align,
4081 expected_size))
4082 goto do_libcall;
4084 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4085 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4086 return dest_mem;
4089 set_mem_align (dest_mem, dest_align);
4090 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4091 CALL_EXPR_TAILCALL (orig_exp)
4092 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4093 expected_align, expected_size);
4095 if (dest_addr == 0)
4097 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4098 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4101 return dest_addr;
4103 do_libcall:
4104 fndecl = get_callee_fndecl (orig_exp);
4105 fcode = DECL_FUNCTION_CODE (fndecl);
4106 if (fcode == BUILT_IN_MEMSET)
4107 fn = build_call_expr (fndecl, 3, dest, val, len);
4108 else if (fcode == BUILT_IN_BZERO)
4109 fn = build_call_expr (fndecl, 2, dest, len);
4110 else
4111 gcc_unreachable ();
4112 if (TREE_CODE (fn) == CALL_EXPR)
4113 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4114 return expand_call (fn, target, target == const0_rtx);
4117 /* Expand expression EXP, which is a call to the bzero builtin. Return
4118 NULL_RTX if we failed; the caller should emit a normal call. */
4120 static rtx
4121 expand_builtin_bzero (tree exp)
4123 tree dest, size;
4124 location_t loc = EXPR_LOCATION (exp);
4126 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4127 return NULL_RTX;
4129 dest = CALL_EXPR_ARG (exp, 0);
4130 size = CALL_EXPR_ARG (exp, 1);
4132 /* New argument list transforming bzero(ptr x, int y) to
4133 memset(ptr x, int 0, size_t y). This is done this way
4134 so that if it isn't expanded inline, we fall back to
4135 calling bzero instead of memset. */
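/* Concretely, bzero (p, n) becomes memset (p, 0, (size_t) n); only the
   length argument needs a conversion, the pointer is passed through as is.  */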
4137 return expand_builtin_memset_args (dest, integer_zero_node,
4138 fold_convert_loc (loc, sizetype, size),
4139 const0_rtx, VOIDmode, exp);
4142 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed;
4143 the caller should emit a normal call, otherwise try to get the result
4144 in TARGET, if convenient (and in mode MODE if that's convenient). */
4146 static rtx
4147 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4149 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4150 INTEGER_TYPE, VOID_TYPE))
4152 tree type = TREE_TYPE (exp);
4153 tree result = fold_builtin_memchr (EXPR_LOCATION (exp),
4154 CALL_EXPR_ARG (exp, 0),
4155 CALL_EXPR_ARG (exp, 1),
4156 CALL_EXPR_ARG (exp, 2), type);
4157 if (result)
4158 return expand_expr (result, target, mode, EXPAND_NORMAL);
4160 return NULL_RTX;
4163 /* Expand expression EXP, which is a call to the memcmp built-in function.
4164 Return NULL_RTX if we failed; the
4165 caller should emit a normal call, otherwise try to get the result in
4166 TARGET, if convenient (and in mode MODE, if that's convenient). */
4168 static rtx
4169 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4171 location_t loc = EXPR_LOCATION (exp);
4173 if (!validate_arglist (exp,
4174 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4175 return NULL_RTX;
4176 else
4178 tree result = fold_builtin_memcmp (loc,
4179 CALL_EXPR_ARG (exp, 0),
4180 CALL_EXPR_ARG (exp, 1),
4181 CALL_EXPR_ARG (exp, 2));
4182 if (result)
4183 return expand_expr (result, target, mode, EXPAND_NORMAL);
4186 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4188 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4189 rtx result;
4190 rtx insn;
4191 tree arg1 = CALL_EXPR_ARG (exp, 0);
4192 tree arg2 = CALL_EXPR_ARG (exp, 1);
4193 tree len = CALL_EXPR_ARG (exp, 2);
4195 int arg1_align
4196 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4197 int arg2_align
4198 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4199 enum machine_mode insn_mode;
4201 #ifdef HAVE_cmpmemsi
4202 if (HAVE_cmpmemsi)
4203 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4204 else
4205 #endif
4206 #ifdef HAVE_cmpstrnsi
4207 if (HAVE_cmpstrnsi)
4208 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4209 else
4210 #endif
4211 return NULL_RTX;
4213 /* If we don't have POINTER_TYPE, call the function. */
4214 if (arg1_align == 0 || arg2_align == 0)
4215 return NULL_RTX;
4217 /* Make a place to write the result of the instruction. */
4218 result = target;
4219 if (! (result != 0
4220 && REG_P (result) && GET_MODE (result) == insn_mode
4221 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4222 result = gen_reg_rtx (insn_mode);
4224 arg1_rtx = get_memory_rtx (arg1, len);
4225 arg2_rtx = get_memory_rtx (arg2, len);
4226 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4228 /* Set MEM_SIZE as appropriate. */
4229 if (CONST_INT_P (arg3_rtx))
4231 set_mem_size (arg1_rtx, arg3_rtx);
4232 set_mem_size (arg2_rtx, arg3_rtx);
4235 #ifdef HAVE_cmpmemsi
4236 if (HAVE_cmpmemsi)
4237 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4238 GEN_INT (MIN (arg1_align, arg2_align)));
4239 else
4240 #endif
4241 #ifdef HAVE_cmpstrnsi
4242 if (HAVE_cmpstrnsi)
4243 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4244 GEN_INT (MIN (arg1_align, arg2_align)));
4245 else
4246 #endif
4247 gcc_unreachable ();
4249 if (insn)
4250 emit_insn (insn);
4251 else
4252 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4253 TYPE_MODE (integer_type_node), 3,
4254 XEXP (arg1_rtx, 0), Pmode,
4255 XEXP (arg2_rtx, 0), Pmode,
4256 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4257 TYPE_UNSIGNED (sizetype)),
4258 TYPE_MODE (sizetype));
4260 /* Return the value in the proper mode for this function. */
4261 mode = TYPE_MODE (TREE_TYPE (exp));
4262 if (GET_MODE (result) == mode)
4263 return result;
4264 else if (target != 0)
4266 convert_move (target, result, 0);
4267 return target;
4269 else
4270 return convert_to_mode (mode, result, 0);
4272 #endif
4274 return NULL_RTX;
4277 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4278 if we failed; the caller should emit a normal call, otherwise try to get
4279 the result in TARGET, if convenient. */
4281 static rtx
4282 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4284 location_t loc = EXPR_LOCATION (exp);
4286 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4287 return NULL_RTX;
4288 else
4290 tree result = fold_builtin_strcmp (loc,
4291 CALL_EXPR_ARG (exp, 0),
4292 CALL_EXPR_ARG (exp, 1));
4293 if (result)
4294 return expand_expr (result, target, mode, EXPAND_NORMAL);
4297 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4298 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4299 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4301 rtx arg1_rtx, arg2_rtx;
4302 rtx result, insn = NULL_RTX;
4303 tree fndecl, fn;
4304 tree arg1 = CALL_EXPR_ARG (exp, 0);
4305 tree arg2 = CALL_EXPR_ARG (exp, 1);
4307 int arg1_align
4308 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4309 int arg2_align
4310 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4312 /* If we don't have POINTER_TYPE, call the function. */
4313 if (arg1_align == 0 || arg2_align == 0)
4314 return NULL_RTX;
4316 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4317 arg1 = builtin_save_expr (arg1);
4318 arg2 = builtin_save_expr (arg2);
4320 arg1_rtx = get_memory_rtx (arg1, NULL);
4321 arg2_rtx = get_memory_rtx (arg2, NULL);
4323 #ifdef HAVE_cmpstrsi
4324 /* Try to call cmpstrsi. */
4325 if (HAVE_cmpstrsi)
4327 enum machine_mode insn_mode
4328 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4330 /* Make a place to write the result of the instruction. */
4331 result = target;
4332 if (! (result != 0
4333 && REG_P (result) && GET_MODE (result) == insn_mode
4334 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4335 result = gen_reg_rtx (insn_mode);
4337 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4338 GEN_INT (MIN (arg1_align, arg2_align)));
4340 #endif
4341 #ifdef HAVE_cmpstrnsi
4342 /* Try to determine at least one length and call cmpstrnsi. */
4343 if (!insn && HAVE_cmpstrnsi)
4345 tree len;
4346 rtx arg3_rtx;
4348 enum machine_mode insn_mode
4349 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4350 tree len1 = c_strlen (arg1, 1);
4351 tree len2 = c_strlen (arg2, 1);
4353 if (len1)
4354 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4355 if (len2)
4356 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4358 /* If we don't have a constant length for the first, use the length
4359 of the second, if we know it. We don't require a constant for
4360 this case; some cost analysis could be done if both are available
4361 but neither is constant. For now, assume they're equally cheap,
4362 unless one has side effects. If both strings have constant lengths,
4363 use the smaller. */
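/* For example, comparing S against the literal "abcd" gives len2 == 5 and
   no len1, so 5 is used; with two literals such as "ab" and "abcd" the
   smaller value (3) is used, presumably because the comparison cannot
   continue past the first NUL anyway.  */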
4365 if (!len1)
4366 len = len2;
4367 else if (!len2)
4368 len = len1;
4369 else if (TREE_SIDE_EFFECTS (len1))
4370 len = len2;
4371 else if (TREE_SIDE_EFFECTS (len2))
4372 len = len1;
4373 else if (TREE_CODE (len1) != INTEGER_CST)
4374 len = len2;
4375 else if (TREE_CODE (len2) != INTEGER_CST)
4376 len = len1;
4377 else if (tree_int_cst_lt (len1, len2))
4378 len = len1;
4379 else
4380 len = len2;
4382 /* If both arguments have side effects, we cannot optimize. */
4383 if (!len || TREE_SIDE_EFFECTS (len))
4384 goto do_libcall;
4386 arg3_rtx = expand_normal (len);
4388 /* Make a place to write the result of the instruction. */
4389 result = target;
4390 if (! (result != 0
4391 && REG_P (result) && GET_MODE (result) == insn_mode
4392 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4393 result = gen_reg_rtx (insn_mode);
4395 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4396 GEN_INT (MIN (arg1_align, arg2_align)));
4398 #endif
4400 if (insn)
4402 emit_insn (insn);
4404 /* Return the value in the proper mode for this function. */
4405 mode = TYPE_MODE (TREE_TYPE (exp));
4406 if (GET_MODE (result) == mode)
4407 return result;
4408 if (target == 0)
4409 return convert_to_mode (mode, result, 0);
4410 convert_move (target, result, 0);
4411 return target;
4414 /* Expand the library call ourselves using a stabilized argument
4415 list to avoid re-evaluating the function's arguments twice. */
4416 #ifdef HAVE_cmpstrnsi
4417 do_libcall:
4418 #endif
4419 fndecl = get_callee_fndecl (exp);
4420 fn = build_call_expr (fndecl, 2, arg1, arg2);
4421 if (TREE_CODE (fn) == CALL_EXPR)
4422 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4423 return expand_call (fn, target, target == const0_rtx);
4425 #endif
4426 return NULL_RTX;
4429 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4430 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4431 the result in TARGET, if convenient. */
4433 static rtx
4434 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4436 location_t loc = EXPR_LOCATION (exp);
4438 if (!validate_arglist (exp,
4439 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4440 return NULL_RTX;
4441 else
4443 tree result = fold_builtin_strncmp (loc,
4444 CALL_EXPR_ARG (exp, 0),
4445 CALL_EXPR_ARG (exp, 1),
4446 CALL_EXPR_ARG (exp, 2));
4447 if (result)
4448 return expand_expr (result, target, mode, EXPAND_NORMAL);
4451 /* If c_strlen can determine an expression for one of the string
4452 lengths, and it doesn't have side effects, then emit cmpstrnsi
4453 using length MIN(strlen(string)+1, arg3). */
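/* For example, for strncmp (s, "abc", 100) the literal gives a known length
   of strlen ("abc") + 1 == 4, so cmpstrnsi is invoked with MIN (4, 100) == 4
   as its length operand.  */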
4454 #ifdef HAVE_cmpstrnsi
4455 if (HAVE_cmpstrnsi)
4457 tree len, len1, len2;
4458 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4459 rtx result, insn;
4460 tree fndecl, fn;
4461 tree arg1 = CALL_EXPR_ARG (exp, 0);
4462 tree arg2 = CALL_EXPR_ARG (exp, 1);
4463 tree arg3 = CALL_EXPR_ARG (exp, 2);
4465 int arg1_align
4466 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4467 int arg2_align
4468 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4469 enum machine_mode insn_mode
4470 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4472 len1 = c_strlen (arg1, 1);
4473 len2 = c_strlen (arg2, 1);
4475 if (len1)
4476 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4477 if (len2)
4478 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4480 /* If we don't have a constant length for the first, use the length
4481 of the second, if we know it. We don't require a constant for
4482 this case; some cost analysis could be done if both are available
4483 but neither is constant. For now, assume they're equally cheap,
4484 unless one has side effects. If both strings have constant lengths,
4485 use the smaller. */
4487 if (!len1)
4488 len = len2;
4489 else if (!len2)
4490 len = len1;
4491 else if (TREE_SIDE_EFFECTS (len1))
4492 len = len2;
4493 else if (TREE_SIDE_EFFECTS (len2))
4494 len = len1;
4495 else if (TREE_CODE (len1) != INTEGER_CST)
4496 len = len2;
4497 else if (TREE_CODE (len2) != INTEGER_CST)
4498 len = len1;
4499 else if (tree_int_cst_lt (len1, len2))
4500 len = len1;
4501 else
4502 len = len2;
4504 /* If both arguments have side effects, we cannot optimize. */
4505 if (!len || TREE_SIDE_EFFECTS (len))
4506 return NULL_RTX;
4508 /* The actual new length parameter is MIN(len,arg3). */
4509 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4510 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4512 /* If we don't have POINTER_TYPE, call the function. */
4513 if (arg1_align == 0 || arg2_align == 0)
4514 return NULL_RTX;
4516 /* Make a place to write the result of the instruction. */
4517 result = target;
4518 if (! (result != 0
4519 && REG_P (result) && GET_MODE (result) == insn_mode
4520 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4521 result = gen_reg_rtx (insn_mode);
4523 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4524 arg1 = builtin_save_expr (arg1);
4525 arg2 = builtin_save_expr (arg2);
4526 len = builtin_save_expr (len);
4528 arg1_rtx = get_memory_rtx (arg1, len);
4529 arg2_rtx = get_memory_rtx (arg2, len);
4530 arg3_rtx = expand_normal (len);
4531 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4532 GEN_INT (MIN (arg1_align, arg2_align)));
4533 if (insn)
4535 emit_insn (insn);
4537 /* Return the value in the proper mode for this function. */
4538 mode = TYPE_MODE (TREE_TYPE (exp));
4539 if (GET_MODE (result) == mode)
4540 return result;
4541 if (target == 0)
4542 return convert_to_mode (mode, result, 0);
4543 convert_move (target, result, 0);
4544 return target;
4547 /* Expand the library call ourselves using a stabilized argument
4548 list to avoid re-evaluating the function's arguments twice. */
4549 fndecl = get_callee_fndecl (exp);
4550 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4551 if (TREE_CODE (fn) == CALL_EXPR)
4552 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4553 return expand_call (fn, target, target == const0_rtx);
4555 #endif
4556 return NULL_RTX;
4559 /* Expand expression EXP, which is a call to the strcat builtin.
4560 Return NULL_RTX if we failed; the caller should emit a normal call,
4561 otherwise try to get the result in TARGET, if convenient. */
4563 static rtx
4564 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4566 location_t loc = EXPR_LOCATION (exp);
4568 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4569 return NULL_RTX;
4570 else
4572 tree dst = CALL_EXPR_ARG (exp, 0);
4573 tree src = CALL_EXPR_ARG (exp, 1);
4574 const char *p = c_getstr (src);
4576 /* If the string length is zero, return the dst parameter. */
4577 if (p && *p == '\0')
4578 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4580 if (optimize_insn_for_speed_p ())
4582 /* See if we can store by pieces into (dst + strlen(dst)). */
4583 tree newsrc, newdst,
4584 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4585 rtx insns;
4587 /* Stabilize the argument list. */
4588 newsrc = builtin_save_expr (src);
4589 dst = builtin_save_expr (dst);
4591 start_sequence ();
4593 /* Create strlen (dst). */
4594 newdst = build_call_expr (strlen_fn, 1, dst);
4595 /* Create (dst p+ strlen (dst)). */
4597 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
4598 TREE_TYPE (dst), dst, newdst);
4599 newdst = builtin_save_expr (newdst);
4601 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4603 end_sequence (); /* Stop sequence. */
4604 return NULL_RTX;
4607 /* Output the entire sequence. */
4608 insns = get_insns ();
4609 end_sequence ();
4610 emit_insn (insns);
4612 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4615 return NULL_RTX;
4619 /* Expand expression EXP, which is a call to the strncat builtin.
4620 Return NULL_RTX if we failed; the caller should emit a normal call,
4621 otherwise try to get the result in TARGET, if convenient. */
4623 static rtx
4624 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4626 if (validate_arglist (exp,
4627 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4629 tree result = fold_builtin_strncat (EXPR_LOCATION (exp),
4630 CALL_EXPR_ARG (exp, 0),
4631 CALL_EXPR_ARG (exp, 1),
4632 CALL_EXPR_ARG (exp, 2));
4633 if (result)
4634 return expand_expr (result, target, mode, EXPAND_NORMAL);
4636 return NULL_RTX;
4639 /* Expand expression EXP, which is a call to the strspn builtin.
4640 Return NULL_RTX if we failed; the caller should emit a normal call,
4641 otherwise try to get the result in TARGET, if convenient. */
4643 static rtx
4644 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4646 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4648 tree result = fold_builtin_strspn (EXPR_LOCATION (exp),
4649 CALL_EXPR_ARG (exp, 0),
4650 CALL_EXPR_ARG (exp, 1));
4651 if (result)
4652 return expand_expr (result, target, mode, EXPAND_NORMAL);
4654 return NULL_RTX;
4657 /* Expand expression EXP, which is a call to the strcspn builtin.
4658 Return NULL_RTX if we failed; the caller should emit a normal call,
4659 otherwise try to get the result in TARGET, if convenient. */
4661 static rtx
4662 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4664 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4666 tree result = fold_builtin_strcspn (EXPR_LOCATION (exp),
4667 CALL_EXPR_ARG (exp, 0),
4668 CALL_EXPR_ARG (exp, 1));
4669 if (result)
4670 return expand_expr (result, target, mode, EXPAND_NORMAL);
4672 return NULL_RTX;
4675 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4676 if that's convenient. */
4679 expand_builtin_saveregs (void)
4681 rtx val, seq;
4683 /* Don't do __builtin_saveregs more than once in a function.
4684 Save the result of the first call and reuse it. */
4685 if (saveregs_value != 0)
4686 return saveregs_value;
4688 /* When this function is called, it means that registers must be
4689 saved on entry to this function. So we migrate the call to the
4690 first insn of this function. */
4692 start_sequence ();
4694 /* Do whatever the machine needs done in this case. */
4695 val = targetm.calls.expand_builtin_saveregs ();
4697 seq = get_insns ();
4698 end_sequence ();
4700 saveregs_value = val;
4702 /* Put the insns after the NOTE that starts the function. If this
4703 is inside a start_sequence, make the outer-level insn chain current, so
4704 the code is placed at the start of the function. */
4705 push_topmost_sequence ();
4706 emit_insn_after (seq, entry_of_function ());
4707 pop_topmost_sequence ();
4709 return val;
4712 /* __builtin_args_info (N) returns word N of the arg space info
4713 for the current function. The number and meanings of words
4714 are controlled by the definition of CUMULATIVE_ARGS. */
4716 static rtx
4717 expand_builtin_args_info (tree exp)
4719 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4720 int *word_ptr = (int *) &crtl->args.info;
4722 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4724 if (call_expr_nargs (exp) != 0)
4726 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4727 error ("argument of %<__builtin_args_info%> must be constant");
4728 else
4730 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4732 if (wordnum < 0 || wordnum >= nwords)
4733 error ("argument of %<__builtin_args_info%> out of range");
4734 else
4735 return GEN_INT (word_ptr[wordnum]);
4738 else
4739 error ("missing argument in %<__builtin_args_info%>");
4741 return const0_rtx;
4744 /* Expand a call to __builtin_next_arg. */
4746 static rtx
4747 expand_builtin_next_arg (void)
4749 /* Checking arguments is already done in fold_builtin_next_arg
4750 that must be called before this function. */
4751 return expand_binop (ptr_mode, add_optab,
4752 crtl->args.internal_arg_pointer,
4753 crtl->args.arg_offset_rtx,
4754 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4757 /* Make it easier for the backends by protecting the valist argument
4758 from multiple evaluations. */
4760 static tree
4761 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4763 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4765 gcc_assert (vatype != NULL_TREE);
4767 if (TREE_CODE (vatype) == ARRAY_TYPE)
4769 if (TREE_SIDE_EFFECTS (valist))
4770 valist = save_expr (valist);
4772 /* For this case, the backends will be expecting a pointer to
4773 vatype, but it's possible we've actually been given an array
4774 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4775 So fix it. */
4776 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4778 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4779 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4782 else
4784 tree pt;
4786 if (! needs_lvalue)
4788 if (! TREE_SIDE_EFFECTS (valist))
4789 return valist;
4791 pt = build_pointer_type (vatype);
4792 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4793 TREE_SIDE_EFFECTS (valist) = 1;
4796 if (TREE_SIDE_EFFECTS (valist))
4797 valist = save_expr (valist);
4798 valist = build_fold_indirect_ref_loc (loc, valist);
4801 return valist;
4804 /* The "standard" definition of va_list is void*. */
4806 tree
4807 std_build_builtin_va_list (void)
4809 return ptr_type_node;
4812 /* The "standard" abi va_list is va_list_type_node. */
4814 tree
4815 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4817 return va_list_type_node;
4820 /* The "standard" type of va_list is va_list_type_node. */
4822 tree
4823 std_canonical_va_list_type (tree type)
4825 tree wtype, htype;
4827 if (INDIRECT_REF_P (type))
4828 type = TREE_TYPE (type);
4829 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4830 type = TREE_TYPE (type);
4831 wtype = va_list_type_node;
4832 htype = type;
4833 /* Treat structure va_list types. */
4834 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4835 htype = TREE_TYPE (htype);
4836 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4838 /* If va_list is an array type, the argument may have decayed
4839 to a pointer type, e.g. by being passed to another function.
4840 In that case, unwrap both types so that we can compare the
4841 underlying records. */
4842 if (TREE_CODE (htype) == ARRAY_TYPE
4843 || POINTER_TYPE_P (htype))
4845 wtype = TREE_TYPE (wtype);
4846 htype = TREE_TYPE (htype);
4849 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4850 return va_list_type_node;
4852 return NULL_TREE;
4855 /* The "standard" implementation of va_start: just assign `nextarg' to
4856 the variable. */
4858 void
4859 std_expand_builtin_va_start (tree valist, rtx nextarg)
4861 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4862 convert_move (va_r, nextarg, 0);
4865 /* Expand EXP, a call to __builtin_va_start. */
4867 static rtx
4868 expand_builtin_va_start (tree exp)
4870 rtx nextarg;
4871 tree valist;
4872 location_t loc = EXPR_LOCATION (exp);
4874 if (call_expr_nargs (exp) < 2)
4876 error_at (loc, "too few arguments to function %<va_start%>");
4877 return const0_rtx;
4880 if (fold_builtin_next_arg (exp, true))
4881 return const0_rtx;
4883 nextarg = expand_builtin_next_arg ();
4884 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4886 if (targetm.expand_builtin_va_start)
4887 targetm.expand_builtin_va_start (valist, nextarg);
4888 else
4889 std_expand_builtin_va_start (valist, nextarg);
4891 return const0_rtx;
4894 /* The "standard" implementation of va_arg: read the value from the
4895 current (padded) address and increment by the (padded) size. */
4897 tree
4898 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4899 gimple_seq *post_p)
4901 tree addr, t, type_size, rounded_size, valist_tmp;
4902 unsigned HOST_WIDE_INT align, boundary;
4903 bool indirect;
4905 #ifdef ARGS_GROW_DOWNWARD
4906 /* All of the alignment and movement below is for args-grow-up machines.
4907 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4908 implement their own specialized gimplify_va_arg_expr routines. */
4909 gcc_unreachable ();
4910 #endif
4912 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4913 if (indirect)
4914 type = build_pointer_type (type);
4916 align = PARM_BOUNDARY / BITS_PER_UNIT;
4917 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4919 /* When the caller aligns a parameter on the stack, any alignment
4920 beyond MAX_SUPPORTED_STACK_ALIGNMENT is clamped to
4921 MAX_SUPPORTED_STACK_ALIGNMENT. Match that caller behaviour
4922 here in the callee. */
4923 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4924 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4926 boundary /= BITS_PER_UNIT;
4928 /* Hoist the valist value into a temporary for the moment. */
4929 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4931 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4932 requires greater alignment, we must perform dynamic alignment. */
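/* The rounding below is the usual (ap + boundary - 1) & -boundary
   computation, expressed as a POINTER_PLUS_EXPR followed by a BIT_AND_EXPR
   on the sizetype value of the pointer.  */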
4933 if (boundary > align
4934 && !integer_zerop (TYPE_SIZE (type)))
4936 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4937 fold_build2 (POINTER_PLUS_EXPR,
4938 TREE_TYPE (valist),
4939 valist_tmp, size_int (boundary - 1)));
4940 gimplify_and_add (t, pre_p);
4942 t = fold_convert (sizetype, valist_tmp);
4943 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4944 fold_convert (TREE_TYPE (valist),
4945 fold_build2 (BIT_AND_EXPR, sizetype, t,
4946 size_int (-boundary))));
4947 gimplify_and_add (t, pre_p);
4949 else
4950 boundary = align;
4952 /* If the actual alignment is less than the alignment of the type,
4953 adjust the type accordingly so that we don't assume strict alignment
4954 when dereferencing the pointer. */
4955 boundary *= BITS_PER_UNIT;
4956 if (boundary < TYPE_ALIGN (type))
4958 type = build_variant_type_copy (type);
4959 TYPE_ALIGN (type) = boundary;
4962 /* Compute the rounded size of the type. */
4963 type_size = size_in_bytes (type);
4964 rounded_size = round_up (type_size, align);
4966 /* Reduce rounded_size so it's sharable with the postqueue. */
4967 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4969 /* Get AP. */
4970 addr = valist_tmp;
4971 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4973 /* Small args are padded downward. */
4974 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4975 rounded_size, size_int (align));
4976 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4977 size_binop (MINUS_EXPR, rounded_size, type_size));
4978 addr = fold_build2 (POINTER_PLUS_EXPR,
4979 TREE_TYPE (addr), addr, t);
4982 /* Compute new value for AP. */
4983 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4984 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4985 gimplify_and_add (t, pre_p);
4987 addr = fold_convert (build_pointer_type (type), addr);
4989 if (indirect)
4990 addr = build_va_arg_indirect_ref (addr);
4992 return build_va_arg_indirect_ref (addr);
4995 /* Build an indirect-ref expression over the given TREE, which represents a
4996 piece of a va_arg() expansion. */
4997 tree
4998 build_va_arg_indirect_ref (tree addr)
5000 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
5002 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
5003 mf_mark (addr);
5005 return addr;
5008 /* Return a dummy expression of type TYPE in order to keep going after an
5009 error. */
5011 static tree
5012 dummy_object (tree type)
5014 tree t = build_int_cst (build_pointer_type (type), 0);
5015 return build1 (INDIRECT_REF, type, t);
5018 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
5019 builtin function, but a very special sort of operator. */
5021 enum gimplify_status
5022 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5024 tree promoted_type, have_va_type;
5025 tree valist = TREE_OPERAND (*expr_p, 0);
5026 tree type = TREE_TYPE (*expr_p);
5027 tree t;
5028 location_t loc = EXPR_LOCATION (*expr_p);
5030 /* Verify that valist is of the proper type. */
5031 have_va_type = TREE_TYPE (valist);
5032 if (have_va_type == error_mark_node)
5033 return GS_ERROR;
5034 have_va_type = targetm.canonical_va_list_type (have_va_type);
5036 if (have_va_type == NULL_TREE)
5038 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
5039 return GS_ERROR;
5042 /* Generate a diagnostic for requesting data of a type that cannot
5043 be passed through `...' due to type promotion at the call site. */
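/* For example, va_arg (ap, float) is diagnosed here: float arguments are
   promoted to double when passed through `...', so the caller should have
   written va_arg (ap, double) and converted the result back itself.  */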
5044 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
5045 != type)
5047 static bool gave_help;
5048 bool warned;
5050 /* Unfortunately, this is merely undefined, rather than a constraint
5051 violation, so we cannot make this an error. If this call is never
5052 executed, the program is still strictly conforming. */
5053 warned = warning_at (loc, 0,
5054 "%qT is promoted to %qT when passed through %<...%>",
5055 type, promoted_type);
5056 if (!gave_help && warned)
5058 gave_help = true;
5059 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
5060 promoted_type, type);
5063 /* We can, however, treat "undefined" any way we please.
5064 Call abort to encourage the user to fix the program. */
5065 if (warned)
5066 inform (loc, "if this code is reached, the program will abort");
5067 /* Before the abort, allow the evaluation of the va_list
5068 expression to exit or longjmp. */
5069 gimplify_and_add (valist, pre_p);
5070 t = build_call_expr_loc (loc,
5071 implicit_built_in_decls[BUILT_IN_TRAP], 0);
5072 gimplify_and_add (t, pre_p);
5074 /* This is dead code, but go ahead and finish so that the
5075 mode of the result comes out right. */
5076 *expr_p = dummy_object (type);
5077 return GS_ALL_DONE;
5079 else
5081 /* Make it easier for the backends by protecting the valist argument
5082 from multiple evaluations. */
5083 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
5085 /* For this case, the backends will be expecting a pointer to
5086 TREE_TYPE (abi), but it's possible we've
5087 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
5088 So fix it. */
5089 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5091 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
5092 valist = fold_convert_loc (loc, p1,
5093 build_fold_addr_expr_loc (loc, valist));
5096 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
5098 else
5099 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
5101 if (!targetm.gimplify_va_arg_expr)
5102 /* FIXME: Once most targets are converted we should merely
5103 assert this is non-null. */
5104 return GS_ALL_DONE;
5106 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
5107 SET_EXPR_LOCATION (*expr_p, loc);
5108 return GS_OK;
5112 /* Expand EXP, a call to __builtin_va_end. */
5114 static rtx
5115 expand_builtin_va_end (tree exp)
5117 tree valist = CALL_EXPR_ARG (exp, 0);
5119 /* Evaluate for side effects, if needed. I hate macros that don't
5120 do that. */
5121 if (TREE_SIDE_EFFECTS (valist))
5122 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5124 return const0_rtx;
5127 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5128 builtin rather than just as an assignment in stdarg.h because of the
5129 nastiness of array-type va_list types. */
5131 static rtx
5132 expand_builtin_va_copy (tree exp)
5134 tree dst, src, t;
5135 location_t loc = EXPR_LOCATION (exp);
5137 dst = CALL_EXPR_ARG (exp, 0);
5138 src = CALL_EXPR_ARG (exp, 1);
5140 dst = stabilize_va_list_loc (loc, dst, 1);
5141 src = stabilize_va_list_loc (loc, src, 0);
5143 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5145 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5147 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5148 TREE_SIDE_EFFECTS (t) = 1;
5149 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5151 else
5153 rtx dstb, srcb, size;
5155 /* Evaluate to pointers. */
5156 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5157 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5158 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5159 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5161 dstb = convert_memory_address (Pmode, dstb);
5162 srcb = convert_memory_address (Pmode, srcb);
5164 /* "Dereference" to BLKmode memories. */
5165 dstb = gen_rtx_MEM (BLKmode, dstb);
5166 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5167 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5168 srcb = gen_rtx_MEM (BLKmode, srcb);
5169 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5170 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5172 /* Copy. */
5173 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5176 return const0_rtx;
5179 /* Expand a call to one of the builtin functions __builtin_frame_address or
5180 __builtin_return_address. */
5182 static rtx
5183 expand_builtin_frame_address (tree fndecl, tree exp)
5185 /* The argument must be a nonnegative integer constant.
5186 It counts the number of frames to scan up the stack.
5187 The value is the frame address of, or the return address saved in, that frame. */
5188 if (call_expr_nargs (exp) == 0)
5189 /* Warning about missing arg was already issued. */
5190 return const0_rtx;
5191 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5193 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5194 error ("invalid argument to %<__builtin_frame_address%>");
5195 else
5196 error ("invalid argument to %<__builtin_return_address%>");
5197 return const0_rtx;
5199 else
5201 rtx tem
5202 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5203 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5205 /* Some ports cannot access arbitrary stack frames. */
5206 if (tem == NULL)
5208 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5209 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5210 else
5211 warning (0, "unsupported argument to %<__builtin_return_address%>");
5212 return const0_rtx;
5215 /* For __builtin_frame_address, return what we've got. */
5216 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5217 return tem;
5219 if (!REG_P (tem)
5220 && ! CONSTANT_P (tem))
5221 tem = copy_to_mode_reg (Pmode, tem);
5222 return tem;
5226 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5227 we failed and the caller should emit a normal call, otherwise try to get
5228 the result in TARGET, if convenient. */
5230 static rtx
5231 expand_builtin_alloca (tree exp, rtx target)
5233 rtx op0;
5234 rtx result;
5236 /* Emit normal call if marked not-inlineable. */
5237 if (CALL_CANNOT_INLINE_P (exp))
5238 return NULL_RTX;
5240 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5241 return NULL_RTX;
5243 /* Compute the argument. */
5244 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5246 /* Allocate the desired space. */
5247 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5248 result = convert_memory_address (ptr_mode, result);
5250 return result;
5253 /* Expand EXP, a call to a bswap builtin. Return NULL_RTX if we failed;
5254 otherwise try to get the result in TARGET, using SUBTARGET if convenient. */
5256 static rtx
5257 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5259 enum machine_mode mode;
5260 tree arg;
5261 rtx op0;
5263 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5264 return NULL_RTX;
5266 arg = CALL_EXPR_ARG (exp, 0);
5267 mode = TYPE_MODE (TREE_TYPE (arg));
5268 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5270 target = expand_unop (mode, bswap_optab, op0, target, 1);
5272 gcc_assert (target);
5274 return convert_to_mode (mode, target, 0);
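/* Example of the resulting transformation, assuming the target provides a
   bswapsi2 pattern:

     __builtin_bswap32 (0x12345678)  evaluates to  0x78563412

   i.e. the call is expanded through bswap_optab instead of a libcall.  */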
5277 /* Expand a call to a unary builtin in EXP.
5278 Return NULL_RTX if a normal call should be emitted rather than expanding the
5279 function in-line. If convenient, the result should be placed in TARGET.
5280 SUBTARGET may be used as the target for computing one of EXP's operands. */
5282 static rtx
5283 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5284 rtx subtarget, optab op_optab)
5286 rtx op0;
5288 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5289 return NULL_RTX;
5291 /* Compute the argument. */
5292 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5293 VOIDmode, EXPAND_NORMAL);
5294 /* Compute op, into TARGET if possible.
5295 Set TARGET to wherever the result comes back. */
5296 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5297 op_optab, op0, target, 1);
5298 gcc_assert (target);
5300 return convert_to_mode (target_mode, target, 0);
5303 /* If the string passed to fputs is a constant and is one character
5304 long, we attempt to transform this call into __builtin_fputc(). */
5306 static rtx
5307 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5309 /* Verify the arguments in the original call. */
5310 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5312 tree result = fold_builtin_fputs (EXPR_LOCATION (exp),
5313 CALL_EXPR_ARG (exp, 0),
5314 CALL_EXPR_ARG (exp, 1),
5315 (target == const0_rtx),
5316 unlocked, NULL_TREE);
5317 if (result)
5318 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5320 return NULL_RTX;
5323 /* Expand a call to __builtin_expect. We just return our argument,
5324 as the __builtin_expect semantics should already have been handled by
5325 the tree branch prediction pass. */
5327 static rtx
5328 expand_builtin_expect (tree exp, rtx target)
5330 tree arg, c;
5332 if (call_expr_nargs (exp) < 2)
5333 return const0_rtx;
5334 arg = CALL_EXPR_ARG (exp, 0);
5335 c = CALL_EXPR_ARG (exp, 1);
5337 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5338 /* When guessing was done, the hints should already have been stripped away. */
5339 gcc_assert (!flag_guess_branch_prob
5340 || optimize == 0 || errorcount || sorrycount);
5341 return target;
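/* Illustrative use: by this point a source-level hint such as

     if (__builtin_expect (ptr == NULL, 0))
       abort ();

   has already been consumed by the tree-level branch predictor; here we
   merely expand the first argument and drop the hint.  */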
5344 void
5345 expand_builtin_trap (void)
5347 #ifdef HAVE_trap
5348 if (HAVE_trap)
5349 emit_insn (gen_trap ());
5350 else
5351 #endif
5352 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5353 emit_barrier ();
5356 /* Expand a call to __builtin_unreachable. We do nothing except emit
5357 a barrier saying that control flow will not pass here.
5359 It is the responsibility of the program being compiled to ensure
5360 that control flow never reaches __builtin_unreachable. */
5361 static void
5362 expand_builtin_unreachable (void)
5364 emit_barrier ();
5367 /* Expand EXP, a call to fabs, fabsf or fabsl.
5368 Return NULL_RTX if a normal call should be emitted rather than expanding
5369 the function inline. If convenient, the result should be placed
5370 in TARGET. SUBTARGET may be used as the target for computing
5371 the operand. */
5373 static rtx
5374 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5376 enum machine_mode mode;
5377 tree arg;
5378 rtx op0;
5380 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5381 return NULL_RTX;
5383 arg = CALL_EXPR_ARG (exp, 0);
5384 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5385 mode = TYPE_MODE (TREE_TYPE (arg));
5386 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5387 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5390 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5391 Return NULL_RTX if a normal call should be emitted rather than expanding the
5392 function inline. If convenient, the result should be placed in TARGET.
5393 SUBTARGET may be used as the target for computing the operand. */
5395 static rtx
5396 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5398 rtx op0, op1;
5399 tree arg;
5401 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5402 return NULL_RTX;
5404 arg = CALL_EXPR_ARG (exp, 0);
5405 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5407 arg = CALL_EXPR_ARG (exp, 1);
5408 op1 = expand_normal (arg);
5410 return expand_copysign (op0, op1, target);
5413 /* Create a new constant string literal and return a char* pointer to it.
5414 The STRING_CST value is the LEN characters at STR. */
5415 tree
5416 build_string_literal (int len, const char *str)
5418 tree t, elem, index, type;
5420 t = build_string (len, str);
5421 elem = build_type_variant (char_type_node, 1, 0);
5422 index = build_index_type (size_int (len - 1));
5423 type = build_array_type (elem, index);
5424 TREE_TYPE (t) = type;
5425 TREE_CONSTANT (t) = 1;
5426 TREE_READONLY (t) = 1;
5427 TREE_STATIC (t) = 1;
5429 type = build_pointer_type (elem);
5430 t = build1 (ADDR_EXPR, type,
5431 build4 (ARRAY_REF, elem,
5432 t, integer_zero_node, NULL_TREE, NULL_TREE));
5433 return t;
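/* Example, illustrative only: build_string_literal (4, "abc") yields a tree
   equivalent to the C expression &"abc"[0], i.e. a char * pointing at a
   static, read-only, NUL-terminated copy of the four bytes "abc\0".  */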
5436 /* Expand EXP, a call to printf or printf_unlocked.
5437 Return NULL_RTX if a normal call should be emitted rather than transforming
5438 the function inline. If convenient, the result should be placed in
5439 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5440 call. */
5441 static rtx
5442 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5443 bool unlocked)
5445 /* If we're using an unlocked function, assume the other unlocked
5446 functions exist explicitly. */
5447 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5448 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5449 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5450 : implicit_built_in_decls[BUILT_IN_PUTS];
5451 const char *fmt_str;
5452 tree fn = 0;
5453 tree fmt, arg;
5454 int nargs = call_expr_nargs (exp);
5456 /* If the return value is used, don't do the transformation. */
5457 if (target != const0_rtx)
5458 return NULL_RTX;
5460 /* Verify the required arguments in the original call. */
5461 if (nargs == 0)
5462 return NULL_RTX;
5463 fmt = CALL_EXPR_ARG (exp, 0);
5464 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5465 return NULL_RTX;
5467 /* Check whether the format is a literal string constant. */
5468 fmt_str = c_getstr (fmt);
5469 if (fmt_str == NULL)
5470 return NULL_RTX;
5472 if (!init_target_chars ())
5473 return NULL_RTX;
5475 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5476 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5478 if ((nargs != 2)
5479 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5480 return NULL_RTX;
5481 if (fn_puts)
5482 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5484 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5485 else if (strcmp (fmt_str, target_percent_c) == 0)
5487 if ((nargs != 2)
5488 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5489 return NULL_RTX;
5490 if (fn_putchar)
5491 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5493 else
5495 /* We can't handle anything else with % args or %% ... yet. */
5496 if (strchr (fmt_str, target_percent))
5497 return NULL_RTX;
5499 if (nargs > 1)
5500 return NULL_RTX;
5502 /* If the format specifier was "", printf does nothing. */
5503 if (fmt_str[0] == '\0')
5504 return const0_rtx;
5505 /* If the format specifier has length of 1, call putchar. */
5506 if (fmt_str[1] == '\0')
5508 /* Given printf ("c"), where c is any single character,
5509 convert "c"[0] to an int and pass that to the replacement
5510 function. */
5511 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5512 if (fn_putchar)
5513 fn = build_call_expr (fn_putchar, 1, arg);
5515 else
5517 /* If the format specifier was "string\n", call puts("string"). */
5518 size_t len = strlen (fmt_str);
5519 if ((unsigned char)fmt_str[len - 1] == target_newline)
5521 /* Create a NUL-terminated string that's one char shorter
5522 than the original, stripping off the trailing '\n'. */
5523 char *newstr = XALLOCAVEC (char, len);
5524 memcpy (newstr, fmt_str, len - 1);
5525 newstr[len - 1] = 0;
5526 arg = build_string_literal (len, newstr);
5527 if (fn_puts)
5528 fn = build_call_expr (fn_puts, 1, arg);
5530 else
5531 /* We'd like to arrange to call fputs(string,stdout) here,
5532 but we need stdout and don't have a way to get it yet. */
5533 return NULL_RTX;
5537 if (!fn)
5538 return NULL_RTX;
5539 if (TREE_CODE (fn) == CALL_EXPR)
5540 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5541 return expand_expr (fn, target, mode, EXPAND_NORMAL);
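/* Summary of the rewrites performed above, all only when the return value
   is ignored (illustrative):

     printf ("%s\n", s);   becomes  puts (s);
     printf ("%c", c);     becomes  putchar (c);
     printf ("c");         becomes  putchar ('c');
     printf ("string\n");  becomes  puts ("string");
     printf ("");          becomes  nothing at all.

   Any other format string falls back to a real printf call.  */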
5544 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5545 Return NULL_RTX if a normal call should be emitted rather than transforming
5546 the function inline. If convenient, the result should be placed in
5547 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5548 call. */
5549 static rtx
5550 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5551 bool unlocked)
5553 /* If we're using an unlocked function, assume the other unlocked
5554 functions exist explicitly. */
5555 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5556 : implicit_built_in_decls[BUILT_IN_FPUTC];
5557 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5558 : implicit_built_in_decls[BUILT_IN_FPUTS];
5559 const char *fmt_str;
5560 tree fn = 0;
5561 tree fmt, fp, arg;
5562 int nargs = call_expr_nargs (exp);
5564 /* If the return value is used, don't do the transformation. */
5565 if (target != const0_rtx)
5566 return NULL_RTX;
5568 /* Verify the required arguments in the original call. */
5569 if (nargs < 2)
5570 return NULL_RTX;
5571 fp = CALL_EXPR_ARG (exp, 0);
5572 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5573 return NULL_RTX;
5574 fmt = CALL_EXPR_ARG (exp, 1);
5575 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5576 return NULL_RTX;
5578 /* Check whether the format is a literal string constant. */
5579 fmt_str = c_getstr (fmt);
5580 if (fmt_str == NULL)
5581 return NULL_RTX;
5583 if (!init_target_chars ())
5584 return NULL_RTX;
5586 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5587 if (strcmp (fmt_str, target_percent_s) == 0)
5589 if ((nargs != 3)
5590 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5591 return NULL_RTX;
5592 arg = CALL_EXPR_ARG (exp, 2);
5593 if (fn_fputs)
5594 fn = build_call_expr (fn_fputs, 2, arg, fp);
5596 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5597 else if (strcmp (fmt_str, target_percent_c) == 0)
5599 if ((nargs != 3)
5600 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5601 return NULL_RTX;
5602 arg = CALL_EXPR_ARG (exp, 2);
5603 if (fn_fputc)
5604 fn = build_call_expr (fn_fputc, 2, arg, fp);
5606 else
5608 /* We can't handle anything else with % args or %% ... yet. */
5609 if (strchr (fmt_str, target_percent))
5610 return NULL_RTX;
5612 if (nargs > 2)
5613 return NULL_RTX;
5615 /* If the format specifier was "", fprintf does nothing. */
5616 if (fmt_str[0] == '\0')
5618 /* Evaluate and ignore FILE* argument for side-effects. */
5619 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5620 return const0_rtx;
5623 /* When "string" doesn't contain %, replace all cases of
5624 fprintf(stream,string) with fputs(string,stream). The fputs
5625 builtin will take care of special cases like length == 1. */
5626 if (fn_fputs)
5627 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5630 if (!fn)
5631 return NULL_RTX;
5632 if (TREE_CODE (fn) == CALL_EXPR)
5633 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5634 return expand_expr (fn, target, mode, EXPAND_NORMAL);
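/* Summary of the rewrites performed above, again only when the return
   value is ignored (illustrative):

     fprintf (fp, "%s", s);   becomes  fputs (s, fp);
     fprintf (fp, "%c", c);   becomes  fputc (c, fp);
     fprintf (fp, "string");  becomes  fputs ("string", fp);
     fprintf (fp, "");        becomes  an evaluation of FP only.  */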
5637 /* Expand a call EXP to sprintf. Return NULL_RTX if
5638 a normal call should be emitted rather than expanding the function
5639 inline. If convenient, the result should be placed in TARGET with
5640 mode MODE. */
5642 static rtx
5643 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5645 tree dest, fmt;
5646 const char *fmt_str;
5647 int nargs = call_expr_nargs (exp);
5649 /* Verify the required arguments in the original call. */
5650 if (nargs < 2)
5651 return NULL_RTX;
5652 dest = CALL_EXPR_ARG (exp, 0);
5653 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5654 return NULL_RTX;
5655 fmt = CALL_EXPR_ARG (exp, 1);
5656 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5657 return NULL_RTX;
5659 /* Check whether the format is a literal string constant. */
5660 fmt_str = c_getstr (fmt);
5661 if (fmt_str == NULL)
5662 return NULL_RTX;
5664 if (!init_target_chars ())
5665 return NULL_RTX;
5667 /* If the format doesn't contain % args or %%, use strcpy. */
5668 if (strchr (fmt_str, target_percent) == 0)
5670 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5671 tree exp;
5673 if ((nargs > 2) || ! fn)
5674 return NULL_RTX;
5675 expand_expr (build_call_expr (fn, 2, dest, fmt),
5676 const0_rtx, VOIDmode, EXPAND_NORMAL);
5677 if (target == const0_rtx)
5678 return const0_rtx;
5679 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5680 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5682 /* If the format is "%s", use strcpy if the result isn't used. */
5683 else if (strcmp (fmt_str, target_percent_s) == 0)
5685 tree fn, arg, len;
5686 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5688 if (! fn)
5689 return NULL_RTX;
5690 if (nargs != 3)
5691 return NULL_RTX;
5692 arg = CALL_EXPR_ARG (exp, 2);
5693 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5694 return NULL_RTX;
5696 if (target != const0_rtx)
5698 len = c_strlen (arg, 1);
5699 if (! len || TREE_CODE (len) != INTEGER_CST)
5700 return NULL_RTX;
5702 else
5703 len = NULL_TREE;
5705 expand_expr (build_call_expr (fn, 2, dest, arg),
5706 const0_rtx, VOIDmode, EXPAND_NORMAL);
5708 if (target == const0_rtx)
5709 return const0_rtx;
5710 return expand_expr (len, target, mode, EXPAND_NORMAL);
5713 return NULL_RTX;
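/* Summary of the rewrites performed above (illustrative):

     sprintf (buf, "abc");    becomes  strcpy (buf, "abc"), with result 3;
     sprintf (buf, "%s", s);  becomes  strcpy (buf, s), with the result
                              taken from c_strlen when it is needed and
                              known at compile time.  */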
5716 /* Expand a call to either the entry or exit function profiler. */
5718 static rtx
5719 expand_builtin_profile_func (bool exitp)
5721 rtx this_rtx, which;
5723 this_rtx = DECL_RTL (current_function_decl);
5724 gcc_assert (MEM_P (this_rtx));
5725 this_rtx = XEXP (this_rtx, 0);
5727 if (exitp)
5728 which = profile_function_exit_libfunc;
5729 else
5730 which = profile_function_entry_libfunc;
5732 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5733 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5735 Pmode);
5737 return const0_rtx;
5740 /* Expand a call to __builtin___clear_cache. */
5742 static rtx
5743 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5745 #ifndef HAVE_clear_cache
5746 #ifdef CLEAR_INSN_CACHE
5747 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5748 does something. Just do the default expansion to a call to
5749 __clear_cache(). */
5750 return NULL_RTX;
5751 #else
5752 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5753 does nothing. There is no need to call it. Do nothing. */
5754 return const0_rtx;
5755 #endif /* CLEAR_INSN_CACHE */
5756 #else
5757 /* We have a "clear_cache" insn, and it will handle everything. */
5758 tree begin, end;
5759 rtx begin_rtx, end_rtx;
5760 enum insn_code icode;
5762 /* We must not expand to a library call. If we did, any
5763 fallback library function in libgcc that might contain a call to
5764 __builtin___clear_cache() would recurse infinitely. */
5765 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5767 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5768 return const0_rtx;
5771 if (HAVE_clear_cache)
5773 icode = CODE_FOR_clear_cache;
5775 begin = CALL_EXPR_ARG (exp, 0);
5776 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5777 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5778 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5779 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5781 end = CALL_EXPR_ARG (exp, 1);
5782 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5783 end_rtx = convert_memory_address (Pmode, end_rtx);
5784 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5785 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5787 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5789 return const0_rtx;
5790 #endif /* HAVE_clear_cache */
5793 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5795 static rtx
5796 round_trampoline_addr (rtx tramp)
5798 rtx temp, addend, mask;
5800 /* If we don't need too much alignment, we'll have been guaranteed
5801 proper alignment by get_trampoline_type. */
5802 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5803 return tramp;
5805 /* Round address up to desired boundary. */
5806 temp = gen_reg_rtx (Pmode);
5807 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5808 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5810 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5811 temp, 0, OPTAB_LIB_WIDEN);
5812 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5813 temp, 0, OPTAB_LIB_WIDEN);
5815 return tramp;
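/* The sequence above is the usual align-up idiom; as a C sketch, assuming
   ALIGN is TRAMPOLINE_ALIGNMENT expressed in bytes (a power of two):

     addr = (addr + ALIGN - 1) & -ALIGN;  */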
5818 static rtx
5819 expand_builtin_init_trampoline (tree exp)
5821 tree t_tramp, t_func, t_chain;
5822 rtx r_tramp, r_func, r_chain;
5823 #ifdef TRAMPOLINE_TEMPLATE
5824 rtx blktramp;
5825 #endif
5827 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5828 POINTER_TYPE, VOID_TYPE))
5829 return NULL_RTX;
5831 t_tramp = CALL_EXPR_ARG (exp, 0);
5832 t_func = CALL_EXPR_ARG (exp, 1);
5833 t_chain = CALL_EXPR_ARG (exp, 2);
5835 r_tramp = expand_normal (t_tramp);
5836 r_func = expand_normal (t_func);
5837 r_chain = expand_normal (t_chain);
5839 /* Generate insns to initialize the trampoline. */
5840 r_tramp = round_trampoline_addr (r_tramp);
5841 #ifdef TRAMPOLINE_TEMPLATE
5842 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5843 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5844 emit_block_move (blktramp, assemble_trampoline_template (),
5845 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5846 #endif
5847 trampolines_created = 1;
5848 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5850 return const0_rtx;
5853 static rtx
5854 expand_builtin_adjust_trampoline (tree exp)
5856 rtx tramp;
5858 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5859 return NULL_RTX;
5861 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5862 tramp = round_trampoline_addr (tramp);
5863 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5864 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5865 #endif
5867 return tramp;
5870 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5871 function. The function first checks whether the back end provides
5872 an insn to implement signbit for the respective mode. If not, it
5873 checks whether the floating point format of the value is such that
5874 the sign bit can be extracted. If that is not the case, the
5875 function returns NULL_RTX to indicate that a normal call should be
5876 emitted rather than expanding the function in-line. EXP is the
5877 expression that is a call to the builtin function; if convenient,
5878 the result should be placed in TARGET. */
5879 static rtx
5880 expand_builtin_signbit (tree exp, rtx target)
5882 const struct real_format *fmt;
5883 enum machine_mode fmode, imode, rmode;
5884 HOST_WIDE_INT hi, lo;
5885 tree arg;
5886 int word, bitpos;
5887 enum insn_code icode;
5888 rtx temp;
5889 location_t loc = EXPR_LOCATION (exp);
5891 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5892 return NULL_RTX;
5894 arg = CALL_EXPR_ARG (exp, 0);
5895 fmode = TYPE_MODE (TREE_TYPE (arg));
5896 rmode = TYPE_MODE (TREE_TYPE (exp));
5897 fmt = REAL_MODE_FORMAT (fmode);
5899 arg = builtin_save_expr (arg);
5901 /* Expand the argument yielding a RTX expression. */
5902 temp = expand_normal (arg);
5904 /* Check if the back end provides an insn that handles signbit for the
5905 argument's mode. */
5906 icode = signbit_optab->handlers [(int) fmode].insn_code;
5907 if (icode != CODE_FOR_nothing)
5909 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5910 emit_unop_insn (icode, target, temp, UNKNOWN);
5911 return target;
5914 /* For floating point formats without a sign bit, implement signbit
5915 as "ARG < 0.0". */
5916 bitpos = fmt->signbit_ro;
5917 if (bitpos < 0)
5919 /* But we can't do this if the format supports signed zero. */
5920 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5921 return NULL_RTX;
5923 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5924 build_real (TREE_TYPE (arg), dconst0));
5925 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5928 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5930 imode = int_mode_for_mode (fmode);
5931 if (imode == BLKmode)
5932 return NULL_RTX;
5933 temp = gen_lowpart (imode, temp);
5935 else
5937 imode = word_mode;
5938 /* Handle targets with different FP word orders. */
5939 if (FLOAT_WORDS_BIG_ENDIAN)
5940 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5941 else
5942 word = bitpos / BITS_PER_WORD;
5943 temp = operand_subword_force (temp, word, fmode);
5944 bitpos = bitpos % BITS_PER_WORD;
5947 /* Force the intermediate word_mode (or narrower) result into a
5948 register. This avoids attempting to create paradoxical SUBREGs
5949 of floating point modes below. */
5950 temp = force_reg (imode, temp);
5952 /* If the bitpos is within the "result mode" lowpart, the operation
5953 can be implemented with a single bitwise AND. Otherwise, we need
5954 a right shift and an AND. */
5956 if (bitpos < GET_MODE_BITSIZE (rmode))
5958 if (bitpos < HOST_BITS_PER_WIDE_INT)
5960 hi = 0;
5961 lo = (HOST_WIDE_INT) 1 << bitpos;
5963 else
5965 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5966 lo = 0;
5969 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5970 temp = gen_lowpart (rmode, temp);
5971 temp = expand_binop (rmode, and_optab, temp,
5972 immed_double_const (lo, hi, rmode),
5973 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5975 else
5977 /* Perform a logical right shift to place the signbit in the least
5978 significant bit, then truncate the result to the desired mode
5979 and mask just this bit. */
5980 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5981 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5982 temp = gen_lowpart (rmode, temp);
5983 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5984 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5987 return temp;
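/* A rough C model of the bit-twiddling path, assuming 32-bit IEEE single
   precision where the sign occupies bit 31 (the function name below is
   only for illustration):

     static int signbit_model (float x)
     {
       unsigned int u;
       __builtin_memcpy (&u, &x, sizeof u);   // reinterpret the bits
       return (u & 0x80000000u) != 0;         // mask the sign bit
     }

   When the sign bit lies outside the lowpart of the result mode, the code
   above shifts it down to bit 0 before masking instead.  */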
5990 /* Expand fork or exec calls. TARGET is the desired target of the
5991 call. EXP is the call. FN is the declaration of the
5992 actual builtin function being called. IGNORE is nonzero if the
5993 value is to be ignored. */
5995 static rtx
5996 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5998 tree id, decl;
5999 tree call;
6001 /* If we are not profiling, just call the function. */
6002 if (!profile_arc_flag)
6003 return NULL_RTX;
6005 /* Otherwise call the wrapper. This should be equivalent for the rest of
6006 the compiler, so the code does not diverge, and the wrapper may run the
6007 code necessary for keeping the profiling sane. */
6009 switch (DECL_FUNCTION_CODE (fn))
6011 case BUILT_IN_FORK:
6012 id = get_identifier ("__gcov_fork");
6013 break;
6015 case BUILT_IN_EXECL:
6016 id = get_identifier ("__gcov_execl");
6017 break;
6019 case BUILT_IN_EXECV:
6020 id = get_identifier ("__gcov_execv");
6021 break;
6023 case BUILT_IN_EXECLP:
6024 id = get_identifier ("__gcov_execlp");
6025 break;
6027 case BUILT_IN_EXECLE:
6028 id = get_identifier ("__gcov_execle");
6029 break;
6031 case BUILT_IN_EXECVP:
6032 id = get_identifier ("__gcov_execvp");
6033 break;
6035 case BUILT_IN_EXECVE:
6036 id = get_identifier ("__gcov_execve");
6037 break;
6039 default:
6040 gcc_unreachable ();
6043 decl = build_decl (DECL_SOURCE_LOCATION (fn),
6044 FUNCTION_DECL, id, TREE_TYPE (fn));
6045 DECL_EXTERNAL (decl) = 1;
6046 TREE_PUBLIC (decl) = 1;
6047 DECL_ARTIFICIAL (decl) = 1;
6048 TREE_NOTHROW (decl) = 1;
6049 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6050 DECL_VISIBILITY_SPECIFIED (decl) = 1;
6051 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
6052 return expand_call (call, target, ignore);
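/* Illustrative effect: when compiling with -fprofile-arcs, a call such as
   fork () is redirected to __gcov_fork () so that libgcov can keep the
   coverage counters consistent across the new process; without profiling
   the original library call is emitted unchanged.  */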
6057 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6058 the pointer in these functions is void*, the tree optimizers may remove
6059 casts. The mode computed in expand_builtin isn't reliable either, due
6060 to __sync_bool_compare_and_swap.
6062 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6063 group of builtins. This gives us log2 of the mode size. */
6065 static inline enum machine_mode
6066 get_builtin_sync_mode (int fcode_diff)
6068 /* The size is not negotiable, so ask not to get BLKmode in return
6069 if the target indicates that a smaller size would be better. */
6070 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
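/* Example: for __sync_fetch_and_add_4 the FCODE_DIFF is 2, so the request
   is for a BITS_PER_UNIT << 2 = 32-bit integer mode on the usual
   8-bit-unit targets, i.e. SImode.  */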
6073 /* Expand the memory expression LOC and return the appropriate memory operand
6074 for the builtin_sync operations. */
6076 static rtx
6077 get_builtin_sync_mem (tree loc, enum machine_mode mode)
6079 rtx addr, mem;
6081 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
6083 /* Note that we explicitly do not want any alias information for this
6084 memory, so that we kill all other live memories. Otherwise we don't
6085 satisfy the full barrier semantics of the intrinsic. */
6086 mem = validize_mem (gen_rtx_MEM (mode, addr));
6088 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
6089 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6090 MEM_VOLATILE_P (mem) = 1;
6092 return mem;
6095 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6096 EXP is the CALL_EXPR. CODE is the rtx code
6097 that corresponds to the arithmetic or logical operation from the name;
6098 an exception here is that NOT actually means NAND. TARGET is an optional
6099 place for us to store the results; AFTER is true if this is the
6100 fetch_and_xxx form. IGNORE is true if we don't actually care about
6101 the result of the operation at all. */
6103 static rtx
6104 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
6105 enum rtx_code code, bool after,
6106 rtx target, bool ignore)
6108 rtx val, mem;
6109 enum machine_mode old_mode;
6110 location_t loc = EXPR_LOCATION (exp);
6112 if (code == NOT && warn_sync_nand)
6114 tree fndecl = get_callee_fndecl (exp);
6115 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6117 static bool warned_f_a_n, warned_n_a_f;
6119 switch (fcode)
6121 case BUILT_IN_FETCH_AND_NAND_1:
6122 case BUILT_IN_FETCH_AND_NAND_2:
6123 case BUILT_IN_FETCH_AND_NAND_4:
6124 case BUILT_IN_FETCH_AND_NAND_8:
6125 case BUILT_IN_FETCH_AND_NAND_16:
6127 if (warned_f_a_n)
6128 break;
6130 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
6131 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6132 warned_f_a_n = true;
6133 break;
6135 case BUILT_IN_NAND_AND_FETCH_1:
6136 case BUILT_IN_NAND_AND_FETCH_2:
6137 case BUILT_IN_NAND_AND_FETCH_4:
6138 case BUILT_IN_NAND_AND_FETCH_8:
6139 case BUILT_IN_NAND_AND_FETCH_16:
6141 if (warned_n_a_f)
6142 break;
6144 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
6145 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6146 warned_n_a_f = true;
6147 break;
6149 default:
6150 gcc_unreachable ();
6154 /* Expand the operands. */
6155 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6157 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6158 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6159 of CONST_INTs, where we know the old_mode only from the call argument. */
6160 old_mode = GET_MODE (val);
6161 if (old_mode == VOIDmode)
6162 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6163 val = convert_modes (mode, old_mode, val, 1);
6165 if (ignore)
6166 return expand_sync_operation (mem, val, code);
6167 else
6168 return expand_sync_fetch_operation (mem, val, code, after, target);
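/* The semantics being expanded, as pseudo-C executed atomically:

     tmp = *mem;
     *mem = tmp OP val;        (for NOT this is ~(tmp & val), i.e. NAND)
     result = after ? *mem : tmp;

   When IGNORE is set only the store's side effect is kept.  */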
6171 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6172 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6173 true if this is the boolean form. TARGET is a place for us to store the
6174 results; this is NOT optional if IS_BOOL is true. */
6176 static rtx
6177 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
6178 bool is_bool, rtx target)
6180 rtx old_val, new_val, mem;
6181 enum machine_mode old_mode;
6183 /* Expand the operands. */
6184 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6187 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6188 mode, EXPAND_NORMAL);
6189 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6190 of CONST_INTs, where we know the old_mode only from the call argument. */
6191 old_mode = GET_MODE (old_val);
6192 if (old_mode == VOIDmode)
6193 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6194 old_val = convert_modes (mode, old_mode, old_val, 1);
6196 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6197 mode, EXPAND_NORMAL);
6198 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6199 of CONST_INTs, where we know the old_mode only from the call argument. */
6200 old_mode = GET_MODE (new_val);
6201 if (old_mode == VOIDmode)
6202 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6203 new_val = convert_modes (mode, old_mode, new_val, 1);
6205 if (is_bool)
6206 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6207 else
6208 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6211 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6212 general form is actually an atomic exchange, and some targets only
6213 support a reduced form with the second argument being a constant 1.
6214 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6215 the results. */
6217 static rtx
6218 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6219 rtx target)
6221 rtx val, mem;
6222 enum machine_mode old_mode;
6224 /* Expand the operands. */
6225 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6226 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6227 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6228 of CONST_INTs, where we know the old_mode only from the call argument. */
6229 old_mode = GET_MODE (val);
6230 if (old_mode == VOIDmode)
6231 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6232 val = convert_modes (mode, old_mode, val, 1);
6234 return expand_sync_lock_test_and_set (mem, val, target);
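/* The semantics being expanded are an atomic exchange that returns the old
   value of *MEM, which is why the classic spin-lock acquire

     while (__sync_lock_test_and_set (&lock, 1))
       ;

   works: the loop exits once the previously stored value was 0.  Some
   targets only accept the constant 1 as the value stored.  */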
6237 /* Expand the __sync_synchronize intrinsic. */
6239 static void
6240 expand_builtin_synchronize (void)
6242 tree x;
6244 #ifdef HAVE_memory_barrier
6245 if (HAVE_memory_barrier)
6247 emit_insn (gen_memory_barrier ());
6248 return;
6250 #endif
6252 if (synchronize_libfunc != NULL_RTX)
6254 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6255 return;
6258 /* If no explicit memory barrier instruction is available, create an
6259 empty asm stmt with a memory clobber. */
6260 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6261 tree_cons (NULL, build_string (6, "memory"), NULL));
6262 ASM_VOLATILE_P (x) = 1;
6263 expand_asm_expr (x);
6266 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6268 static void
6269 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6271 enum insn_code icode;
6272 rtx mem, insn;
6273 rtx val = const0_rtx;
6275 /* Expand the operands. */
6276 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6278 /* If there is an explicit operation in the md file, use it. */
6279 icode = sync_lock_release[mode];
6280 if (icode != CODE_FOR_nothing)
6282 if (!insn_data[icode].operand[1].predicate (val, mode))
6283 val = force_reg (mode, val);
6285 insn = GEN_FCN (icode) (mem, val);
6286 if (insn)
6288 emit_insn (insn);
6289 return;
6293 /* Otherwise we can implement this operation by emitting a barrier
6294 followed by a store of zero. */
6295 expand_builtin_synchronize ();
6296 emit_move_insn (mem, val);
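/* In other words, on targets without a sync_lock_release pattern the
   release is modelled as

     __sync_synchronize ();
     *lock = 0;

   which is exactly the barrier-plus-store fallback emitted above.  */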
6299 /* Expand an expression EXP that calls a built-in function,
6300 with result going to TARGET if that's convenient
6301 (and in mode MODE if that's convenient).
6302 SUBTARGET may be used as the target for computing one of EXP's operands.
6303 IGNORE is nonzero if the value is to be ignored. */
6306 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6307 int ignore)
6309 tree fndecl = get_callee_fndecl (exp);
6310 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6311 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6313 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6314 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6316 /* When not optimizing, generate calls to library functions for a certain
6317 set of builtins. */
6318 if (!optimize
6319 && !called_as_built_in (fndecl)
6320 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6321 && fcode != BUILT_IN_ALLOCA
6322 && fcode != BUILT_IN_FREE)
6323 return expand_call (exp, target, ignore);
6325 /* The built-in function expanders test for target == const0_rtx
6326 to determine whether the function's result will be ignored. */
6327 if (ignore)
6328 target = const0_rtx;
6330 /* If the result of a pure or const built-in function is ignored, and
6331 none of its arguments are volatile, we can avoid expanding the
6332 built-in call and just evaluate the arguments for side-effects. */
6333 if (target == const0_rtx
6334 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6336 bool volatilep = false;
6337 tree arg;
6338 call_expr_arg_iterator iter;
6340 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6341 if (TREE_THIS_VOLATILE (arg))
6343 volatilep = true;
6344 break;
6347 if (! volatilep)
6349 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6350 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6351 return const0_rtx;
6355 switch (fcode)
6357 CASE_FLT_FN (BUILT_IN_FABS):
6358 target = expand_builtin_fabs (exp, target, subtarget);
6359 if (target)
6360 return target;
6361 break;
6363 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6364 target = expand_builtin_copysign (exp, target, subtarget);
6365 if (target)
6366 return target;
6367 break;
6369 /* Just do a normal library call if we were unable to fold
6370 the values. */
6371 CASE_FLT_FN (BUILT_IN_CABS):
6372 break;
6374 CASE_FLT_FN (BUILT_IN_EXP):
6375 CASE_FLT_FN (BUILT_IN_EXP10):
6376 CASE_FLT_FN (BUILT_IN_POW10):
6377 CASE_FLT_FN (BUILT_IN_EXP2):
6378 CASE_FLT_FN (BUILT_IN_EXPM1):
6379 CASE_FLT_FN (BUILT_IN_LOGB):
6380 CASE_FLT_FN (BUILT_IN_LOG):
6381 CASE_FLT_FN (BUILT_IN_LOG10):
6382 CASE_FLT_FN (BUILT_IN_LOG2):
6383 CASE_FLT_FN (BUILT_IN_LOG1P):
6384 CASE_FLT_FN (BUILT_IN_TAN):
6385 CASE_FLT_FN (BUILT_IN_ASIN):
6386 CASE_FLT_FN (BUILT_IN_ACOS):
6387 CASE_FLT_FN (BUILT_IN_ATAN):
6388 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6389 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6390 because of possible accuracy problems. */
6391 if (! flag_unsafe_math_optimizations)
6392 break;
6393 CASE_FLT_FN (BUILT_IN_SQRT):
6394 CASE_FLT_FN (BUILT_IN_FLOOR):
6395 CASE_FLT_FN (BUILT_IN_CEIL):
6396 CASE_FLT_FN (BUILT_IN_TRUNC):
6397 CASE_FLT_FN (BUILT_IN_ROUND):
6398 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6399 CASE_FLT_FN (BUILT_IN_RINT):
6400 target = expand_builtin_mathfn (exp, target, subtarget);
6401 if (target)
6402 return target;
6403 break;
6405 CASE_FLT_FN (BUILT_IN_ILOGB):
6406 if (! flag_unsafe_math_optimizations)
6407 break;
6408 CASE_FLT_FN (BUILT_IN_ISINF):
6409 CASE_FLT_FN (BUILT_IN_FINITE):
6410 case BUILT_IN_ISFINITE:
6411 case BUILT_IN_ISNORMAL:
6412 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6413 if (target)
6414 return target;
6415 break;
6417 CASE_FLT_FN (BUILT_IN_LCEIL):
6418 CASE_FLT_FN (BUILT_IN_LLCEIL):
6419 CASE_FLT_FN (BUILT_IN_LFLOOR):
6420 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6421 target = expand_builtin_int_roundingfn (exp, target);
6422 if (target)
6423 return target;
6424 break;
6426 CASE_FLT_FN (BUILT_IN_LRINT):
6427 CASE_FLT_FN (BUILT_IN_LLRINT):
6428 CASE_FLT_FN (BUILT_IN_LROUND):
6429 CASE_FLT_FN (BUILT_IN_LLROUND):
6430 target = expand_builtin_int_roundingfn_2 (exp, target);
6431 if (target)
6432 return target;
6433 break;
6435 CASE_FLT_FN (BUILT_IN_POW):
6436 target = expand_builtin_pow (exp, target, subtarget);
6437 if (target)
6438 return target;
6439 break;
6441 CASE_FLT_FN (BUILT_IN_POWI):
6442 target = expand_builtin_powi (exp, target, subtarget);
6443 if (target)
6444 return target;
6445 break;
6447 CASE_FLT_FN (BUILT_IN_ATAN2):
6448 CASE_FLT_FN (BUILT_IN_LDEXP):
6449 CASE_FLT_FN (BUILT_IN_SCALB):
6450 CASE_FLT_FN (BUILT_IN_SCALBN):
6451 CASE_FLT_FN (BUILT_IN_SCALBLN):
6452 if (! flag_unsafe_math_optimizations)
6453 break;
6455 CASE_FLT_FN (BUILT_IN_FMOD):
6456 CASE_FLT_FN (BUILT_IN_REMAINDER):
6457 CASE_FLT_FN (BUILT_IN_DREM):
6458 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6459 if (target)
6460 return target;
6461 break;
6463 CASE_FLT_FN (BUILT_IN_CEXPI):
6464 target = expand_builtin_cexpi (exp, target, subtarget);
6465 gcc_assert (target);
6466 return target;
6468 CASE_FLT_FN (BUILT_IN_SIN):
6469 CASE_FLT_FN (BUILT_IN_COS):
6470 if (! flag_unsafe_math_optimizations)
6471 break;
6472 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6473 if (target)
6474 return target;
6475 break;
6477 CASE_FLT_FN (BUILT_IN_SINCOS):
6478 if (! flag_unsafe_math_optimizations)
6479 break;
6480 target = expand_builtin_sincos (exp);
6481 if (target)
6482 return target;
6483 break;
6485 case BUILT_IN_APPLY_ARGS:
6486 return expand_builtin_apply_args ();
6488 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6489 FUNCTION with a copy of the parameters described by
6490 ARGUMENTS, and ARGSIZE. It returns a block of memory
6491 allocated on the stack into which is stored all the registers
6492 that might possibly be used for returning the result of a
6493 function. ARGUMENTS is the value returned by
6494 __builtin_apply_args. ARGSIZE is the number of bytes of
6495 arguments that must be copied. ??? How should this value be
6496 computed? We'll also need a safe worst case value for varargs
6497 functions. */
6498 case BUILT_IN_APPLY:
6499 if (!validate_arglist (exp, POINTER_TYPE,
6500 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6501 && !validate_arglist (exp, REFERENCE_TYPE,
6502 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6503 return const0_rtx;
6504 else
6506 rtx ops[3];
6508 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6509 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6510 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6512 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6515 /* __builtin_return (RESULT) causes the function to return the
6516 value described by RESULT. RESULT is address of the block of
6517 memory returned by __builtin_apply. */
6518 case BUILT_IN_RETURN:
6519 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6520 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6521 return const0_rtx;
6523 case BUILT_IN_SAVEREGS:
6524 return expand_builtin_saveregs ();
6526 case BUILT_IN_ARGS_INFO:
6527 return expand_builtin_args_info (exp);
6529 case BUILT_IN_VA_ARG_PACK:
6530 /* All valid uses of __builtin_va_arg_pack () are removed during
6531 inlining. */
6532 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6533 return const0_rtx;
6535 case BUILT_IN_VA_ARG_PACK_LEN:
6536 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6537 inlining. */
6538 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6539 return const0_rtx;
6541 /* Return the address of the first anonymous stack arg. */
6542 case BUILT_IN_NEXT_ARG:
6543 if (fold_builtin_next_arg (exp, false))
6544 return const0_rtx;
6545 return expand_builtin_next_arg ();
6547 case BUILT_IN_CLEAR_CACHE:
6548 target = expand_builtin___clear_cache (exp);
6549 if (target)
6550 return target;
6551 break;
6553 case BUILT_IN_CLASSIFY_TYPE:
6554 return expand_builtin_classify_type (exp);
6556 case BUILT_IN_CONSTANT_P:
6557 return const0_rtx;
6559 case BUILT_IN_FRAME_ADDRESS:
6560 case BUILT_IN_RETURN_ADDRESS:
6561 return expand_builtin_frame_address (fndecl, exp);
6563 /* Returns the address of the area where the structure is returned.
6564 0 otherwise. */
6565 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6566 if (call_expr_nargs (exp) != 0
6567 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6568 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6569 return const0_rtx;
6570 else
6571 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6573 case BUILT_IN_ALLOCA:
6574 target = expand_builtin_alloca (exp, target);
6575 if (target)
6576 return target;
6577 break;
6579 case BUILT_IN_STACK_SAVE:
6580 return expand_stack_save ();
6582 case BUILT_IN_STACK_RESTORE:
6583 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6584 return const0_rtx;
6586 case BUILT_IN_BSWAP32:
6587 case BUILT_IN_BSWAP64:
6588 target = expand_builtin_bswap (exp, target, subtarget);
6590 if (target)
6591 return target;
6592 break;
6594 CASE_INT_FN (BUILT_IN_FFS):
6595 case BUILT_IN_FFSIMAX:
6596 target = expand_builtin_unop (target_mode, exp, target,
6597 subtarget, ffs_optab);
6598 if (target)
6599 return target;
6600 break;
6602 CASE_INT_FN (BUILT_IN_CLZ):
6603 case BUILT_IN_CLZIMAX:
6604 target = expand_builtin_unop (target_mode, exp, target,
6605 subtarget, clz_optab);
6606 if (target)
6607 return target;
6608 break;
6610 CASE_INT_FN (BUILT_IN_CTZ):
6611 case BUILT_IN_CTZIMAX:
6612 target = expand_builtin_unop (target_mode, exp, target,
6613 subtarget, ctz_optab);
6614 if (target)
6615 return target;
6616 break;
6618 CASE_INT_FN (BUILT_IN_POPCOUNT):
6619 case BUILT_IN_POPCOUNTIMAX:
6620 target = expand_builtin_unop (target_mode, exp, target,
6621 subtarget, popcount_optab);
6622 if (target)
6623 return target;
6624 break;
6626 CASE_INT_FN (BUILT_IN_PARITY):
6627 case BUILT_IN_PARITYIMAX:
6628 target = expand_builtin_unop (target_mode, exp, target,
6629 subtarget, parity_optab);
6630 if (target)
6631 return target;
6632 break;
6634 case BUILT_IN_STRLEN:
6635 target = expand_builtin_strlen (exp, target, target_mode);
6636 if (target)
6637 return target;
6638 break;
6640 case BUILT_IN_STRCPY:
6641 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6642 if (target)
6643 return target;
6644 break;
6646 case BUILT_IN_STRNCPY:
6647 target = expand_builtin_strncpy (exp, target, mode);
6648 if (target)
6649 return target;
6650 break;
6652 case BUILT_IN_STPCPY:
6653 target = expand_builtin_stpcpy (exp, target, mode);
6654 if (target)
6655 return target;
6656 break;
6658 case BUILT_IN_STRCAT:
6659 target = expand_builtin_strcat (fndecl, exp, target, mode);
6660 if (target)
6661 return target;
6662 break;
6664 case BUILT_IN_STRNCAT:
6665 target = expand_builtin_strncat (exp, target, mode);
6666 if (target)
6667 return target;
6668 break;
6670 case BUILT_IN_STRSPN:
6671 target = expand_builtin_strspn (exp, target, mode);
6672 if (target)
6673 return target;
6674 break;
6676 case BUILT_IN_STRCSPN:
6677 target = expand_builtin_strcspn (exp, target, mode);
6678 if (target)
6679 return target;
6680 break;
6682 case BUILT_IN_STRSTR:
6683 target = expand_builtin_strstr (exp, target, mode);
6684 if (target)
6685 return target;
6686 break;
6688 case BUILT_IN_STRPBRK:
6689 target = expand_builtin_strpbrk (exp, target, mode);
6690 if (target)
6691 return target;
6692 break;
6694 case BUILT_IN_INDEX:
6695 case BUILT_IN_STRCHR:
6696 target = expand_builtin_strchr (exp, target, mode);
6697 if (target)
6698 return target;
6699 break;
6701 case BUILT_IN_RINDEX:
6702 case BUILT_IN_STRRCHR:
6703 target = expand_builtin_strrchr (exp, target, mode);
6704 if (target)
6705 return target;
6706 break;
6708 case BUILT_IN_MEMCPY:
6709 target = expand_builtin_memcpy (exp, target, mode);
6710 if (target)
6711 return target;
6712 break;
6714 case BUILT_IN_MEMPCPY:
6715 target = expand_builtin_mempcpy (exp, target, mode);
6716 if (target)
6717 return target;
6718 break;
6720 case BUILT_IN_MEMMOVE:
6721 target = expand_builtin_memmove (exp, target, mode, ignore);
6722 if (target)
6723 return target;
6724 break;
6726 case BUILT_IN_BCOPY:
6727 target = expand_builtin_bcopy (exp, ignore);
6728 if (target)
6729 return target;
6730 break;
6732 case BUILT_IN_MEMSET:
6733 target = expand_builtin_memset (exp, target, mode);
6734 if (target)
6735 return target;
6736 break;
6738 case BUILT_IN_BZERO:
6739 target = expand_builtin_bzero (exp);
6740 if (target)
6741 return target;
6742 break;
6744 case BUILT_IN_STRCMP:
6745 target = expand_builtin_strcmp (exp, target, mode);
6746 if (target)
6747 return target;
6748 break;
6750 case BUILT_IN_STRNCMP:
6751 target = expand_builtin_strncmp (exp, target, mode);
6752 if (target)
6753 return target;
6754 break;
6756 case BUILT_IN_MEMCHR:
6757 target = expand_builtin_memchr (exp, target, mode);
6758 if (target)
6759 return target;
6760 break;
6762 case BUILT_IN_BCMP:
6763 case BUILT_IN_MEMCMP:
6764 target = expand_builtin_memcmp (exp, target, mode);
6765 if (target)
6766 return target;
6767 break;
6769 case BUILT_IN_SETJMP:
6770 /* This should have been lowered to the builtins below. */
6771 gcc_unreachable ();
6773 case BUILT_IN_SETJMP_SETUP:
6774 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6775 and the receiver label. */
6776 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6778 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6779 VOIDmode, EXPAND_NORMAL);
6780 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6781 rtx label_r = label_rtx (label);
6783 /* This is copied from the handling of non-local gotos. */
6784 expand_builtin_setjmp_setup (buf_addr, label_r);
6785 nonlocal_goto_handler_labels
6786 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6787 nonlocal_goto_handler_labels);
6788 /* ??? Do not let expand_label treat us as such since we would
6789 not want to be both on the list of non-local labels and on
6790 the list of forced labels. */
6791 FORCED_LABEL (label) = 0;
6792 return const0_rtx;
6794 break;
6796 case BUILT_IN_SETJMP_DISPATCHER:
6797 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6798 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6800 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6801 rtx label_r = label_rtx (label);
6803 /* Remove the dispatcher label from the list of non-local labels
6804 since the receiver labels have been added to it above. */
6805 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6806 return const0_rtx;
6808 break;
6810 case BUILT_IN_SETJMP_RECEIVER:
6811 /* __builtin_setjmp_receiver is passed the receiver label. */
6812 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6814 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6815 rtx label_r = label_rtx (label);
6817 expand_builtin_setjmp_receiver (label_r);
6818 return const0_rtx;
6820 break;
6822 /* __builtin_longjmp is passed a pointer to an array of five words.
6823 It's similar to the C library longjmp function but works with
6824 __builtin_setjmp above. */
6825 case BUILT_IN_LONGJMP:
6826 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6828 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6829 VOIDmode, EXPAND_NORMAL);
6830 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6832 if (value != const1_rtx)
6834 error ("%<__builtin_longjmp%> second argument must be 1");
6835 return const0_rtx;
6838 expand_builtin_longjmp (buf_addr, value);
6839 return const0_rtx;
6841 break;
6843 case BUILT_IN_NONLOCAL_GOTO:
6844 target = expand_builtin_nonlocal_goto (exp);
6845 if (target)
6846 return target;
6847 break;
6849 /* This updates the setjmp buffer that is its argument with the value
6850 of the current stack pointer. */
6851 case BUILT_IN_UPDATE_SETJMP_BUF:
6852 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6854 rtx buf_addr
6855 = expand_normal (CALL_EXPR_ARG (exp, 0));
6857 expand_builtin_update_setjmp_buf (buf_addr);
6858 return const0_rtx;
6860 break;
6862 case BUILT_IN_TRAP:
6863 expand_builtin_trap ();
6864 return const0_rtx;
6866 case BUILT_IN_UNREACHABLE:
6867 expand_builtin_unreachable ();
6868 return const0_rtx;
6870 case BUILT_IN_PRINTF:
6871 target = expand_builtin_printf (exp, target, mode, false);
6872 if (target)
6873 return target;
6874 break;
6876 case BUILT_IN_PRINTF_UNLOCKED:
6877 target = expand_builtin_printf (exp, target, mode, true);
6878 if (target)
6879 return target;
6880 break;
6882 case BUILT_IN_FPUTS:
6883 target = expand_builtin_fputs (exp, target, false);
6884 if (target)
6885 return target;
6886 break;
6887 case BUILT_IN_FPUTS_UNLOCKED:
6888 target = expand_builtin_fputs (exp, target, true);
6889 if (target)
6890 return target;
6891 break;
6893 case BUILT_IN_FPRINTF:
6894 target = expand_builtin_fprintf (exp, target, mode, false);
6895 if (target)
6896 return target;
6897 break;
6899 case BUILT_IN_FPRINTF_UNLOCKED:
6900 target = expand_builtin_fprintf (exp, target, mode, true);
6901 if (target)
6902 return target;
6903 break;
6905 case BUILT_IN_SPRINTF:
6906 target = expand_builtin_sprintf (exp, target, mode);
6907 if (target)
6908 return target;
6909 break;
6911 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6912 case BUILT_IN_SIGNBITD32:
6913 case BUILT_IN_SIGNBITD64:
6914 case BUILT_IN_SIGNBITD128:
6915 target = expand_builtin_signbit (exp, target);
6916 if (target)
6917 return target;
6918 break;
6920 /* Various hooks for the DWARF 2 __throw routine. */
6921 case BUILT_IN_UNWIND_INIT:
6922 expand_builtin_unwind_init ();
6923 return const0_rtx;
6924 case BUILT_IN_DWARF_CFA:
6925 return virtual_cfa_rtx;
6926 #ifdef DWARF2_UNWIND_INFO
6927 case BUILT_IN_DWARF_SP_COLUMN:
6928 return expand_builtin_dwarf_sp_column ();
6929 case BUILT_IN_INIT_DWARF_REG_SIZES:
6930 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6931 return const0_rtx;
6932 #endif
6933 case BUILT_IN_FROB_RETURN_ADDR:
6934 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6935 case BUILT_IN_EXTRACT_RETURN_ADDR:
6936 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6937 case BUILT_IN_EH_RETURN:
6938 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6939 CALL_EXPR_ARG (exp, 1));
6940 return const0_rtx;
6941 #ifdef EH_RETURN_DATA_REGNO
6942 case BUILT_IN_EH_RETURN_DATA_REGNO:
6943 return expand_builtin_eh_return_data_regno (exp);
6944 #endif
6945 case BUILT_IN_EXTEND_POINTER:
6946 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6948 case BUILT_IN_VA_START:
6949 return expand_builtin_va_start (exp);
6950 case BUILT_IN_VA_END:
6951 return expand_builtin_va_end (exp);
6952 case BUILT_IN_VA_COPY:
6953 return expand_builtin_va_copy (exp);
6954 case BUILT_IN_EXPECT:
6955 return expand_builtin_expect (exp, target);
6956 case BUILT_IN_PREFETCH:
6957 expand_builtin_prefetch (exp);
6958 return const0_rtx;
6960 case BUILT_IN_PROFILE_FUNC_ENTER:
6961 return expand_builtin_profile_func (false);
6962 case BUILT_IN_PROFILE_FUNC_EXIT:
6963 return expand_builtin_profile_func (true);
6965 case BUILT_IN_INIT_TRAMPOLINE:
6966 return expand_builtin_init_trampoline (exp);
6967 case BUILT_IN_ADJUST_TRAMPOLINE:
6968 return expand_builtin_adjust_trampoline (exp);
6970 case BUILT_IN_FORK:
6971 case BUILT_IN_EXECL:
6972 case BUILT_IN_EXECV:
6973 case BUILT_IN_EXECLP:
6974 case BUILT_IN_EXECLE:
6975 case BUILT_IN_EXECVP:
6976 case BUILT_IN_EXECVE:
6977 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6978 if (target)
6979 return target;
6980 break;
6982 case BUILT_IN_FETCH_AND_ADD_1:
6983 case BUILT_IN_FETCH_AND_ADD_2:
6984 case BUILT_IN_FETCH_AND_ADD_4:
6985 case BUILT_IN_FETCH_AND_ADD_8:
6986 case BUILT_IN_FETCH_AND_ADD_16:
6987 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6988 target = expand_builtin_sync_operation (mode, exp, PLUS,
6989 false, target, ignore);
6990 if (target)
6991 return target;
6992 break;
6994 case BUILT_IN_FETCH_AND_SUB_1:
6995 case BUILT_IN_FETCH_AND_SUB_2:
6996 case BUILT_IN_FETCH_AND_SUB_4:
6997 case BUILT_IN_FETCH_AND_SUB_8:
6998 case BUILT_IN_FETCH_AND_SUB_16:
6999 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
7000 target = expand_builtin_sync_operation (mode, exp, MINUS,
7001 false, target, ignore);
7002 if (target)
7003 return target;
7004 break;
7006 case BUILT_IN_FETCH_AND_OR_1:
7007 case BUILT_IN_FETCH_AND_OR_2:
7008 case BUILT_IN_FETCH_AND_OR_4:
7009 case BUILT_IN_FETCH_AND_OR_8:
7010 case BUILT_IN_FETCH_AND_OR_16:
7011 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
7012 target = expand_builtin_sync_operation (mode, exp, IOR,
7013 false, target, ignore);
7014 if (target)
7015 return target;
7016 break;
7018 case BUILT_IN_FETCH_AND_AND_1:
7019 case BUILT_IN_FETCH_AND_AND_2:
7020 case BUILT_IN_FETCH_AND_AND_4:
7021 case BUILT_IN_FETCH_AND_AND_8:
7022 case BUILT_IN_FETCH_AND_AND_16:
7023 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
7024 target = expand_builtin_sync_operation (mode, exp, AND,
7025 false, target, ignore);
7026 if (target)
7027 return target;
7028 break;
7030 case BUILT_IN_FETCH_AND_XOR_1:
7031 case BUILT_IN_FETCH_AND_XOR_2:
7032 case BUILT_IN_FETCH_AND_XOR_4:
7033 case BUILT_IN_FETCH_AND_XOR_8:
7034 case BUILT_IN_FETCH_AND_XOR_16:
7035 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
7036 target = expand_builtin_sync_operation (mode, exp, XOR,
7037 false, target, ignore);
7038 if (target)
7039 return target;
7040 break;
7042 case BUILT_IN_FETCH_AND_NAND_1:
7043 case BUILT_IN_FETCH_AND_NAND_2:
7044 case BUILT_IN_FETCH_AND_NAND_4:
7045 case BUILT_IN_FETCH_AND_NAND_8:
7046 case BUILT_IN_FETCH_AND_NAND_16:
7047 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
7048 target = expand_builtin_sync_operation (mode, exp, NOT,
7049 false, target, ignore);
7050 if (target)
7051 return target;
7052 break;
7054 case BUILT_IN_ADD_AND_FETCH_1:
7055 case BUILT_IN_ADD_AND_FETCH_2:
7056 case BUILT_IN_ADD_AND_FETCH_4:
7057 case BUILT_IN_ADD_AND_FETCH_8:
7058 case BUILT_IN_ADD_AND_FETCH_16:
7059 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
7060 target = expand_builtin_sync_operation (mode, exp, PLUS,
7061 true, target, ignore);
7062 if (target)
7063 return target;
7064 break;
7066 case BUILT_IN_SUB_AND_FETCH_1:
7067 case BUILT_IN_SUB_AND_FETCH_2:
7068 case BUILT_IN_SUB_AND_FETCH_4:
7069 case BUILT_IN_SUB_AND_FETCH_8:
7070 case BUILT_IN_SUB_AND_FETCH_16:
7071 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
7072 target = expand_builtin_sync_operation (mode, exp, MINUS,
7073 true, target, ignore);
7074 if (target)
7075 return target;
7076 break;
7078 case BUILT_IN_OR_AND_FETCH_1:
7079 case BUILT_IN_OR_AND_FETCH_2:
7080 case BUILT_IN_OR_AND_FETCH_4:
7081 case BUILT_IN_OR_AND_FETCH_8:
7082 case BUILT_IN_OR_AND_FETCH_16:
7083 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
7084 target = expand_builtin_sync_operation (mode, exp, IOR,
7085 true, target, ignore);
7086 if (target)
7087 return target;
7088 break;
7090 case BUILT_IN_AND_AND_FETCH_1:
7091 case BUILT_IN_AND_AND_FETCH_2:
7092 case BUILT_IN_AND_AND_FETCH_4:
7093 case BUILT_IN_AND_AND_FETCH_8:
7094 case BUILT_IN_AND_AND_FETCH_16:
7095 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
7096 target = expand_builtin_sync_operation (mode, exp, AND,
7097 true, target, ignore);
7098 if (target)
7099 return target;
7100 break;
7102 case BUILT_IN_XOR_AND_FETCH_1:
7103 case BUILT_IN_XOR_AND_FETCH_2:
7104 case BUILT_IN_XOR_AND_FETCH_4:
7105 case BUILT_IN_XOR_AND_FETCH_8:
7106 case BUILT_IN_XOR_AND_FETCH_16:
7107 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
7108 target = expand_builtin_sync_operation (mode, exp, XOR,
7109 true, target, ignore);
7110 if (target)
7111 return target;
7112 break;
7114 case BUILT_IN_NAND_AND_FETCH_1:
7115 case BUILT_IN_NAND_AND_FETCH_2:
7116 case BUILT_IN_NAND_AND_FETCH_4:
7117 case BUILT_IN_NAND_AND_FETCH_8:
7118 case BUILT_IN_NAND_AND_FETCH_16:
7119 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
7120 target = expand_builtin_sync_operation (mode, exp, NOT,
7121 true, target, ignore);
7122 if (target)
7123 return target;
7124 break;
7126 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
7127 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
7128 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
7129 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
7130 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
7131 if (mode == VOIDmode)
7132 mode = TYPE_MODE (boolean_type_node);
7133 if (!target || !register_operand (target, mode))
7134 target = gen_reg_rtx (mode);
7136 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
7137 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7138 if (target)
7139 return target;
7140 break;
7142 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
7143 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
7144 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
7145 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
7146 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
7147 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
7148 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7149 if (target)
7150 return target;
7151 break;
7153 case BUILT_IN_LOCK_TEST_AND_SET_1:
7154 case BUILT_IN_LOCK_TEST_AND_SET_2:
7155 case BUILT_IN_LOCK_TEST_AND_SET_4:
7156 case BUILT_IN_LOCK_TEST_AND_SET_8:
7157 case BUILT_IN_LOCK_TEST_AND_SET_16:
7158 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
7159 target = expand_builtin_lock_test_and_set (mode, exp, target);
7160 if (target)
7161 return target;
7162 break;
7164 case BUILT_IN_LOCK_RELEASE_1:
7165 case BUILT_IN_LOCK_RELEASE_2:
7166 case BUILT_IN_LOCK_RELEASE_4:
7167 case BUILT_IN_LOCK_RELEASE_8:
7168 case BUILT_IN_LOCK_RELEASE_16:
7169 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
7170 expand_builtin_lock_release (mode, exp);
7171 return const0_rtx;
7173 case BUILT_IN_SYNCHRONIZE:
7174 expand_builtin_synchronize ();
7175 return const0_rtx;
7177 case BUILT_IN_OBJECT_SIZE:
7178 return expand_builtin_object_size (exp);
7180 case BUILT_IN_MEMCPY_CHK:
7181 case BUILT_IN_MEMPCPY_CHK:
7182 case BUILT_IN_MEMMOVE_CHK:
7183 case BUILT_IN_MEMSET_CHK:
7184 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7185 if (target)
7186 return target;
7187 break;
7189 case BUILT_IN_STRCPY_CHK:
7190 case BUILT_IN_STPCPY_CHK:
7191 case BUILT_IN_STRNCPY_CHK:
7192 case BUILT_IN_STRCAT_CHK:
7193 case BUILT_IN_STRNCAT_CHK:
7194 case BUILT_IN_SNPRINTF_CHK:
7195 case BUILT_IN_VSNPRINTF_CHK:
7196 maybe_emit_chk_warning (exp, fcode);
7197 break;
7199 case BUILT_IN_SPRINTF_CHK:
7200 case BUILT_IN_VSPRINTF_CHK:
7201 maybe_emit_sprintf_chk_warning (exp, fcode);
7202 break;
7204 case BUILT_IN_FREE:
7205 maybe_emit_free_warning (exp);
7206 break;
7208 default: /* Just do a library call if the builtin is unknown. */
7209 break;
7212 /* The switch statement above can drop through to cause the function
7213 to be called normally. */
7214 return expand_call (exp, target, ignore);
7217 /* Determine whether a tree node represents a call to a built-in
7218 function. If the tree T is a call to a built-in function with
7219 the right number of arguments of the appropriate types, return
7220 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7221 Otherwise the return value is END_BUILTINS. */
7223 enum built_in_function
7224 builtin_mathfn_code (const_tree t)
7226 const_tree fndecl, arg, parmlist;
7227 const_tree argtype, parmtype;
7228 const_call_expr_arg_iterator iter;
7230 if (TREE_CODE (t) != CALL_EXPR
7231 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7232 return END_BUILTINS;
7234 fndecl = get_callee_fndecl (t);
7235 if (fndecl == NULL_TREE
7236 || TREE_CODE (fndecl) != FUNCTION_DECL
7237 || ! DECL_BUILT_IN (fndecl)
7238 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7239 return END_BUILTINS;
7241 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7242 init_const_call_expr_arg_iterator (t, &iter);
7243 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7245 /* If a function doesn't take a variable number of arguments,
7246 the last element in the list will have type `void'. */
7247 parmtype = TREE_VALUE (parmlist);
7248 if (VOID_TYPE_P (parmtype))
7250 if (more_const_call_expr_args_p (&iter))
7251 return END_BUILTINS;
7252 return DECL_FUNCTION_CODE (fndecl);
7255 if (! more_const_call_expr_args_p (&iter))
7256 return END_BUILTINS;
7258 arg = next_const_call_expr_arg (&iter);
7259 argtype = TREE_TYPE (arg);
7261 if (SCALAR_FLOAT_TYPE_P (parmtype))
7263 if (! SCALAR_FLOAT_TYPE_P (argtype))
7264 return END_BUILTINS;
7266 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7268 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7269 return END_BUILTINS;
7271 else if (POINTER_TYPE_P (parmtype))
7273 if (! POINTER_TYPE_P (argtype))
7274 return END_BUILTINS;
7276 else if (INTEGRAL_TYPE_P (parmtype))
7278 if (! INTEGRAL_TYPE_P (argtype))
7279 return END_BUILTINS;
7281 else
7282 return END_BUILTINS;
7285 /* Variable-length argument list. */
7286 return DECL_FUNCTION_CODE (fndecl);
7289 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7290 evaluate to a constant. */
7292 static tree
7293 fold_builtin_constant_p (tree arg)
7295 /* We return 1 for a numeric type that's known to be a constant
7296 value at compile-time or for an aggregate type that's a
7297 literal constant. */
7298 STRIP_NOPS (arg);
7300 /* If we know this is a constant, return the constant one. */
7301 if (CONSTANT_CLASS_P (arg)
7302 || (TREE_CODE (arg) == CONSTRUCTOR
7303 && TREE_CONSTANT (arg)))
7304 return integer_one_node;
7305 if (TREE_CODE (arg) == ADDR_EXPR)
7307 tree op = TREE_OPERAND (arg, 0);
7308 if (TREE_CODE (op) == STRING_CST
7309 || (TREE_CODE (op) == ARRAY_REF
7310 && integer_zerop (TREE_OPERAND (op, 1))
7311 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7312 return integer_one_node;
7315 /* If this expression has side effects, show we don't know it to be a
7316 constant. Likewise if it's a pointer or aggregate type since in
7317 those cases we only want literals, since those are only optimized
7318 when generating RTL, not later.
7319 And finally, if we are compiling an initializer, not code, we
7320 need to return a definite result now; there's not going to be any
7321 more optimization done. */
7322 if (TREE_SIDE_EFFECTS (arg)
7323 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7324 || POINTER_TYPE_P (TREE_TYPE (arg))
7325 || cfun == 0
7326 || folding_initializer)
7327 return integer_zero_node;
7329 return NULL_TREE;
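/* Editorial sketch (not part of GCC): roughly what the fold above does at
   the source level; the exact tree shapes depend on the front end.

       extern int x;
       int a = __builtin_constant_p (42);     ->  1  (constant class)
       int b = __builtin_constant_p ("abc");  ->  1  (address of a STRING_CST)
       int c = __builtin_constant_p (x++);    ->  0  (has side effects)
       int d = __builtin_constant_p (x);      ->  deferred (NULL_TREE here),
                                                  except in initializers,
                                                  where it folds to 0.  */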
7332 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7333 return it as a truthvalue. */
7335 static tree
7336 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
7338 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7340 fn = built_in_decls[BUILT_IN_EXPECT];
7341 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7342 ret_type = TREE_TYPE (TREE_TYPE (fn));
7343 pred_type = TREE_VALUE (arg_types);
7344 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7346 pred = fold_convert_loc (loc, pred_type, pred);
7347 expected = fold_convert_loc (loc, expected_type, expected);
7348 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
7350 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7351 build_int_cst (ret_type, 0));
7354 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7355 NULL_TREE if no simplification is possible. */
7357 static tree
7358 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
7360 tree inner, fndecl;
7361 enum tree_code code;
7363 /* If this is a builtin_expect within a builtin_expect keep the
7364 inner one. See through a comparison against a constant. It
7365 might have been added to create a truthvalue. */
7366 inner = arg0;
7367 if (COMPARISON_CLASS_P (inner)
7368 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7369 inner = TREE_OPERAND (inner, 0);
7371 if (TREE_CODE (inner) == CALL_EXPR
7372 && (fndecl = get_callee_fndecl (inner))
7373 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7374 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7375 return arg0;
7377 /* Distribute the expected value over short-circuiting operators.
7378 See through the cast from truthvalue_type_node to long. */
7379 inner = arg0;
7380 while (TREE_CODE (inner) == NOP_EXPR
7381 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7382 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7383 inner = TREE_OPERAND (inner, 0);
7385 code = TREE_CODE (inner);
7386 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7388 tree op0 = TREE_OPERAND (inner, 0);
7389 tree op1 = TREE_OPERAND (inner, 1);
7391 op0 = build_builtin_expect_predicate (loc, op0, arg1);
7392 op1 = build_builtin_expect_predicate (loc, op1, arg1);
7393 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7395 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7398 /* If the argument isn't invariant then there's nothing else we can do. */
7399 if (!TREE_CONSTANT (arg0))
7400 return NULL_TREE;
7402 /* If we expect that a comparison against the argument will fold to
7403 a constant, return the constant. In practice, this means a true
7404 constant or the address of a non-weak symbol. */
7405 inner = arg0;
7406 STRIP_NOPS (inner);
7407 if (TREE_CODE (inner) == ADDR_EXPR)
7411 inner = TREE_OPERAND (inner, 0);
7413 while (TREE_CODE (inner) == COMPONENT_REF
7414 || TREE_CODE (inner) == ARRAY_REF);
7415 if ((TREE_CODE (inner) == VAR_DECL
7416 || TREE_CODE (inner) == FUNCTION_DECL)
7417 && DECL_WEAK (inner))
7418 return NULL_TREE;
7421 /* Otherwise, ARG0 already has the proper type for the return value. */
7422 return arg0;
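/* Editorial sketch (not part of GCC): the distribution above rewrites, in
   source terms,

       __builtin_expect (a && b, 1)

   into roughly

       (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   converted back to the type of the original argument, so each short-circuit
   operand carries its own prediction.  A constant argument, or the address
   of a non-weak symbol, is returned unchanged.  */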
7425 /* Fold a call to __builtin_classify_type with argument ARG. */
7427 static tree
7428 fold_builtin_classify_type (tree arg)
7430 if (arg == 0)
7431 return build_int_cst (NULL_TREE, no_type_class);
7433 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7436 /* Fold a call to __builtin_strlen with argument ARG. */
7438 static tree
7439 fold_builtin_strlen (location_t loc, tree arg)
7441 if (!validate_arg (arg, POINTER_TYPE))
7442 return NULL_TREE;
7443 else
7445 tree len = c_strlen (arg, 0);
7447 if (len)
7449 /* Convert from the internal "sizetype" type to "size_t". */
7450 if (size_type_node)
7451 len = fold_convert_loc (loc, size_type_node, len);
7452 return len;
7455 return NULL_TREE;
7459 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7461 static tree
7462 fold_builtin_inf (location_t loc, tree type, int warn)
7464 REAL_VALUE_TYPE real;
7466 /* __builtin_inff is intended to be usable to define INFINITY on all
7467 targets. If an infinity is not available, INFINITY expands "to a
7468 positive constant of type float that overflows at translation
7469 time", footnote "In this case, using INFINITY will violate the
7470 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7471 Thus we pedwarn to ensure this constraint violation is
7472 diagnosed. */
7473 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7474 pedwarn (loc, 0, "target format does not support infinity");
7476 real_inf (&real);
7477 return build_real (type, real);
7480 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7482 static tree
7483 fold_builtin_nan (tree arg, tree type, int quiet)
7485 REAL_VALUE_TYPE real;
7486 const char *str;
7488 if (!validate_arg (arg, POINTER_TYPE))
7489 return NULL_TREE;
7490 str = c_getstr (arg);
7491 if (!str)
7492 return NULL_TREE;
7494 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7495 return NULL_TREE;
7497 return build_real (type, real);
7500 /* Return true if the floating point expression T has an integer value.
7501 We also allow +Inf, -Inf and NaN to be considered integer values. */
7503 static bool
7504 integer_valued_real_p (tree t)
7506 switch (TREE_CODE (t))
7508 case FLOAT_EXPR:
7509 return true;
7511 case ABS_EXPR:
7512 case SAVE_EXPR:
7513 return integer_valued_real_p (TREE_OPERAND (t, 0));
7515 case COMPOUND_EXPR:
7516 case MODIFY_EXPR:
7517 case BIND_EXPR:
7518 return integer_valued_real_p (TREE_OPERAND (t, 1));
7520 case PLUS_EXPR:
7521 case MINUS_EXPR:
7522 case MULT_EXPR:
7523 case MIN_EXPR:
7524 case MAX_EXPR:
7525 return integer_valued_real_p (TREE_OPERAND (t, 0))
7526 && integer_valued_real_p (TREE_OPERAND (t, 1));
7528 case COND_EXPR:
7529 return integer_valued_real_p (TREE_OPERAND (t, 1))
7530 && integer_valued_real_p (TREE_OPERAND (t, 2));
7532 case REAL_CST:
7533 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7535 case NOP_EXPR:
7537 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7538 if (TREE_CODE (type) == INTEGER_TYPE)
7539 return true;
7540 if (TREE_CODE (type) == REAL_TYPE)
7541 return integer_valued_real_p (TREE_OPERAND (t, 0));
7542 break;
7545 case CALL_EXPR:
7546 switch (builtin_mathfn_code (t))
7548 CASE_FLT_FN (BUILT_IN_CEIL):
7549 CASE_FLT_FN (BUILT_IN_FLOOR):
7550 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7551 CASE_FLT_FN (BUILT_IN_RINT):
7552 CASE_FLT_FN (BUILT_IN_ROUND):
7553 CASE_FLT_FN (BUILT_IN_TRUNC):
7554 return true;
7556 CASE_FLT_FN (BUILT_IN_FMIN):
7557 CASE_FLT_FN (BUILT_IN_FMAX):
7558 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7559 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7561 default:
7562 break;
7564 break;
7566 default:
7567 break;
7569 return false;
7572 /* FNDECL is assumed to be a builtin where truncation can be propagated
7573 across (for instance floor((double)f) == (double)floorf (f)).
7574 Do the transformation for a call with argument ARG. */
7576 static tree
7577 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7579 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7581 if (!validate_arg (arg, REAL_TYPE))
7582 return NULL_TREE;
7584 /* Integer rounding functions are idempotent. */
7585 if (fcode == builtin_mathfn_code (arg))
7586 return arg;
7588 /* If argument is already integer valued, and we don't need to worry
7589 about setting errno, there's no need to perform rounding. */
7590 if (! flag_errno_math && integer_valued_real_p (arg))
7591 return arg;
7593 if (optimize)
7595 tree arg0 = strip_float_extensions (arg);
7596 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7597 tree newtype = TREE_TYPE (arg0);
7598 tree decl;
7600 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7601 && (decl = mathfn_built_in (newtype, fcode)))
7602 return fold_convert_loc (loc, ftype,
7603 build_call_expr_loc (loc, decl, 1,
7604 fold_convert_loc (loc,
7605 newtype,
7606 arg0)));
7608 return NULL_TREE;
7611 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7612 the argument, for instance lround((double)f) -> lroundf (f).
7613 Do the transformation for a call with argument ARG. */
7615 static tree
7616 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7618 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7620 if (!validate_arg (arg, REAL_TYPE))
7621 return NULL_TREE;
7623 /* If argument is already integer valued, and we don't need to worry
7624 about setting errno, there's no need to perform rounding. */
7625 if (! flag_errno_math && integer_valued_real_p (arg))
7626 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7627 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7629 if (optimize)
7631 tree ftype = TREE_TYPE (arg);
7632 tree arg0 = strip_float_extensions (arg);
7633 tree newtype = TREE_TYPE (arg0);
7634 tree decl;
7636 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7637 && (decl = mathfn_built_in (newtype, fcode)))
7638 return build_call_expr_loc (loc, decl, 1,
7639 fold_convert_loc (loc, newtype, arg0));
7642 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7643 sizeof (long long) == sizeof (long). */
7644 if (TYPE_PRECISION (long_long_integer_type_node)
7645 == TYPE_PRECISION (long_integer_type_node))
7647 tree newfn = NULL_TREE;
7648 switch (fcode)
7650 CASE_FLT_FN (BUILT_IN_LLCEIL):
7651 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7652 break;
7654 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7655 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7656 break;
7658 CASE_FLT_FN (BUILT_IN_LLROUND):
7659 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7660 break;
7662 CASE_FLT_FN (BUILT_IN_LLRINT):
7663 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7664 break;
7666 default:
7667 break;
7670 if (newfn)
7672 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7673 return fold_convert_loc (loc,
7674 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7678 return NULL_TREE;
7681 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7682 return type. Return NULL_TREE if no simplification can be made. */
7684 static tree
7685 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7687 tree res;
7689 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7690 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7691 return NULL_TREE;
7693 /* Calculate the result when the argument is a constant. */
7694 if (TREE_CODE (arg) == COMPLEX_CST
7695 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7696 type, mpfr_hypot)))
7697 return res;
7699 if (TREE_CODE (arg) == COMPLEX_EXPR)
7701 tree real = TREE_OPERAND (arg, 0);
7702 tree imag = TREE_OPERAND (arg, 1);
7704 /* If either part is zero, cabs is fabs of the other. */
7705 if (real_zerop (real))
7706 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7707 if (real_zerop (imag))
7708 return fold_build1_loc (loc, ABS_EXPR, type, real);
7710 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7711 if (flag_unsafe_math_optimizations
7712 && operand_equal_p (real, imag, OEP_PURE_SAME))
7714 const REAL_VALUE_TYPE sqrt2_trunc
7715 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7716 STRIP_NOPS (real);
7717 return fold_build2_loc (loc, MULT_EXPR, type,
7718 fold_build1_loc (loc, ABS_EXPR, type, real),
7719 build_real (type, sqrt2_trunc));
7723 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7724 if (TREE_CODE (arg) == NEGATE_EXPR
7725 || TREE_CODE (arg) == CONJ_EXPR)
7726 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7728 /* Don't do this when optimizing for size. */
7729 if (flag_unsafe_math_optimizations
7730 && optimize && optimize_function_for_speed_p (cfun))
7732 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7734 if (sqrtfn != NULL_TREE)
7736 tree rpart, ipart, result;
7738 arg = builtin_save_expr (arg);
7740 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7741 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7743 rpart = builtin_save_expr (rpart);
7744 ipart = builtin_save_expr (ipart);
7746 result = fold_build2_loc (loc, PLUS_EXPR, type,
7747 fold_build2_loc (loc, MULT_EXPR, type,
7748 rpart, rpart),
7749 fold_build2_loc (loc, MULT_EXPR, type,
7750 ipart, ipart));
7752 return build_call_expr_loc (loc, sqrtfn, 1, result);
7756 return NULL_TREE;
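/* Editorial sketch (not part of GCC): assuming <complex.h>-style operands,
   the cabs folds above amount to

       cabs (-z)        ->  cabs (z)
       cabs (conj (z))  ->  cabs (z)
       cabs (x + x*I)   ->  fabs (x) * sqrt (2)   (unsafe math, COMPLEX_EXPR operand)

   and, with -funsafe-math-optimizations when optimizing for speed,

       cabs (z)  ->  sqrt (creal (z) * creal (z) + cimag (z) * cimag (z))

   built directly as trees rather than emitted as a library call.  */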
7759 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7760 Return NULL_TREE if no simplification can be made. */
7762 static tree
7763 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7766 enum built_in_function fcode;
7767 tree res;
7769 if (!validate_arg (arg, REAL_TYPE))
7770 return NULL_TREE;
7772 /* Calculate the result when the argument is a constant. */
7773 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7774 return res;
7776 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7777 fcode = builtin_mathfn_code (arg);
7778 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7780 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7781 arg = fold_build2_loc (loc, MULT_EXPR, type,
7782 CALL_EXPR_ARG (arg, 0),
7783 build_real (type, dconsthalf));
7784 return build_call_expr_loc (loc, expfn, 1, arg);
7787 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7788 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7790 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7792 if (powfn)
7794 tree arg0 = CALL_EXPR_ARG (arg, 0);
7795 tree tree_root;
7796 /* The inner root was either sqrt or cbrt. */
7797 /* This was a conditional expression but it triggered a bug
7798 in Sun C 5.5. */
7799 REAL_VALUE_TYPE dconstroot;
7800 if (BUILTIN_SQRT_P (fcode))
7801 dconstroot = dconsthalf;
7802 else
7803 dconstroot = dconst_third ();
7805 /* Adjust for the outer root. */
7806 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7807 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7808 tree_root = build_real (type, dconstroot);
7809 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7813 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7814 if (flag_unsafe_math_optimizations
7815 && (fcode == BUILT_IN_POW
7816 || fcode == BUILT_IN_POWF
7817 || fcode == BUILT_IN_POWL))
7819 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7820 tree arg0 = CALL_EXPR_ARG (arg, 0);
7821 tree arg1 = CALL_EXPR_ARG (arg, 1);
7822 tree narg1;
7823 if (!tree_expr_nonnegative_p (arg0))
7824 arg0 = build1 (ABS_EXPR, type, arg0);
7825 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7826 build_real (type, dconsthalf));
7827 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7830 return NULL_TREE;
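/* Editorial sketch (not part of GCC): with -funsafe-math-optimizations the
   sqrt folds above perform, in source terms,

       sqrt (exp (x))     ->  exp (x * 0.5)
       sqrt (sqrt (x))    ->  pow (x, 0.25)
       sqrt (cbrt (x))    ->  pow (x, 1.0 / 6.0)
       sqrt (pow (x, y))  ->  pow (fabs (x), y * 0.5)

   while a constant argument is evaluated at compile time through MPFR.  */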
7833 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7834 Return NULL_TREE if no simplification can be made. */
7836 static tree
7837 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7839 const enum built_in_function fcode = builtin_mathfn_code (arg);
7840 tree res;
7842 if (!validate_arg (arg, REAL_TYPE))
7843 return NULL_TREE;
7845 /* Calculate the result when the argument is a constant. */
7846 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7847 return res;
7849 if (flag_unsafe_math_optimizations)
7851 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7852 if (BUILTIN_EXPONENT_P (fcode))
7854 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7855 const REAL_VALUE_TYPE third_trunc =
7856 real_value_truncate (TYPE_MODE (type), dconst_third ());
7857 arg = fold_build2_loc (loc, MULT_EXPR, type,
7858 CALL_EXPR_ARG (arg, 0),
7859 build_real (type, third_trunc));
7860 return build_call_expr_loc (loc, expfn, 1, arg);
7863 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7864 if (BUILTIN_SQRT_P (fcode))
7866 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7868 if (powfn)
7870 tree arg0 = CALL_EXPR_ARG (arg, 0);
7871 tree tree_root;
7872 REAL_VALUE_TYPE dconstroot = dconst_third ();
7874 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7875 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7876 tree_root = build_real (type, dconstroot);
7877 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7881 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7882 if (BUILTIN_CBRT_P (fcode))
7884 tree arg0 = CALL_EXPR_ARG (arg, 0);
7885 if (tree_expr_nonnegative_p (arg0))
7887 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7889 if (powfn)
7891 tree tree_root;
7892 REAL_VALUE_TYPE dconstroot;
7894 real_arithmetic (&dconstroot, MULT_EXPR,
7895 dconst_third_ptr (), dconst_third_ptr ());
7896 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7897 tree_root = build_real (type, dconstroot);
7898 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7903 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7904 if (fcode == BUILT_IN_POW
7905 || fcode == BUILT_IN_POWF
7906 || fcode == BUILT_IN_POWL)
7908 tree arg00 = CALL_EXPR_ARG (arg, 0);
7909 tree arg01 = CALL_EXPR_ARG (arg, 1);
7910 if (tree_expr_nonnegative_p (arg00))
7912 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7913 const REAL_VALUE_TYPE dconstroot
7914 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7915 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7916 build_real (type, dconstroot));
7917 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7921 return NULL_TREE;
7924 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7925 TYPE is the type of the return value. Return NULL_TREE if no
7926 simplification can be made. */
7928 static tree
7929 fold_builtin_cos (location_t loc,
7930 tree arg, tree type, tree fndecl)
7932 tree res, narg;
7934 if (!validate_arg (arg, REAL_TYPE))
7935 return NULL_TREE;
7937 /* Calculate the result when the argument is a constant. */
7938 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7939 return res;
7941 /* Optimize cos(-x) into cos (x). */
7942 if ((narg = fold_strip_sign_ops (arg)))
7943 return build_call_expr_loc (loc, fndecl, 1, narg);
7945 return NULL_TREE;
7948 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7949 Return NULL_TREE if no simplification can be made. */
7951 static tree
7952 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7954 if (validate_arg (arg, REAL_TYPE))
7956 tree res, narg;
7958 /* Calculate the result when the argument is a constant. */
7959 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7960 return res;
7962 /* Optimize cosh(-x) into cosh (x). */
7963 if ((narg = fold_strip_sign_ops (arg)))
7964 return build_call_expr_loc (loc, fndecl, 1, narg);
7967 return NULL_TREE;
7970 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7971 argument ARG. TYPE is the type of the return value. Return
7972 NULL_TREE if no simplification can be made. */
7974 static tree
7975 fold_builtin_ccos (location_t loc,
7976 tree arg, tree type ATTRIBUTE_UNUSED, tree fndecl,
7977 bool hyper ATTRIBUTE_UNUSED)
7979 if (validate_arg (arg, COMPLEX_TYPE)
7980 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7982 tree tmp;
7984 #ifdef HAVE_mpc
7985 /* Calculate the result when the argument is a constant. */
7986 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7987 return tmp;
7988 #endif
7990 /* Optimize fn(-x) into fn(x). */
7991 if ((tmp = fold_strip_sign_ops (arg)))
7992 return build_call_expr_loc (loc, fndecl, 1, tmp);
7995 return NULL_TREE;
7998 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7999 Return NULL_TREE if no simplification can be made. */
8001 static tree
8002 fold_builtin_tan (tree arg, tree type)
8004 enum built_in_function fcode;
8005 tree res;
8007 if (!validate_arg (arg, REAL_TYPE))
8008 return NULL_TREE;
8010 /* Calculate the result when the argument is a constant. */
8011 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
8012 return res;
8014 /* Optimize tan(atan(x)) = x. */
8015 fcode = builtin_mathfn_code (arg);
8016 if (flag_unsafe_math_optimizations
8017 && (fcode == BUILT_IN_ATAN
8018 || fcode == BUILT_IN_ATANF
8019 || fcode == BUILT_IN_ATANL))
8020 return CALL_EXPR_ARG (arg, 0);
8022 return NULL_TREE;
8025 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8026 NULL_TREE if no simplification can be made. */
8028 static tree
8029 fold_builtin_sincos (location_t loc,
8030 tree arg0, tree arg1, tree arg2)
8032 tree type;
8033 tree res, fn, call;
8035 if (!validate_arg (arg0, REAL_TYPE)
8036 || !validate_arg (arg1, POINTER_TYPE)
8037 || !validate_arg (arg2, POINTER_TYPE))
8038 return NULL_TREE;
8040 type = TREE_TYPE (arg0);
8042 /* Calculate the result when the argument is a constant. */
8043 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8044 return res;
8046 /* Canonicalize sincos to cexpi. */
8047 if (!TARGET_C99_FUNCTIONS)
8048 return NULL_TREE;
8049 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8050 if (!fn)
8051 return NULL_TREE;
8053 call = build_call_expr_loc (loc, fn, 1, arg0);
8054 call = builtin_save_expr (call);
8056 return build2 (COMPOUND_EXPR, void_type_node,
8057 build2 (MODIFY_EXPR, void_type_node,
8058 build_fold_indirect_ref_loc (loc, arg1),
8059 build1 (IMAGPART_EXPR, type, call)),
8060 build2 (MODIFY_EXPR, void_type_node,
8061 build_fold_indirect_ref_loc (loc, arg2),
8062 build1 (REALPART_EXPR, type, call)));
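/* Editorial sketch (not part of GCC): on targets with the C99 runtime the
   canonicalization above turns

       sincos (x, sinp, cosp);

   into roughly

       __complex__ double t = __builtin_cexpi (x);
       *sinp = __imag__ t;
       *cosp = __real__ t;

   expressed as a COMPOUND_EXPR of the two stores, so later passes only need
   to know about cexpi.  */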
8065 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8066 NULL_TREE if no simplification can be made. */
8068 static tree
8069 fold_builtin_cexp (location_t loc, tree arg0, tree type)
8071 tree rtype;
8072 tree realp, imagp, ifn;
8073 #ifdef HAVE_mpc
8074 tree res;
8075 #endif
8077 if (!validate_arg (arg0, COMPLEX_TYPE)
8078 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
8079 return NULL_TREE;
8081 #ifdef HAVE_mpc
8082 /* Calculate the result when the argument is a constant. */
8083 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8084 return res;
8085 #endif
8087 rtype = TREE_TYPE (TREE_TYPE (arg0));
8089 /* If we can figure out the real part of arg0 and it is constant zero,
8090 fold to cexpi. */
8091 if (!TARGET_C99_FUNCTIONS)
8092 return NULL_TREE;
8093 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8094 if (!ifn)
8095 return NULL_TREE;
8097 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8098 && real_zerop (realp))
8100 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8101 return build_call_expr_loc (loc, ifn, 1, narg);
8104 /* If we can easily decompose the real and imaginary parts, split cexp
8105 into exp (r) * cexpi (i). */
8106 if (flag_unsafe_math_optimizations
8107 && realp)
8109 tree rfn, rcall, icall;
8111 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8112 if (!rfn)
8113 return NULL_TREE;
8115 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
8116 if (!imagp)
8117 return NULL_TREE;
8119 icall = build_call_expr_loc (loc, ifn, 1, imagp);
8120 icall = builtin_save_expr (icall);
8121 rcall = build_call_expr_loc (loc, rfn, 1, realp);
8122 rcall = builtin_save_expr (rcall);
8123 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8124 fold_build2_loc (loc, MULT_EXPR, rtype,
8125 rcall,
8126 fold_build1_loc (loc, REALPART_EXPR,
8127 rtype, icall)),
8128 fold_build2_loc (loc, MULT_EXPR, rtype,
8129 rcall,
8130 fold_build1_loc (loc, IMAGPART_EXPR,
8131 rtype, icall)));
8134 return NULL_TREE;
8137 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8138 Return NULL_TREE if no simplification can be made. */
8140 static tree
8141 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8143 if (!validate_arg (arg, REAL_TYPE))
8144 return NULL_TREE;
8146 /* Optimize trunc of constant value. */
8147 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8149 REAL_VALUE_TYPE r, x;
8150 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8152 x = TREE_REAL_CST (arg);
8153 real_trunc (&r, TYPE_MODE (type), &x);
8154 return build_real (type, r);
8157 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8160 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8161 Return NULL_TREE if no simplification can be made. */
8163 static tree
8164 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8166 if (!validate_arg (arg, REAL_TYPE))
8167 return NULL_TREE;
8169 /* Optimize floor of constant value. */
8170 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8172 REAL_VALUE_TYPE x;
8174 x = TREE_REAL_CST (arg);
8175 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8177 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8178 REAL_VALUE_TYPE r;
8180 real_floor (&r, TYPE_MODE (type), &x);
8181 return build_real (type, r);
8185 /* Fold floor (x) where x is nonnegative to trunc (x). */
8186 if (tree_expr_nonnegative_p (arg))
8188 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8189 if (truncfn)
8190 return build_call_expr_loc (loc, truncfn, 1, arg);
8193 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8196 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8197 Return NULL_TREE if no simplification can be made. */
8199 static tree
8200 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8202 if (!validate_arg (arg, REAL_TYPE))
8203 return NULL_TREE;
8205 /* Optimize ceil of constant value. */
8206 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8208 REAL_VALUE_TYPE x;
8210 x = TREE_REAL_CST (arg);
8211 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8213 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8214 REAL_VALUE_TYPE r;
8216 real_ceil (&r, TYPE_MODE (type), &x);
8217 return build_real (type, r);
8221 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8224 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8225 Return NULL_TREE if no simplification can be made. */
8227 static tree
8228 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8230 if (!validate_arg (arg, REAL_TYPE))
8231 return NULL_TREE;
8233 /* Optimize round of constant value. */
8234 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8236 REAL_VALUE_TYPE x;
8238 x = TREE_REAL_CST (arg);
8239 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8241 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8242 REAL_VALUE_TYPE r;
8244 real_round (&r, TYPE_MODE (type), &x);
8245 return build_real (type, r);
8249 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8252 /* Fold function call to builtin lround, lroundf or lroundl (or the
8253 corresponding long long versions) and other rounding functions. ARG
8254 is the argument to the call. Return NULL_TREE if no simplification
8255 can be made. */
8257 static tree
8258 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8260 if (!validate_arg (arg, REAL_TYPE))
8261 return NULL_TREE;
8263 /* Optimize lround of constant value. */
8264 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8266 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8268 if (real_isfinite (&x))
8270 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8271 tree ftype = TREE_TYPE (arg);
8272 unsigned HOST_WIDE_INT lo2;
8273 HOST_WIDE_INT hi, lo;
8274 REAL_VALUE_TYPE r;
8276 switch (DECL_FUNCTION_CODE (fndecl))
8278 CASE_FLT_FN (BUILT_IN_LFLOOR):
8279 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8280 real_floor (&r, TYPE_MODE (ftype), &x);
8281 break;
8283 CASE_FLT_FN (BUILT_IN_LCEIL):
8284 CASE_FLT_FN (BUILT_IN_LLCEIL):
8285 real_ceil (&r, TYPE_MODE (ftype), &x);
8286 break;
8288 CASE_FLT_FN (BUILT_IN_LROUND):
8289 CASE_FLT_FN (BUILT_IN_LLROUND):
8290 real_round (&r, TYPE_MODE (ftype), &x);
8291 break;
8293 default:
8294 gcc_unreachable ();
8297 REAL_VALUE_TO_INT (&lo, &hi, r);
8298 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8299 return build_int_cst_wide (itype, lo2, hi);
8303 switch (DECL_FUNCTION_CODE (fndecl))
8305 CASE_FLT_FN (BUILT_IN_LFLOOR):
8306 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8307 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8308 if (tree_expr_nonnegative_p (arg))
8309 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8310 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8311 break;
8312 default:;
8315 return fold_fixed_mathfn (loc, fndecl, arg);
8318 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8319 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8320 the argument to the call. Return NULL_TREE if no simplification can
8321 be made. */
8323 static tree
8324 fold_builtin_bitop (tree fndecl, tree arg)
8326 if (!validate_arg (arg, INTEGER_TYPE))
8327 return NULL_TREE;
8329 /* Optimize for constant argument. */
8330 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8332 HOST_WIDE_INT hi, width, result;
8333 unsigned HOST_WIDE_INT lo;
8334 tree type;
8336 type = TREE_TYPE (arg);
8337 width = TYPE_PRECISION (type);
8338 lo = TREE_INT_CST_LOW (arg);
8340 /* Clear all the bits that are beyond the type's precision. */
8341 if (width > HOST_BITS_PER_WIDE_INT)
8343 hi = TREE_INT_CST_HIGH (arg);
8344 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8345 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8347 else
8349 hi = 0;
8350 if (width < HOST_BITS_PER_WIDE_INT)
8351 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8354 switch (DECL_FUNCTION_CODE (fndecl))
8356 CASE_INT_FN (BUILT_IN_FFS):
8357 if (lo != 0)
8358 result = exact_log2 (lo & -lo) + 1;
8359 else if (hi != 0)
8360 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8361 else
8362 result = 0;
8363 break;
8365 CASE_INT_FN (BUILT_IN_CLZ):
8366 if (hi != 0)
8367 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8368 else if (lo != 0)
8369 result = width - floor_log2 (lo) - 1;
8370 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8371 result = width;
8372 break;
8374 CASE_INT_FN (BUILT_IN_CTZ):
8375 if (lo != 0)
8376 result = exact_log2 (lo & -lo);
8377 else if (hi != 0)
8378 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8379 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8380 result = width;
8381 break;
8383 CASE_INT_FN (BUILT_IN_POPCOUNT):
8384 result = 0;
8385 while (lo)
8386 result++, lo &= lo - 1;
8387 while (hi)
8388 result++, hi &= hi - 1;
8389 break;
8391 CASE_INT_FN (BUILT_IN_PARITY):
8392 result = 0;
8393 while (lo)
8394 result++, lo &= lo - 1;
8395 while (hi)
8396 result++, hi &= hi - 1;
8397 result &= 1;
8398 break;
8400 default:
8401 gcc_unreachable ();
8404 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8407 return NULL_TREE;
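/* Editorial sketch (not part of GCC): for constant arguments the folding
   above reduces the bit builtins to integer constants, e.g. for a 32-bit int

       __builtin_ffs (0)          ->  0
       __builtin_ffs (0x10)       ->  5
       __builtin_popcount (0xff)  ->  8
       __builtin_parity (7)       ->  1
       __builtin_ctz (8)          ->  3
       __builtin_clz (1)          ->  31

   with clz/ctz of zero following C[LT]Z_DEFINED_VALUE_AT_ZERO for the
   target.  */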
8410 /* Fold a function call to __builtin_bswap32 or __builtin_bswap64.
8411 Return NULL_TREE if no simplification can be made. */
8412 static tree
8413 fold_builtin_bswap (tree fndecl, tree arg)
8415 if (! validate_arg (arg, INTEGER_TYPE))
8416 return NULL_TREE;
8418 /* Optimize constant value. */
8419 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8421 HOST_WIDE_INT hi, width, r_hi = 0;
8422 unsigned HOST_WIDE_INT lo, r_lo = 0;
8423 tree type;
8425 type = TREE_TYPE (arg);
8426 width = TYPE_PRECISION (type);
8427 lo = TREE_INT_CST_LOW (arg);
8428 hi = TREE_INT_CST_HIGH (arg);
8430 switch (DECL_FUNCTION_CODE (fndecl))
8432 case BUILT_IN_BSWAP32:
8433 case BUILT_IN_BSWAP64:
8435 int s;
8437 for (s = 0; s < width; s += 8)
8439 int d = width - s - 8;
8440 unsigned HOST_WIDE_INT byte;
8442 if (s < HOST_BITS_PER_WIDE_INT)
8443 byte = (lo >> s) & 0xff;
8444 else
8445 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8447 if (d < HOST_BITS_PER_WIDE_INT)
8448 r_lo |= byte << d;
8449 else
8450 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8454 break;
8456 default:
8457 gcc_unreachable ();
8460 if (width < HOST_BITS_PER_WIDE_INT)
8461 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8462 else
8463 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8466 return NULL_TREE;
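/* Editorial sketch (not part of GCC): the byte-reversal loop above folds
   constant arguments at compile time, e.g.

       __builtin_bswap32 (0x12345678)          ->  0x78563412
       __builtin_bswap64 (0x0102030405060708)  ->  0x0807060504030201

   by moving each 8-bit byte from bit position S to position WIDTH - S - 8.  */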
8469 /* A subroutine of fold_builtin to fold the various logarithmic
8470 functions. Return NULL_TREE if no simplification can be made.
8471 FUNC is the corresponding MPFR logarithm function. */
8473 static tree
8474 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8475 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8477 if (validate_arg (arg, REAL_TYPE))
8479 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8480 tree res;
8481 const enum built_in_function fcode = builtin_mathfn_code (arg);
8483 /* Calculate the result when the argument is a constant. */
8484 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8485 return res;
8487 /* Special case, optimize logN(expN(x)) = x. */
8488 if (flag_unsafe_math_optimizations
8489 && ((func == mpfr_log
8490 && (fcode == BUILT_IN_EXP
8491 || fcode == BUILT_IN_EXPF
8492 || fcode == BUILT_IN_EXPL))
8493 || (func == mpfr_log2
8494 && (fcode == BUILT_IN_EXP2
8495 || fcode == BUILT_IN_EXP2F
8496 || fcode == BUILT_IN_EXP2L))
8497 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8498 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8500 /* Optimize logN(func()) for various exponential functions. We
8501 want to determine the value "x" and the power "exponent" in
8502 order to transform logN(x**exponent) into exponent*logN(x). */
8503 if (flag_unsafe_math_optimizations)
8505 tree exponent = 0, x = 0;
8507 switch (fcode)
8509 CASE_FLT_FN (BUILT_IN_EXP):
8510 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8511 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8512 dconst_e ()));
8513 exponent = CALL_EXPR_ARG (arg, 0);
8514 break;
8515 CASE_FLT_FN (BUILT_IN_EXP2):
8516 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8517 x = build_real (type, dconst2);
8518 exponent = CALL_EXPR_ARG (arg, 0);
8519 break;
8520 CASE_FLT_FN (BUILT_IN_EXP10):
8521 CASE_FLT_FN (BUILT_IN_POW10):
8522 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8524 REAL_VALUE_TYPE dconst10;
8525 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8526 x = build_real (type, dconst10);
8528 exponent = CALL_EXPR_ARG (arg, 0);
8529 break;
8530 CASE_FLT_FN (BUILT_IN_SQRT):
8531 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8532 x = CALL_EXPR_ARG (arg, 0);
8533 exponent = build_real (type, dconsthalf);
8534 break;
8535 CASE_FLT_FN (BUILT_IN_CBRT):
8536 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8537 x = CALL_EXPR_ARG (arg, 0);
8538 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8539 dconst_third ()));
8540 break;
8541 CASE_FLT_FN (BUILT_IN_POW):
8542 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8543 x = CALL_EXPR_ARG (arg, 0);
8544 exponent = CALL_EXPR_ARG (arg, 1);
8545 break;
8546 default:
8547 break;
8550 /* Now perform the optimization. */
8551 if (x && exponent)
8553 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8554 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8559 return NULL_TREE;
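/* Editorial sketch (not part of GCC): with -funsafe-math-optimizations the
   logarithm folds above perform, in source terms,

       log (exp (x))     ->  x
       log2 (exp2 (x))   ->  x
       log (exp2 (x))    ->  x * log (2.0)
       log (sqrt (x))    ->  0.5 * log (x)
       log (cbrt (x))    ->  (1.0 / 3.0) * log (x)
       log (pow (x, y))  ->  y * log (x)

   while constant arguments are evaluated directly through MPFR.  */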
8562 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8563 NULL_TREE if no simplification can be made. */
8565 static tree
8566 fold_builtin_hypot (location_t loc, tree fndecl,
8567 tree arg0, tree arg1, tree type)
8569 tree res, narg0, narg1;
8571 if (!validate_arg (arg0, REAL_TYPE)
8572 || !validate_arg (arg1, REAL_TYPE))
8573 return NULL_TREE;
8575 /* Calculate the result when the argument is a constant. */
8576 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8577 return res;
8579 /* If either argument to hypot has a negate or abs, strip that off.
8580 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8581 narg0 = fold_strip_sign_ops (arg0);
8582 narg1 = fold_strip_sign_ops (arg1);
8583 if (narg0 || narg1)
8585 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8586 narg1 ? narg1 : arg1);
8589 /* If either argument is zero, hypot is fabs of the other. */
8590 if (real_zerop (arg0))
8591 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8592 else if (real_zerop (arg1))
8593 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8595 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8596 if (flag_unsafe_math_optimizations
8597 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8599 const REAL_VALUE_TYPE sqrt2_trunc
8600 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8601 return fold_build2_loc (loc, MULT_EXPR, type,
8602 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8603 build_real (type, sqrt2_trunc));
8606 return NULL_TREE;
8610 /* Fold a builtin function call to pow, powf, or powl. Return
8611 NULL_TREE if no simplification can be made. */
8612 static tree
8613 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8615 tree res;
8617 if (!validate_arg (arg0, REAL_TYPE)
8618 || !validate_arg (arg1, REAL_TYPE))
8619 return NULL_TREE;
8621 /* Calculate the result when the argument is a constant. */
8622 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8623 return res;
8625 /* Optimize pow(1.0,y) = 1.0. */
8626 if (real_onep (arg0))
8627 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8629 if (TREE_CODE (arg1) == REAL_CST
8630 && !TREE_OVERFLOW (arg1))
8632 REAL_VALUE_TYPE cint;
8633 REAL_VALUE_TYPE c;
8634 HOST_WIDE_INT n;
8636 c = TREE_REAL_CST (arg1);
8638 /* Optimize pow(x,0.0) = 1.0. */
8639 if (REAL_VALUES_EQUAL (c, dconst0))
8640 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8641 arg0);
8643 /* Optimize pow(x,1.0) = x. */
8644 if (REAL_VALUES_EQUAL (c, dconst1))
8645 return arg0;
8647 /* Optimize pow(x,-1.0) = 1.0/x. */
8648 if (REAL_VALUES_EQUAL (c, dconstm1))
8649 return fold_build2_loc (loc, RDIV_EXPR, type,
8650 build_real (type, dconst1), arg0);
8652 /* Optimize pow(x,0.5) = sqrt(x). */
8653 if (flag_unsafe_math_optimizations
8654 && REAL_VALUES_EQUAL (c, dconsthalf))
8656 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8658 if (sqrtfn != NULL_TREE)
8659 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8662 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8663 if (flag_unsafe_math_optimizations)
8665 const REAL_VALUE_TYPE dconstroot
8666 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8668 if (REAL_VALUES_EQUAL (c, dconstroot))
8670 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8671 if (cbrtfn != NULL_TREE)
8672 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8676 /* Check for an integer exponent. */
8677 n = real_to_integer (&c);
8678 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8679 if (real_identical (&c, &cint))
8681 /* Attempt to evaluate pow at compile-time, unless this should
8682 raise an exception. */
8683 if (TREE_CODE (arg0) == REAL_CST
8684 && !TREE_OVERFLOW (arg0)
8685 && (n > 0
8686 || (!flag_trapping_math && !flag_errno_math)
8687 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8689 REAL_VALUE_TYPE x;
8690 bool inexact;
8692 x = TREE_REAL_CST (arg0);
8693 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8694 if (flag_unsafe_math_optimizations || !inexact)
8695 return build_real (type, x);
8698 /* Strip sign ops from even integer powers. */
8699 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8701 tree narg0 = fold_strip_sign_ops (arg0);
8702 if (narg0)
8703 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8708 if (flag_unsafe_math_optimizations)
8710 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8712 /* Optimize pow(expN(x),y) = expN(x*y). */
8713 if (BUILTIN_EXPONENT_P (fcode))
8715 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8716 tree arg = CALL_EXPR_ARG (arg0, 0);
8717 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8718 return build_call_expr_loc (loc, expfn, 1, arg);
8721 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8722 if (BUILTIN_SQRT_P (fcode))
8724 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8725 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8726 build_real (type, dconsthalf));
8727 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8730 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8731 if (BUILTIN_CBRT_P (fcode))
8733 tree arg = CALL_EXPR_ARG (arg0, 0);
8734 if (tree_expr_nonnegative_p (arg))
8736 const REAL_VALUE_TYPE dconstroot
8737 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8738 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8739 build_real (type, dconstroot));
8740 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8744 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8745 if (fcode == BUILT_IN_POW
8746 || fcode == BUILT_IN_POWF
8747 || fcode == BUILT_IN_POWL)
8749 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8750 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8751 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8752 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8756 return NULL_TREE;
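/* Editorial sketch (not part of GCC): the pow folds above give, in source
   terms,

       pow (1.0, y)   ->  1.0
       pow (x, 0.0)   ->  1.0
       pow (x, 1.0)   ->  x
       pow (x, -1.0)  ->  1.0 / x

   unconditionally, and with -funsafe-math-optimizations additionally

       pow (x, 0.5)         ->  sqrt (x)
       pow (x, 1.0 / 3.0)   ->  cbrt (x)
       pow (exp (x), y)     ->  exp (x * y)
       pow (sqrt (x), y)    ->  pow (x, y * 0.5)
       pow (pow (x, y), z)  ->  pow (x, y * z)

   with constant operands evaluated at compile time where that cannot raise
   an exception.  */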
8759 /* Fold a builtin function call to powi, powif, or powil with arguments ARG0 and ARG1.
8760 Return NULL_TREE if no simplification can be made. */
8761 static tree
8762 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8763 tree arg0, tree arg1, tree type)
8765 if (!validate_arg (arg0, REAL_TYPE)
8766 || !validate_arg (arg1, INTEGER_TYPE))
8767 return NULL_TREE;
8769 /* Optimize pow(1.0,y) = 1.0. */
8770 if (real_onep (arg0))
8771 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8773 if (host_integerp (arg1, 0))
8775 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8777 /* Evaluate powi at compile-time. */
8778 if (TREE_CODE (arg0) == REAL_CST
8779 && !TREE_OVERFLOW (arg0))
8781 REAL_VALUE_TYPE x;
8782 x = TREE_REAL_CST (arg0);
8783 real_powi (&x, TYPE_MODE (type), &x, c);
8784 return build_real (type, x);
8787 /* Optimize pow(x,0) = 1.0. */
8788 if (c == 0)
8789 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8790 arg0);
8792 /* Optimize pow(x,1) = x. */
8793 if (c == 1)
8794 return arg0;
8796 /* Optimize pow(x,-1) = 1.0/x. */
8797 if (c == -1)
8798 return fold_build2_loc (loc, RDIV_EXPR, type,
8799 build_real (type, dconst1), arg0);
8802 return NULL_TREE;
8805 /* A subroutine of fold_builtin to fold the various exponent
8806 functions. Return NULL_TREE if no simplification can be made.
8807 FUNC is the corresponding MPFR exponent function. */
8809 static tree
8810 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8811 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8813 if (validate_arg (arg, REAL_TYPE))
8815 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8816 tree res;
8818 /* Calculate the result when the argument is a constant. */
8819 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8820 return res;
8822 /* Optimize expN(logN(x)) = x. */
8823 if (flag_unsafe_math_optimizations)
8825 const enum built_in_function fcode = builtin_mathfn_code (arg);
8827 if ((func == mpfr_exp
8828 && (fcode == BUILT_IN_LOG
8829 || fcode == BUILT_IN_LOGF
8830 || fcode == BUILT_IN_LOGL))
8831 || (func == mpfr_exp2
8832 && (fcode == BUILT_IN_LOG2
8833 || fcode == BUILT_IN_LOG2F
8834 || fcode == BUILT_IN_LOG2L))
8835 || (func == mpfr_exp10
8836 && (fcode == BUILT_IN_LOG10
8837 || fcode == BUILT_IN_LOG10F
8838 || fcode == BUILT_IN_LOG10L)))
8839 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8843 return NULL_TREE;
8846 /* Return true if VAR is a VAR_DECL or a component thereof. */
8848 static bool
8849 var_decl_component_p (tree var)
8851 tree inner = var;
8852 while (handled_component_p (inner))
8853 inner = TREE_OPERAND (inner, 0);
8854 return SSA_VAR_P (inner);
8857 /* Fold function call to builtin memset. Return
8858 NULL_TREE if no simplification can be made. */
8860 static tree
8861 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8862 tree type, bool ignore)
8864 tree var, ret, etype;
8865 unsigned HOST_WIDE_INT length, cval;
8867 if (! validate_arg (dest, POINTER_TYPE)
8868 || ! validate_arg (c, INTEGER_TYPE)
8869 || ! validate_arg (len, INTEGER_TYPE))
8870 return NULL_TREE;
8872 if (! host_integerp (len, 1))
8873 return NULL_TREE;
8875 /* If the LEN parameter is zero, return DEST. */
8876 if (integer_zerop (len))
8877 return omit_one_operand_loc (loc, type, dest, c);
8879 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8880 return NULL_TREE;
8882 var = dest;
8883 STRIP_NOPS (var);
8884 if (TREE_CODE (var) != ADDR_EXPR)
8885 return NULL_TREE;
8887 var = TREE_OPERAND (var, 0);
8888 if (TREE_THIS_VOLATILE (var))
8889 return NULL_TREE;
8891 etype = TREE_TYPE (var);
8892 if (TREE_CODE (etype) == ARRAY_TYPE)
8893 etype = TREE_TYPE (etype);
8895 if (!INTEGRAL_TYPE_P (etype)
8896 && !POINTER_TYPE_P (etype))
8897 return NULL_TREE;
8899 if (! var_decl_component_p (var))
8900 return NULL_TREE;
8902 length = tree_low_cst (len, 1);
8903 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8904 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8905 < (int) length)
8906 return NULL_TREE;
8908 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8909 return NULL_TREE;
8911 if (integer_zerop (c))
8912 cval = 0;
8913 else
8915 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8916 return NULL_TREE;
8918 cval = tree_low_cst (c, 1);
8919 cval &= 0xff;
8920 cval |= cval << 8;
8921 cval |= cval << 16;
8922 cval |= (cval << 31) << 1;
8925 ret = build_int_cst_type (etype, cval);
8926 var = build_fold_indirect_ref_loc (loc,
8927 fold_convert_loc (loc,
8928 build_pointer_type (etype),
8929 dest));
8930 ret = build2 (MODIFY_EXPR, etype, var, ret);
8931 if (ignore)
8932 return ret;
8934 return omit_one_operand_loc (loc, type, dest, ret);
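/* Editorial sketch (not part of GCC): when the destination is a suitably
   aligned scalar whose size equals LEN, the fold above replaces the call
   with a plain store, e.g. for a 32-bit int i

       memset (&i, 0, sizeof i)     ->  i = 0
       memset (&i, 0xab, sizeof i)  ->  i = 0xabababab

   where the stored value is the fill byte replicated across the width of
   the scalar type.  */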
8937 /* Fold function call to builtin bzero. Return
8938 NULL_TREE if no simplification can be made. */
8940 static tree
8941 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8943 if (! validate_arg (dest, POINTER_TYPE)
8944 || ! validate_arg (size, INTEGER_TYPE))
8945 return NULL_TREE;
8947 if (!ignore)
8948 return NULL_TREE;
8950 /* New argument list transforming bzero(ptr x, int y) to
8951 memset(ptr x, int 0, size_t y). This is done this way
8952 so that if it isn't expanded inline, we fall back to
8953 calling bzero instead of memset. */
8955 return fold_builtin_memset (loc, dest, integer_zero_node,
8956 fold_convert_loc (loc, sizetype, size),
8957 void_type_node, ignore);
8960 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8961 NULL_TREE if no simplification can be made.
8962 If ENDP is 0, return DEST (like memcpy).
8963 If ENDP is 1, return DEST+LEN (like mempcpy).
8964 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8965 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8966 (memmove). */
8968 static tree
8969 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8970 tree len, tree type, bool ignore, int endp)
8972 tree destvar, srcvar, expr;
8974 if (! validate_arg (dest, POINTER_TYPE)
8975 || ! validate_arg (src, POINTER_TYPE)
8976 || ! validate_arg (len, INTEGER_TYPE))
8977 return NULL_TREE;
8979 /* If the LEN parameter is zero, return DEST. */
8980 if (integer_zerop (len))
8981 return omit_one_operand_loc (loc, type, dest, src);
8983 /* If SRC and DEST are the same (and not volatile), return
8984 DEST{,+LEN,+LEN-1}. */
8985 if (operand_equal_p (src, dest, 0))
8986 expr = len;
8987 else
8989 tree srctype, desttype;
8990 int src_align, dest_align;
8992 if (endp == 3)
8994 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8995 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8997 /* Both DEST and SRC must be pointer types.
8998 ??? This is what old code did. Is the testing for pointer types
8999 really mandatory?
9001 If SRC is readonly, or LEN is no larger than the common alignment of SRC and DEST, we can use memcpy. */
9002 if (!dest_align || !src_align)
9003 return NULL_TREE;
9004 if (readonly_data_expr (src)
9005 || (host_integerp (len, 1)
9006 && (MIN (src_align, dest_align) / BITS_PER_UNIT
9007 >= tree_low_cst (len, 1))))
9009 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9010 if (!fn)
9011 return NULL_TREE;
9012 return build_call_expr_loc (loc, fn, 3, dest, src, len);
9015 /* If *src and *dest can't overlap, optimize into memcpy as well. */
9016 srcvar = build_fold_indirect_ref_loc (loc, src);
9017 destvar = build_fold_indirect_ref_loc (loc, dest);
9018 if (srcvar
9019 && !TREE_THIS_VOLATILE (srcvar)
9020 && destvar
9021 && !TREE_THIS_VOLATILE (destvar))
9023 tree src_base, dest_base, fn;
9024 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
9025 HOST_WIDE_INT size = -1;
9026 HOST_WIDE_INT maxsize = -1;
9028 src_base = srcvar;
9029 if (handled_component_p (src_base))
9030 src_base = get_ref_base_and_extent (src_base, &src_offset,
9031 &size, &maxsize);
9032 dest_base = destvar;
9033 if (handled_component_p (dest_base))
9034 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
9035 &size, &maxsize);
9036 if (host_integerp (len, 1))
9038 maxsize = tree_low_cst (len, 1);
9039 if (maxsize
9040 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
9041 maxsize = -1;
9042 else
9043 maxsize *= BITS_PER_UNIT;
9045 else
9046 maxsize = -1;
9047 if (SSA_VAR_P (src_base)
9048 && SSA_VAR_P (dest_base))
9050 if (operand_equal_p (src_base, dest_base, 0)
9051 && ranges_overlap_p (src_offset, maxsize,
9052 dest_offset, maxsize))
9053 return NULL_TREE;
9055 else if (TREE_CODE (src_base) == INDIRECT_REF
9056 && TREE_CODE (dest_base) == INDIRECT_REF)
9058 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
9059 TREE_OPERAND (dest_base, 0), 0)
9060 || ranges_overlap_p (src_offset, maxsize,
9061 dest_offset, maxsize))
9062 return NULL_TREE;
9064 else
9065 return NULL_TREE;
9067 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9068 if (!fn)
9069 return NULL_TREE;
9070 return build_call_expr_loc (loc, fn, 3, dest, src, len);
9072 return NULL_TREE;
9075 if (!host_integerp (len, 0))
9076 return NULL_TREE;
9077 /* FIXME:
9078 This logic loses for arguments like (type *)malloc (sizeof (type)),
9079 since we strip the casts up to the VOID return value from malloc.
9080 Perhaps we ought to inherit the type from the non-VOID argument here? */
9081 STRIP_NOPS (src);
9082 STRIP_NOPS (dest);
9083 /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here. */
9084 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
9086 tree tem = TREE_OPERAND (src, 0);
9087 STRIP_NOPS (tem);
9088 if (tem != TREE_OPERAND (src, 0))
9089 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
9091 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
9093 tree tem = TREE_OPERAND (dest, 0);
9094 STRIP_NOPS (tem);
9095 if (tem != TREE_OPERAND (dest, 0))
9096 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
9098 srctype = TREE_TYPE (TREE_TYPE (src));
9099 if (srctype
9100 && TREE_CODE (srctype) == ARRAY_TYPE
9101 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
9103 srctype = TREE_TYPE (srctype);
9104 STRIP_NOPS (src);
9105 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
9107 desttype = TREE_TYPE (TREE_TYPE (dest));
9108 if (desttype
9109 && TREE_CODE (desttype) == ARRAY_TYPE
9110 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
9112 desttype = TREE_TYPE (desttype);
9113 STRIP_NOPS (dest);
9114 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
9116 if (!srctype || !desttype
9117 || !TYPE_SIZE_UNIT (srctype)
9118 || !TYPE_SIZE_UNIT (desttype)
9119 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
9120 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
9121 || TYPE_VOLATILE (srctype)
9122 || TYPE_VOLATILE (desttype))
9123 return NULL_TREE;
9125 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
9126 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
9127 if (dest_align < (int) TYPE_ALIGN (desttype)
9128 || src_align < (int) TYPE_ALIGN (srctype))
9129 return NULL_TREE;
9131 if (!ignore)
9132 dest = builtin_save_expr (dest);
9134 srcvar = NULL_TREE;
9135 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
9137 srcvar = build_fold_indirect_ref_loc (loc, src);
9138 if (TREE_THIS_VOLATILE (srcvar))
9139 return NULL_TREE;
9140 else if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
9141 srcvar = NULL_TREE;
9142 /* With memcpy, it is possible to bypass aliasing rules, so without
9143 this check, e.g. execute/20060930-2.c would be misoptimized,
9144 because it uses a conflicting alias set to hold the argument for the
9145 memcpy call. This check is probably unnecessary with
9146 -fno-strict-aliasing. Similarly for destvar. See also
9147 PR29286. */
9148 else if (!var_decl_component_p (srcvar))
9149 srcvar = NULL_TREE;
9152 destvar = NULL_TREE;
9153 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
9155 destvar = build_fold_indirect_ref_loc (loc, dest);
9156 if (TREE_THIS_VOLATILE (destvar))
9157 return NULL_TREE;
9158 else if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
9159 destvar = NULL_TREE;
9160 else if (!var_decl_component_p (destvar))
9161 destvar = NULL_TREE;
9164 if (srcvar == NULL_TREE && destvar == NULL_TREE)
9165 return NULL_TREE;
9167 if (srcvar == NULL_TREE)
9169 tree srcptype;
9170 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
9171 return NULL_TREE;
9173 srctype = build_qualified_type (desttype, 0);
9174 if (src_align < (int) TYPE_ALIGN (srctype))
9176 if (AGGREGATE_TYPE_P (srctype)
9177 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
9178 return NULL_TREE;
9180 srctype = build_variant_type_copy (srctype);
9181 TYPE_ALIGN (srctype) = src_align;
9182 TYPE_USER_ALIGN (srctype) = 1;
9183 TYPE_PACKED (srctype) = 1;
9185 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
9186 src = fold_convert_loc (loc, srcptype, src);
9187 srcvar = build_fold_indirect_ref_loc (loc, src);
9189 else if (destvar == NULL_TREE)
9191 tree destptype;
9192 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
9193 return NULL_TREE;
9195 desttype = build_qualified_type (srctype, 0);
9196 if (dest_align < (int) TYPE_ALIGN (desttype))
9198 if (AGGREGATE_TYPE_P (desttype)
9199 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
9200 return NULL_TREE;
9202 desttype = build_variant_type_copy (desttype);
9203 TYPE_ALIGN (desttype) = dest_align;
9204 TYPE_USER_ALIGN (desttype) = 1;
9205 TYPE_PACKED (desttype) = 1;
9207 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
9208 dest = fold_convert_loc (loc, destptype, dest);
9209 destvar = build_fold_indirect_ref_loc (loc, dest);
9212 if (srctype == desttype
9213 || (gimple_in_ssa_p (cfun)
9214 && useless_type_conversion_p (desttype, srctype)))
9215 expr = srcvar;
9216 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
9217 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
9218 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
9219 || POINTER_TYPE_P (TREE_TYPE (destvar))))
9220 expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
9221 else
9222 expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
9223 TREE_TYPE (destvar), srcvar);
9224 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
9227 if (ignore)
9228 return expr;
9230 if (endp == 0 || endp == 3)
9231 return omit_one_operand_loc (loc, type, dest, expr);
9233 if (expr == len)
9234 expr = NULL_TREE;
9236 if (endp == 2)
9237 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
9238 ssize_int (1));
9240 len = fold_convert_loc (loc, sizetype, len);
9241 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
9242 dest = fold_convert_loc (loc, type, dest);
9243 if (expr)
9244 dest = omit_one_operand_loc (loc, type, dest, expr);
9245 return dest;
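/* Illustrative sketch (hypothetical helper, not used by the folder): the
   ENDP handling above means that, for instance, a mempcpy call is folded so
   the copy itself is done as memcpy and the returned pointer is computed
   directly as DEST + LEN.  Source-level equivalent, under those
   assumptions:  */
static inline void *
example_mempcpy_fold (void *d, const void *s, __SIZE_TYPE__ n)
{
  __builtin_memcpy (d, s, n);	/* The copy itself.  */
  return (char *) d + n;	/* endp == 1: return DEST + LEN.  */
}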
9248 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9249 If LEN is not NULL, it represents the length of the string to be
9250 copied. Return NULL_TREE if no simplification can be made. */
9252 tree
9253 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
9255 tree fn;
9257 if (!validate_arg (dest, POINTER_TYPE)
9258 || !validate_arg (src, POINTER_TYPE))
9259 return NULL_TREE;
9261 /* If SRC and DEST are the same (and not volatile), return DEST. */
9262 if (operand_equal_p (src, dest, 0))
9263 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
9265 if (optimize_function_for_size_p (cfun))
9266 return NULL_TREE;
9268 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9269 if (!fn)
9270 return NULL_TREE;
9272 if (!len)
9274 len = c_strlen (src, 1);
9275 if (! len || TREE_SIDE_EFFECTS (len))
9276 return NULL_TREE;
9279 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
9280 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9281 build_call_expr_loc (loc, fn, 3, dest, src, len));
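/* Illustrative sketch (hypothetical helper, not used by the folder): when
   the source string length is known at compile time and we are not
   optimizing for size, the strcpy fold above emits memcpy with
   strlen (SRC) + 1 bytes so the terminating NUL is copied as well.  */
static inline char *
example_strcpy_fold (char *d)
{
  /* strcpy (d, "hi") is folded as if written:  */
  __builtin_memcpy (d, "hi", 3);	/* strlen ("hi") + 1 == 3.  */
  return d;				/* strcpy returns DEST.  */
}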
9284 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9285 If SLEN is not NULL, it represents the length of the source string.
9286 Return NULL_TREE if no simplification can be made. */
9288 tree
9289 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9290 tree src, tree len, tree slen)
9292 tree fn;
9294 if (!validate_arg (dest, POINTER_TYPE)
9295 || !validate_arg (src, POINTER_TYPE)
9296 || !validate_arg (len, INTEGER_TYPE))
9297 return NULL_TREE;
9299 /* If the LEN parameter is zero, return DEST. */
9300 if (integer_zerop (len))
9301 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9303 /* We can't compare slen with len as constants below if len is not a
9304 constant. */
9305 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9306 return NULL_TREE;
9308 if (!slen)
9309 slen = c_strlen (src, 1);
9311 /* Now, we must be passed a constant src ptr parameter. */
9312 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9313 return NULL_TREE;
9315 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9317 /* We do not support simplification of this case, though we do
9318 support it when expanding trees into RTL. */
9319 /* FIXME: generate a call to __builtin_memset. */
9320 if (tree_int_cst_lt (slen, len))
9321 return NULL_TREE;
9323 /* OK, transform into builtin memcpy. */
9324 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9325 if (!fn)
9326 return NULL_TREE;
9327 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9328 build_call_expr_loc (loc, fn, 3, dest, src, len));
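/* Illustrative sketch (hypothetical helper, not used by the folder): the
   strncpy fold above applies only when LEN is a constant not exceeding
   strlen (SRC) + 1, so no zero padding is needed and strncpy behaves
   exactly like memcpy of LEN bytes.  */
static inline char *
example_strncpy_fold (char *d)
{
  /* strncpy (d, "hi", 2) is folded as if written:  */
  __builtin_memcpy (d, "hi", 2);
  return d;
}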
9331 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9332 arguments to the call, and TYPE is its return type.
9333 Return NULL_TREE if no simplification can be made. */
9335 static tree
9336 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9338 if (!validate_arg (arg1, POINTER_TYPE)
9339 || !validate_arg (arg2, INTEGER_TYPE)
9340 || !validate_arg (len, INTEGER_TYPE))
9341 return NULL_TREE;
9342 else
9344 const char *p1;
9346 if (TREE_CODE (arg2) != INTEGER_CST
9347 || !host_integerp (len, 1))
9348 return NULL_TREE;
9350 p1 = c_getstr (arg1);
9351 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9353 char c;
9354 const char *r;
9355 tree tem;
9357 if (target_char_cast (arg2, &c))
9358 return NULL_TREE;
9360 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
9362 if (r == NULL)
9363 return build_int_cst (TREE_TYPE (arg1), 0);
9365 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
9366 size_int (r - p1));
9367 return fold_convert_loc (loc, type, tem);
9369 return NULL_TREE;
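/* Illustrative sketch (hypothetical helper, not used by the folder): with a
   constant string, a constant character and a constant length, the memchr
   fold above evaluates the search at compile time; a miss becomes a null
   pointer and a hit becomes ARG1 plus the offset of the match.  */
static inline const char *
example_memchr_fold (void)
{
  /* memchr ("abc", 'b', 3) folds to the string literal plus 1.  */
  return (const char *) __builtin_memchr ("abc", 'b', 3);
}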
9373 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9374 Return NULL_TREE if no simplification can be made. */
9376 static tree
9377 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9379 const char *p1, *p2;
9381 if (!validate_arg (arg1, POINTER_TYPE)
9382 || !validate_arg (arg2, POINTER_TYPE)
9383 || !validate_arg (len, INTEGER_TYPE))
9384 return NULL_TREE;
9386 /* If the LEN parameter is zero, return zero. */
9387 if (integer_zerop (len))
9388 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9389 arg1, arg2);
9391 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9392 if (operand_equal_p (arg1, arg2, 0))
9393 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9395 p1 = c_getstr (arg1);
9396 p2 = c_getstr (arg2);
9398 /* If all arguments are constant, and the value of len is not greater
9399 than the lengths of arg1 and arg2, evaluate at compile-time. */
9400 if (host_integerp (len, 1) && p1 && p2
9401 && compare_tree_int (len, strlen (p1) + 1) <= 0
9402 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9404 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9406 if (r > 0)
9407 return integer_one_node;
9408 else if (r < 0)
9409 return integer_minus_one_node;
9410 else
9411 return integer_zero_node;
9414 /* If the len parameter is one, return an expression corresponding to
9415 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9416 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9418 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9419 tree cst_uchar_ptr_node
9420 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9422 tree ind1
9423 = fold_convert_loc (loc, integer_type_node,
9424 build1 (INDIRECT_REF, cst_uchar_node,
9425 fold_convert_loc (loc,
9426 cst_uchar_ptr_node,
9427 arg1)));
9428 tree ind2
9429 = fold_convert_loc (loc, integer_type_node,
9430 build1 (INDIRECT_REF, cst_uchar_node,
9431 fold_convert_loc (loc,
9432 cst_uchar_ptr_node,
9433 arg2)));
9434 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9437 return NULL_TREE;
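/* Illustrative sketch (hypothetical helper, not used by the folder): the
   LEN == 1 case above reduces memcmp to a subtraction of the first bytes,
   each read as an unsigned char and widened to int.  */
static inline int
example_memcmp_len1_fold (const void *a, const void *b)
{
  /* memcmp (a, b, 1) is folded as if written:  */
  return (int) *(const unsigned char *) a - (int) *(const unsigned char *) b;
}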
9440 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9441 Return NULL_TREE if no simplification can be made. */
9443 static tree
9444 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9446 const char *p1, *p2;
9448 if (!validate_arg (arg1, POINTER_TYPE)
9449 || !validate_arg (arg2, POINTER_TYPE))
9450 return NULL_TREE;
9452 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9453 if (operand_equal_p (arg1, arg2, 0))
9454 return integer_zero_node;
9456 p1 = c_getstr (arg1);
9457 p2 = c_getstr (arg2);
9459 if (p1 && p2)
9461 const int i = strcmp (p1, p2);
9462 if (i < 0)
9463 return integer_minus_one_node;
9464 else if (i > 0)
9465 return integer_one_node;
9466 else
9467 return integer_zero_node;
9470 /* If the second arg is "", return *(const unsigned char*)arg1. */
9471 if (p2 && *p2 == '\0')
9473 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9474 tree cst_uchar_ptr_node
9475 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9477 return fold_convert_loc (loc, integer_type_node,
9478 build1 (INDIRECT_REF, cst_uchar_node,
9479 fold_convert_loc (loc,
9480 cst_uchar_ptr_node,
9481 arg1)));
9484 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9485 if (p1 && *p1 == '\0')
9487 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9488 tree cst_uchar_ptr_node
9489 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9491 tree temp
9492 = fold_convert_loc (loc, integer_type_node,
9493 build1 (INDIRECT_REF, cst_uchar_node,
9494 fold_convert_loc (loc,
9495 cst_uchar_ptr_node,
9496 arg2)));
9497 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9500 return NULL_TREE;
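/* Illustrative sketch (hypothetical helper, not used by the folder): when
   the second argument is known to be the empty string, the strcmp fold
   above reduces the call to a single character load.  */
static inline int
example_strcmp_empty_fold (const char *s)
{
  /* strcmp (s, "") is folded as if written:  */
  return (int) *(const unsigned char *) s;
}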
9503 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9504 Return NULL_TREE if no simplification can be made. */
9506 static tree
9507 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9509 const char *p1, *p2;
9511 if (!validate_arg (arg1, POINTER_TYPE)
9512 || !validate_arg (arg2, POINTER_TYPE)
9513 || !validate_arg (len, INTEGER_TYPE))
9514 return NULL_TREE;
9516 /* If the LEN parameter is zero, return zero. */
9517 if (integer_zerop (len))
9518 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9519 arg1, arg2);
9521 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9522 if (operand_equal_p (arg1, arg2, 0))
9523 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9525 p1 = c_getstr (arg1);
9526 p2 = c_getstr (arg2);
9528 if (host_integerp (len, 1) && p1 && p2)
9530 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9531 if (i > 0)
9532 return integer_one_node;
9533 else if (i < 0)
9534 return integer_minus_one_node;
9535 else
9536 return integer_zero_node;
9539 /* If the second arg is "", and the length is greater than zero,
9540 return *(const unsigned char*)arg1. */
9541 if (p2 && *p2 == '\0'
9542 && TREE_CODE (len) == INTEGER_CST
9543 && tree_int_cst_sgn (len) == 1)
9545 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9546 tree cst_uchar_ptr_node
9547 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9549 return fold_convert_loc (loc, integer_type_node,
9550 build1 (INDIRECT_REF, cst_uchar_node,
9551 fold_convert_loc (loc,
9552 cst_uchar_ptr_node,
9553 arg1)));
9556 /* If the first arg is "", and the length is greater than zero,
9557 return -*(const unsigned char*)arg2. */
9558 if (p1 && *p1 == '\0'
9559 && TREE_CODE (len) == INTEGER_CST
9560 && tree_int_cst_sgn (len) == 1)
9562 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9563 tree cst_uchar_ptr_node
9564 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9566 tree temp = fold_convert_loc (loc, integer_type_node,
9567 build1 (INDIRECT_REF, cst_uchar_node,
9568 fold_convert_loc (loc,
9569 cst_uchar_ptr_node,
9570 arg2)));
9571 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9574 /* If the len parameter is one, return an expression corresponding to
9575 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9576 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9578 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9579 tree cst_uchar_ptr_node
9580 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9582 tree ind1 = fold_convert_loc (loc, integer_type_node,
9583 build1 (INDIRECT_REF, cst_uchar_node,
9584 fold_convert_loc (loc,
9585 cst_uchar_ptr_node,
9586 arg1)));
9587 tree ind2 = fold_convert_loc (loc, integer_type_node,
9588 build1 (INDIRECT_REF, cst_uchar_node,
9589 fold_convert_loc (loc,
9590 cst_uchar_ptr_node,
9591 arg2)));
9592 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9595 return NULL_TREE;
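/* Illustrative sketch (hypothetical helper, not used by the folder): with a
   positive constant LEN and an empty first argument, the strncmp fold above
   yields the negated first byte of the second argument.  */
static inline int
example_strncmp_empty_fold (const char *s2)
{
  /* strncmp ("", s2, 4) is folded as if written:  */
  return -(int) *(const unsigned char *) s2;
}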
9598 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9599 ARG. Return NULL_TREE if no simplification can be made. */
9601 static tree
9602 fold_builtin_signbit (location_t loc, tree arg, tree type)
9604 tree temp;
9606 if (!validate_arg (arg, REAL_TYPE))
9607 return NULL_TREE;
9609 /* If ARG is a compile-time constant, determine the result. */
9610 if (TREE_CODE (arg) == REAL_CST
9611 && !TREE_OVERFLOW (arg))
9613 REAL_VALUE_TYPE c;
9615 c = TREE_REAL_CST (arg);
9616 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9617 return fold_convert_loc (loc, type, temp);
9620 /* If ARG is non-negative, the result is always zero. */
9621 if (tree_expr_nonnegative_p (arg))
9622 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9624 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9625 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9626 return fold_build2_loc (loc, LT_EXPR, type, arg,
9627 build_real (TREE_TYPE (arg), dconst0));
9629 return NULL_TREE;
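/* Illustrative sketch (hypothetical helper, not used by the folder): when
   the argument's format does not have signed zeros, the signbit fold above
   becomes an ordinary comparison.  This assumes such a mode; for IEEE
   doubles the compile-time-constant and non-negative cases apply instead.  */
static inline int
example_signbit_fold (double x)
{
  /* Under that assumption, signbit (x) is folded as if written:  */
  return x < 0.0;
}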
9632 /* Fold function call to builtin copysign, copysignf or copysignl with
9633 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9634 be made. */
9636 static tree
9637 fold_builtin_copysign (location_t loc, tree fndecl,
9638 tree arg1, tree arg2, tree type)
9640 tree tem;
9642 if (!validate_arg (arg1, REAL_TYPE)
9643 || !validate_arg (arg2, REAL_TYPE))
9644 return NULL_TREE;
9646 /* copysign(X,X) is X. */
9647 if (operand_equal_p (arg1, arg2, 0))
9648 return fold_convert_loc (loc, type, arg1);
9650 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9651 if (TREE_CODE (arg1) == REAL_CST
9652 && TREE_CODE (arg2) == REAL_CST
9653 && !TREE_OVERFLOW (arg1)
9654 && !TREE_OVERFLOW (arg2))
9656 REAL_VALUE_TYPE c1, c2;
9658 c1 = TREE_REAL_CST (arg1);
9659 c2 = TREE_REAL_CST (arg2);
9660 /* c1.sign := c2.sign. */
9661 real_copysign (&c1, &c2);
9662 return build_real (type, c1);
9665 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9666 Remember to evaluate Y for side-effects. */
9667 if (tree_expr_nonnegative_p (arg2))
9668 return omit_one_operand_loc (loc, type,
9669 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9670 arg2);
9672 /* Strip sign changing operations for the first argument. */
9673 tem = fold_strip_sign_ops (arg1);
9674 if (tem)
9675 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9677 return NULL_TREE;
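/* Illustrative sketch (hypothetical helper, not used by the folder): with a
   second argument known to be non-negative, the copysign fold above drops
   the sign transfer entirely.  */
static inline double
example_copysign_fold (double x)
{
  /* copysign (x, 2.0) is folded as if written:  */
  return __builtin_fabs (x);
}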
9680 /* Fold a call to builtin isascii with argument ARG. */
9682 static tree
9683 fold_builtin_isascii (location_t loc, tree arg)
9685 if (!validate_arg (arg, INTEGER_TYPE))
9686 return NULL_TREE;
9687 else
9689 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9690 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9691 build_int_cst (NULL_TREE,
9692 ~ (unsigned HOST_WIDE_INT) 0x7f));
9693 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9694 arg, integer_zero_node);
9698 /* Fold a call to builtin toascii with argument ARG. */
9700 static tree
9701 fold_builtin_toascii (location_t loc, tree arg)
9703 if (!validate_arg (arg, INTEGER_TYPE))
9704 return NULL_TREE;
9706 /* Transform toascii(c) -> (c & 0x7f). */
9707 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9708 build_int_cst (NULL_TREE, 0x7f));
9711 /* Fold a call to builtin isdigit with argument ARG. */
9713 static tree
9714 fold_builtin_isdigit (location_t loc, tree arg)
9716 if (!validate_arg (arg, INTEGER_TYPE))
9717 return NULL_TREE;
9718 else
9720 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9721 /* According to the C standard, isdigit is unaffected by locale.
9722 However, it definitely is affected by the target character set. */
9723 unsigned HOST_WIDE_INT target_digit0
9724 = lang_hooks.to_target_charset ('0');
9726 if (target_digit0 == 0)
9727 return NULL_TREE;
9729 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9730 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9731 build_int_cst (unsigned_type_node, target_digit0));
9732 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9733 build_int_cst (unsigned_type_node, 9));
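/* Illustrative sketch (hypothetical helper, not used by the folder): the
   isdigit fold above relies on the digits being contiguous in the target
   character set, so the range check can be done with a single unsigned
   comparison.  */
static inline int
example_isdigit_fold (int c)
{
  /* isdigit (c) is folded as if written:  */
  return (unsigned int) c - '0' <= 9u;
}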
9737 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9739 static tree
9740 fold_builtin_fabs (location_t loc, tree arg, tree type)
9742 if (!validate_arg (arg, REAL_TYPE))
9743 return NULL_TREE;
9745 arg = fold_convert_loc (loc, type, arg);
9746 if (TREE_CODE (arg) == REAL_CST)
9747 return fold_abs_const (arg, type);
9748 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9751 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9753 static tree
9754 fold_builtin_abs (location_t loc, tree arg, tree type)
9756 if (!validate_arg (arg, INTEGER_TYPE))
9757 return NULL_TREE;
9759 arg = fold_convert_loc (loc, type, arg);
9760 if (TREE_CODE (arg) == INTEGER_CST)
9761 return fold_abs_const (arg, type);
9762 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9765 /* Fold a call to builtin fmin or fmax. */
9767 static tree
9768 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9769 tree type, bool max)
9771 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9773 /* Calculate the result when the argument is a constant. */
9774 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9776 if (res)
9777 return res;
9779 /* If either argument is NaN, return the other one. Avoid the
9780 transformation if we get (and honor) a signalling NaN. Using
9781 omit_one_operand() ensures we create a non-lvalue. */
9782 if (TREE_CODE (arg0) == REAL_CST
9783 && real_isnan (&TREE_REAL_CST (arg0))
9784 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9785 || ! TREE_REAL_CST (arg0).signalling))
9786 return omit_one_operand_loc (loc, type, arg1, arg0);
9787 if (TREE_CODE (arg1) == REAL_CST
9788 && real_isnan (&TREE_REAL_CST (arg1))
9789 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9790 || ! TREE_REAL_CST (arg1).signalling))
9791 return omit_one_operand_loc (loc, type, arg0, arg1);
9793 /* Transform fmin/fmax(x,x) -> x. */
9794 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9795 return omit_one_operand_loc (loc, type, arg0, arg1);
9797 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9798 functions to return the numeric arg if the other one is NaN.
9799 These tree codes don't honor that, so only transform if
9800 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9801 handled, so we don't have to worry about it either. */
9802 if (flag_finite_math_only)
9803 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9804 fold_convert_loc (loc, type, arg0),
9805 fold_convert_loc (loc, type, arg1));
9807 return NULL_TREE;
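/* Illustrative sketch (hypothetical helper, not used by the folder): under
   -ffinite-math-only the fold above lowers fmin/fmax to plain
   MIN_EXPR/MAX_EXPR, which at the source level is a conditional with no NaN
   handling.  */
static inline double
example_fmax_finite_fold (double x, double y)
{
  /* fmax (x, y) with -ffinite-math-only is folded as if written:  */
  return x > y ? x : y;
}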
9810 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9812 static tree
9813 fold_builtin_carg (location_t loc, tree arg, tree type)
9815 if (validate_arg (arg, COMPLEX_TYPE)
9816 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9818 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9820 if (atan2_fn)
9822 tree new_arg = builtin_save_expr (arg);
9823 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9824 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9825 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9829 return NULL_TREE;
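/* Illustrative sketch (hypothetical helper, not used by the folder): the
   carg fold above at the source level; note the argument order, with the
   imaginary part passed first.  */
static inline double
example_carg_fold (_Complex double z)
{
  /* carg (z) is folded as if written:  */
  return __builtin_atan2 (__imag__ z, __real__ z);
}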
9832 /* Fold a call to builtin logb/ilogb. */
9834 static tree
9835 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9837 if (! validate_arg (arg, REAL_TYPE))
9838 return NULL_TREE;
9840 STRIP_NOPS (arg);
9842 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9844 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9846 switch (value->cl)
9848 case rvc_nan:
9849 case rvc_inf:
9850 /* If arg is Inf or NaN and we're logb, return it. */
9851 if (TREE_CODE (rettype) == REAL_TYPE)
9852 return fold_convert_loc (loc, rettype, arg);
9853 /* Fall through... */
9854 case rvc_zero:
9855 /* Zero may set errno and/or raise an exception for logb; also,
9856 for ilogb we don't know FP_ILOGB0. */
9857 return NULL_TREE;
9858 case rvc_normal:
9859 /* For normal numbers, proceed iff radix == 2. In GCC,
9860 normalized significands are in the range [0.5, 1.0). We
9861 want the exponent as if they were [1.0, 2.0) so get the
9862 exponent and subtract 1. */
9863 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9864 return fold_convert_loc (loc, rettype,
9865 build_int_cst (NULL_TREE,
9866 REAL_EXP (value)-1));
9867 break;
9871 return NULL_TREE;
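/* Illustrative sketch (hypothetical helper, not used by the folder): for a
   radix-2 constant the logb fold above returns the unbiased exponent; GCC
   stores 8.0 with a [0.5, 1.0) significand and exponent 4, and the code
   subtracts 1 from that stored exponent.  */
static inline double
example_logb_fold (void)
{
  return __builtin_logb (8.0);	/* Folds to the constant 3.0.  */
}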
9874 /* Fold a call to builtin significand, if radix == 2. */
9876 static tree
9877 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9879 if (! validate_arg (arg, REAL_TYPE))
9880 return NULL_TREE;
9882 STRIP_NOPS (arg);
9884 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9886 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9888 switch (value->cl)
9890 case rvc_zero:
9891 case rvc_nan:
9892 case rvc_inf:
9893 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9894 return fold_convert_loc (loc, rettype, arg);
9895 case rvc_normal:
9896 /* For normal numbers, proceed iff radix == 2. */
9897 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9899 REAL_VALUE_TYPE result = *value;
9900 /* In GCC, normalized significands are in the range [0.5,
9901 1.0). We want them to be [1.0, 2.0) so set the
9902 exponent to 1. */
9903 SET_REAL_EXP (&result, 1);
9904 return build_real (rettype, result);
9906 break;
9910 return NULL_TREE;
9913 /* Fold a call to builtin frexp, we can assume the base is 2. */
9915 static tree
9916 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9918 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9919 return NULL_TREE;
9921 STRIP_NOPS (arg0);
9923 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9924 return NULL_TREE;
9926 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9928 /* Proceed if a valid pointer type was passed in. */
9929 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9931 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9932 tree frac, exp;
9934 switch (value->cl)
9936 case rvc_zero:
9937 /* For +-0, return (*exp = 0, +-0). */
9938 exp = integer_zero_node;
9939 frac = arg0;
9940 break;
9941 case rvc_nan:
9942 case rvc_inf:
9943 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9944 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9945 case rvc_normal:
9947 /* Since the frexp function always expects base 2, and in
9948 GCC normalized significands are already in the range
9949 [0.5, 1.0), we have exactly what frexp wants. */
9950 REAL_VALUE_TYPE frac_rvt = *value;
9951 SET_REAL_EXP (&frac_rvt, 0);
9952 frac = build_real (rettype, frac_rvt);
9953 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9955 break;
9956 default:
9957 gcc_unreachable ();
9960 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9961 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9962 TREE_SIDE_EFFECTS (arg1) = 1;
9963 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9966 return NULL_TREE;
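/* Illustrative sketch (hypothetical helper, not used by the folder): with a
   constant argument the frexp fold above computes both results at compile
   time, since GCC's significands are already in frexp's [0.5, 1.0) range.  */
static inline double
example_frexp_fold (int *e)
{
  /* frexp (8.0, e) folds to a compound expression that stores 4 in *e and
     yields 0.5.  */
  return __builtin_frexp (8.0, e);
}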
9969 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9970 then we can assume the base is two. If it's false, then we have to
9971 check the mode of the TYPE parameter in certain cases. */
9973 static tree
9974 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9975 tree type, bool ldexp)
9977 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9979 STRIP_NOPS (arg0);
9980 STRIP_NOPS (arg1);
9982 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9983 if (real_zerop (arg0) || integer_zerop (arg1)
9984 || (TREE_CODE (arg0) == REAL_CST
9985 && !real_isfinite (&TREE_REAL_CST (arg0))))
9986 return omit_one_operand_loc (loc, type, arg0, arg1);
9988 /* If both arguments are constant, then try to evaluate it. */
9989 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9990 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9991 && host_integerp (arg1, 0))
9993 /* Bound the maximum adjustment to twice the range of the
9994 mode's valid exponents. Use abs to ensure the range is
9995 positive as a sanity check. */
9996 const long max_exp_adj = 2 *
9997 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9998 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
10000 /* Get the user-requested adjustment. */
10001 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
10003 /* The requested adjustment must be inside this range. This
10004 is a preliminary cap to avoid things like overflow; we
10005 may still fail to compute the result for other reasons. */
10006 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
10008 REAL_VALUE_TYPE initial_result;
10010 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
10012 /* Ensure we didn't overflow. */
10013 if (! real_isinf (&initial_result))
10015 const REAL_VALUE_TYPE trunc_result
10016 = real_value_truncate (TYPE_MODE (type), initial_result);
10018 /* Only proceed if the target mode can hold the
10019 resulting value. */
10020 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
10021 return build_real (type, trunc_result);
10027 return NULL_TREE;
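/* Illustrative sketch (hypothetical helper, not used by the folder): with
   constant arguments inside the mode's exponent range, the ldexp fold above
   evaluates the scaling at compile time.  */
static inline double
example_ldexp_fold (void)
{
  return __builtin_ldexp (0.75, 4);	/* 0.75 * 2**4 folds to 12.0.  */
}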
10030 /* Fold a call to builtin modf. */
10032 static tree
10033 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
10035 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
10036 return NULL_TREE;
10038 STRIP_NOPS (arg0);
10040 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
10041 return NULL_TREE;
10043 arg1 = build_fold_indirect_ref_loc (loc, arg1);
10045 /* Proceed if a valid pointer type was passed in. */
10046 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
10048 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
10049 REAL_VALUE_TYPE trunc, frac;
10051 switch (value->cl)
10053 case rvc_nan:
10054 case rvc_zero:
10055 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
10056 trunc = frac = *value;
10057 break;
10058 case rvc_inf:
10059 /* For +-Inf, return (*arg1 = arg0, +-0). */
10060 frac = dconst0;
10061 frac.sign = value->sign;
10062 trunc = *value;
10063 break;
10064 case rvc_normal:
10065 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
10066 real_trunc (&trunc, VOIDmode, value);
10067 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
10068 /* If the original number was negative and already
10069 integral, then the fractional part is -0.0. */
10070 if (value->sign && frac.cl == rvc_zero)
10071 frac.sign = value->sign;
10072 break;
10075 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
10076 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
10077 build_real (rettype, trunc));
10078 TREE_SIDE_EFFECTS (arg1) = 1;
10079 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
10080 build_real (rettype, frac));
10083 return NULL_TREE;
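/* Illustrative sketch (hypothetical helper, not used by the folder): with a
   constant argument the modf fold above splits the value at compile time
   into its integral and fractional parts.  */
static inline double
example_modf_fold (double *ip)
{
  /* modf (2.5, ip) folds to a compound expression that stores 2.0 in *ip
     and yields 0.5.  */
  return __builtin_modf (2.5, ip);
}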
10086 /* Fold a call to __builtin_isnan(), __builtin_isinf() or __builtin_finite().
10087 ARG is the argument for the call. */
10089 static tree
10090 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10092 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10093 REAL_VALUE_TYPE r;
10095 if (!validate_arg (arg, REAL_TYPE))
10096 return NULL_TREE;
10098 switch (builtin_index)
10100 case BUILT_IN_ISINF:
10101 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10102 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10104 if (TREE_CODE (arg) == REAL_CST)
10106 r = TREE_REAL_CST (arg);
10107 if (real_isinf (&r))
10108 return real_compare (GT_EXPR, &r, &dconst0)
10109 ? integer_one_node : integer_minus_one_node;
10110 else
10111 return integer_zero_node;
10114 return NULL_TREE;
10116 case BUILT_IN_ISINF_SIGN:
10118 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10119 /* In a boolean context, GCC will fold the inner COND_EXPR to
10120 1. So e.g. "if (isinf_sign(x))" would be folded to just
10121 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10122 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10123 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
10124 tree tmp = NULL_TREE;
10126 arg = builtin_save_expr (arg);
10128 if (signbit_fn && isinf_fn)
10130 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10131 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10133 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10134 signbit_call, integer_zero_node);
10135 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10136 isinf_call, integer_zero_node);
10138 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10139 integer_minus_one_node, integer_one_node);
10140 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10141 isinf_call, tmp,
10142 integer_zero_node);
10145 return tmp;
10148 case BUILT_IN_ISFINITE:
10149 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10150 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10151 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10153 if (TREE_CODE (arg) == REAL_CST)
10155 r = TREE_REAL_CST (arg);
10156 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10159 return NULL_TREE;
10161 case BUILT_IN_ISNAN:
10162 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10163 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10165 if (TREE_CODE (arg) == REAL_CST)
10167 r = TREE_REAL_CST (arg);
10168 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10171 arg = builtin_save_expr (arg);
10172 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10174 default:
10175 gcc_unreachable ();
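/* Illustrative sketch (hypothetical helper, not used by the folder): the
   isinf_sign expansion built above, written out at the source level.  */
static inline int
example_isinf_sign_fold (double x)
{
  return __builtin_isinf (x)
	 ? (__builtin_signbit (x) ? -1 : 1)
	 : 0;
}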
10179 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10180 This builtin will generate code to return the appropriate floating
10181 point classification depending on the value of the floating point
10182 number passed in. The possible return values must be supplied as
10183 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10184 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
10185 one floating point argument, which is "type generic". */
10187 static tree
10188 fold_builtin_fpclassify (location_t loc, tree exp)
10190 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10191 arg, type, res, tmp;
10192 enum machine_mode mode;
10193 REAL_VALUE_TYPE r;
10194 char buf[128];
10196 /* Verify the required arguments in the original call. */
10197 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10198 INTEGER_TYPE, INTEGER_TYPE,
10199 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10200 return NULL_TREE;
10202 fp_nan = CALL_EXPR_ARG (exp, 0);
10203 fp_infinite = CALL_EXPR_ARG (exp, 1);
10204 fp_normal = CALL_EXPR_ARG (exp, 2);
10205 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10206 fp_zero = CALL_EXPR_ARG (exp, 4);
10207 arg = CALL_EXPR_ARG (exp, 5);
10208 type = TREE_TYPE (arg);
10209 mode = TYPE_MODE (type);
10210 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10212 /* fpclassify(x) ->
10213 isnan(x) ? FP_NAN :
10214 (fabs(x) == Inf ? FP_INFINITE :
10215 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10216 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10218 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10219 build_real (type, dconst0));
10220 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10221 tmp, fp_zero, fp_subnormal);
10223 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10224 real_from_string (&r, buf);
10225 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10226 arg, build_real (type, r));
10227 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10229 if (HONOR_INFINITIES (mode))
10231 real_inf (&r);
10232 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10233 build_real (type, r));
10234 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10235 fp_infinite, res);
10238 if (HONOR_NANS (mode))
10240 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10241 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10244 return res;
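/* Illustrative sketch (hypothetical helper, not used by the folder): the
   fpclassify expansion built above, written out at the source level for a
   double argument; __DBL_MIN__ stands in for the smallest normal value of
   the argument's mode, and the FP_* values are whatever integer arguments
   the caller supplied.  */
static inline int
example_fpclassify_fold (double x, int fp_nan, int fp_infinite,
			 int fp_normal, int fp_subnormal, int fp_zero)
{
  double ax = __builtin_fabs (x);
  return x != x			? fp_nan
	 : ax == __builtin_inf () ? fp_infinite
	 : ax >= __DBL_MIN__	  ? fp_normal
	 : ax == 0.0		  ? fp_zero
	 :			    fp_subnormal;
}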
10247 /* Fold a call to an unordered comparison function such as
10248 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10249 being called and ARG0 and ARG1 are the arguments for the call.
10250 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10251 the opposite of the desired result. UNORDERED_CODE is used
10252 for modes that can hold NaNs and ORDERED_CODE is used for
10253 the rest. */
10255 static tree
10256 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10257 enum tree_code unordered_code,
10258 enum tree_code ordered_code)
10260 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10261 enum tree_code code;
10262 tree type0, type1;
10263 enum tree_code code0, code1;
10264 tree cmp_type = NULL_TREE;
10266 type0 = TREE_TYPE (arg0);
10267 type1 = TREE_TYPE (arg1);
10269 code0 = TREE_CODE (type0);
10270 code1 = TREE_CODE (type1);
10272 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10273 /* Choose the wider of two real types. */
10274 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10275 ? type0 : type1;
10276 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10277 cmp_type = type0;
10278 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10279 cmp_type = type1;
10281 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10282 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10284 if (unordered_code == UNORDERED_EXPR)
10286 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10287 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10288 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10291 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10292 : ordered_code;
10293 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10294 fold_build2_loc (loc, code, type, arg0, arg1));
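/* Illustrative sketch (hypothetical helper, not used by the folder): for
   __builtin_isgreater the fold above negates an unordered-or-less-equal
   comparison.  Ignoring exception-flag details, the result behaves like the
   expression below.  */
static inline int
example_isgreater_fold (double x, double y)
{
  return !__builtin_isunordered (x, y) && x > y;
}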
10297 /* Fold a call to built-in function FNDECL with 0 arguments.
10298 IGNORE is true if the result of the function call is ignored. This
10299 function returns NULL_TREE if no simplification was possible. */
10301 static tree
10302 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10304 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10305 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10306 switch (fcode)
10308 CASE_FLT_FN (BUILT_IN_INF):
10309 case BUILT_IN_INFD32:
10310 case BUILT_IN_INFD64:
10311 case BUILT_IN_INFD128:
10312 return fold_builtin_inf (loc, type, true);
10314 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10315 return fold_builtin_inf (loc, type, false);
10317 case BUILT_IN_CLASSIFY_TYPE:
10318 return fold_builtin_classify_type (NULL_TREE);
10320 default:
10321 break;
10323 return NULL_TREE;
10326 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10327 IGNORE is true if the result of the function call is ignored. This
10328 function returns NULL_TREE if no simplification was possible. */
10330 static tree
10331 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10333 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10334 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10335 switch (fcode)
10338 case BUILT_IN_CONSTANT_P:
10340 tree val = fold_builtin_constant_p (arg0);
10342 /* Gimplification will pull the CALL_EXPR for the builtin out of
10343 an if condition. When not optimizing, we'll not CSE it back.
10344 To avoid regressions in the form of link errors, return false now. */
10345 if (!val && !optimize)
10346 val = integer_zero_node;
10348 return val;
10351 case BUILT_IN_CLASSIFY_TYPE:
10352 return fold_builtin_classify_type (arg0);
10354 case BUILT_IN_STRLEN:
10355 return fold_builtin_strlen (loc, arg0);
10357 CASE_FLT_FN (BUILT_IN_FABS):
10358 return fold_builtin_fabs (loc, arg0, type);
10360 case BUILT_IN_ABS:
10361 case BUILT_IN_LABS:
10362 case BUILT_IN_LLABS:
10363 case BUILT_IN_IMAXABS:
10364 return fold_builtin_abs (loc, arg0, type);
10366 CASE_FLT_FN (BUILT_IN_CONJ):
10367 if (validate_arg (arg0, COMPLEX_TYPE)
10368 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10369 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10370 break;
10372 CASE_FLT_FN (BUILT_IN_CREAL):
10373 if (validate_arg (arg0, COMPLEX_TYPE)
10374 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10375 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10376 break;
10378 CASE_FLT_FN (BUILT_IN_CIMAG):
10379 if (validate_arg (arg0, COMPLEX_TYPE))
10380 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10381 break;
10383 CASE_FLT_FN (BUILT_IN_CCOS):
10384 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10386 CASE_FLT_FN (BUILT_IN_CCOSH):
10387 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10389 #ifdef HAVE_mpc
10390 CASE_FLT_FN (BUILT_IN_CSIN):
10391 if (validate_arg (arg0, COMPLEX_TYPE)
10392 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10393 return do_mpc_arg1 (arg0, type, mpc_sin);
10394 break;
10396 CASE_FLT_FN (BUILT_IN_CSINH):
10397 if (validate_arg (arg0, COMPLEX_TYPE)
10398 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10399 return do_mpc_arg1 (arg0, type, mpc_sinh);
10400 break;
10402 CASE_FLT_FN (BUILT_IN_CTAN):
10403 if (validate_arg (arg0, COMPLEX_TYPE)
10404 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10405 return do_mpc_arg1 (arg0, type, mpc_tan);
10406 break;
10408 CASE_FLT_FN (BUILT_IN_CTANH):
10409 if (validate_arg (arg0, COMPLEX_TYPE)
10410 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10411 return do_mpc_arg1 (arg0, type, mpc_tanh);
10412 break;
10414 CASE_FLT_FN (BUILT_IN_CLOG):
10415 if (validate_arg (arg0, COMPLEX_TYPE)
10416 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10417 return do_mpc_arg1 (arg0, type, mpc_log);
10418 break;
10420 CASE_FLT_FN (BUILT_IN_CSQRT):
10421 if (validate_arg (arg0, COMPLEX_TYPE)
10422 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10423 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10424 break;
10425 #endif
10427 CASE_FLT_FN (BUILT_IN_CABS):
10428 return fold_builtin_cabs (loc, arg0, type, fndecl);
10430 CASE_FLT_FN (BUILT_IN_CARG):
10431 return fold_builtin_carg (loc, arg0, type);
10433 CASE_FLT_FN (BUILT_IN_SQRT):
10434 return fold_builtin_sqrt (loc, arg0, type);
10436 CASE_FLT_FN (BUILT_IN_CBRT):
10437 return fold_builtin_cbrt (loc, arg0, type);
10439 CASE_FLT_FN (BUILT_IN_ASIN):
10440 if (validate_arg (arg0, REAL_TYPE))
10441 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10442 &dconstm1, &dconst1, true);
10443 break;
10445 CASE_FLT_FN (BUILT_IN_ACOS):
10446 if (validate_arg (arg0, REAL_TYPE))
10447 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10448 &dconstm1, &dconst1, true);
10449 break;
10451 CASE_FLT_FN (BUILT_IN_ATAN):
10452 if (validate_arg (arg0, REAL_TYPE))
10453 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10454 break;
10456 CASE_FLT_FN (BUILT_IN_ASINH):
10457 if (validate_arg (arg0, REAL_TYPE))
10458 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10459 break;
10461 CASE_FLT_FN (BUILT_IN_ACOSH):
10462 if (validate_arg (arg0, REAL_TYPE))
10463 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10464 &dconst1, NULL, true);
10465 break;
10467 CASE_FLT_FN (BUILT_IN_ATANH):
10468 if (validate_arg (arg0, REAL_TYPE))
10469 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10470 &dconstm1, &dconst1, false);
10471 break;
10473 CASE_FLT_FN (BUILT_IN_SIN):
10474 if (validate_arg (arg0, REAL_TYPE))
10475 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10476 break;
10478 CASE_FLT_FN (BUILT_IN_COS):
10479 return fold_builtin_cos (loc, arg0, type, fndecl);
10481 CASE_FLT_FN (BUILT_IN_TAN):
10482 return fold_builtin_tan (arg0, type);
10484 CASE_FLT_FN (BUILT_IN_CEXP):
10485 return fold_builtin_cexp (loc, arg0, type);
10487 CASE_FLT_FN (BUILT_IN_CEXPI):
10488 if (validate_arg (arg0, REAL_TYPE))
10489 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10490 break;
10492 CASE_FLT_FN (BUILT_IN_SINH):
10493 if (validate_arg (arg0, REAL_TYPE))
10494 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10495 break;
10497 CASE_FLT_FN (BUILT_IN_COSH):
10498 return fold_builtin_cosh (loc, arg0, type, fndecl);
10500 CASE_FLT_FN (BUILT_IN_TANH):
10501 if (validate_arg (arg0, REAL_TYPE))
10502 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10503 break;
10505 CASE_FLT_FN (BUILT_IN_ERF):
10506 if (validate_arg (arg0, REAL_TYPE))
10507 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10508 break;
10510 CASE_FLT_FN (BUILT_IN_ERFC):
10511 if (validate_arg (arg0, REAL_TYPE))
10512 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10513 break;
10515 CASE_FLT_FN (BUILT_IN_TGAMMA):
10516 if (validate_arg (arg0, REAL_TYPE))
10517 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10518 break;
10520 CASE_FLT_FN (BUILT_IN_EXP):
10521 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10523 CASE_FLT_FN (BUILT_IN_EXP2):
10524 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10526 CASE_FLT_FN (BUILT_IN_EXP10):
10527 CASE_FLT_FN (BUILT_IN_POW10):
10528 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10530 CASE_FLT_FN (BUILT_IN_EXPM1):
10531 if (validate_arg (arg0, REAL_TYPE))
10532 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10533 break;
10535 CASE_FLT_FN (BUILT_IN_LOG):
10536 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10538 CASE_FLT_FN (BUILT_IN_LOG2):
10539 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10541 CASE_FLT_FN (BUILT_IN_LOG10):
10542 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10544 CASE_FLT_FN (BUILT_IN_LOG1P):
10545 if (validate_arg (arg0, REAL_TYPE))
10546 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10547 &dconstm1, NULL, false);
10548 break;
10550 CASE_FLT_FN (BUILT_IN_J0):
10551 if (validate_arg (arg0, REAL_TYPE))
10552 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10553 NULL, NULL, 0);
10554 break;
10556 CASE_FLT_FN (BUILT_IN_J1):
10557 if (validate_arg (arg0, REAL_TYPE))
10558 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10559 NULL, NULL, 0);
10560 break;
10562 CASE_FLT_FN (BUILT_IN_Y0):
10563 if (validate_arg (arg0, REAL_TYPE))
10564 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10565 &dconst0, NULL, false);
10566 break;
10568 CASE_FLT_FN (BUILT_IN_Y1):
10569 if (validate_arg (arg0, REAL_TYPE))
10570 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10571 &dconst0, NULL, false);
10572 break;
10574 CASE_FLT_FN (BUILT_IN_NAN):
10575 case BUILT_IN_NAND32:
10576 case BUILT_IN_NAND64:
10577 case BUILT_IN_NAND128:
10578 return fold_builtin_nan (arg0, type, true);
10580 CASE_FLT_FN (BUILT_IN_NANS):
10581 return fold_builtin_nan (arg0, type, false);
10583 CASE_FLT_FN (BUILT_IN_FLOOR):
10584 return fold_builtin_floor (loc, fndecl, arg0);
10586 CASE_FLT_FN (BUILT_IN_CEIL):
10587 return fold_builtin_ceil (loc, fndecl, arg0);
10589 CASE_FLT_FN (BUILT_IN_TRUNC):
10590 return fold_builtin_trunc (loc, fndecl, arg0);
10592 CASE_FLT_FN (BUILT_IN_ROUND):
10593 return fold_builtin_round (loc, fndecl, arg0);
10595 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10596 CASE_FLT_FN (BUILT_IN_RINT):
10597 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10599 CASE_FLT_FN (BUILT_IN_LCEIL):
10600 CASE_FLT_FN (BUILT_IN_LLCEIL):
10601 CASE_FLT_FN (BUILT_IN_LFLOOR):
10602 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10603 CASE_FLT_FN (BUILT_IN_LROUND):
10604 CASE_FLT_FN (BUILT_IN_LLROUND):
10605 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10607 CASE_FLT_FN (BUILT_IN_LRINT):
10608 CASE_FLT_FN (BUILT_IN_LLRINT):
10609 return fold_fixed_mathfn (loc, fndecl, arg0);
10611 case BUILT_IN_BSWAP32:
10612 case BUILT_IN_BSWAP64:
10613 return fold_builtin_bswap (fndecl, arg0);
10615 CASE_INT_FN (BUILT_IN_FFS):
10616 CASE_INT_FN (BUILT_IN_CLZ):
10617 CASE_INT_FN (BUILT_IN_CTZ):
10618 CASE_INT_FN (BUILT_IN_POPCOUNT):
10619 CASE_INT_FN (BUILT_IN_PARITY):
10620 return fold_builtin_bitop (fndecl, arg0);
10622 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10623 return fold_builtin_signbit (loc, arg0, type);
10625 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10626 return fold_builtin_significand (loc, arg0, type);
10628 CASE_FLT_FN (BUILT_IN_ILOGB):
10629 CASE_FLT_FN (BUILT_IN_LOGB):
10630 return fold_builtin_logb (loc, arg0, type);
10632 case BUILT_IN_ISASCII:
10633 return fold_builtin_isascii (loc, arg0);
10635 case BUILT_IN_TOASCII:
10636 return fold_builtin_toascii (loc, arg0);
10638 case BUILT_IN_ISDIGIT:
10639 return fold_builtin_isdigit (loc, arg0);
10641 CASE_FLT_FN (BUILT_IN_FINITE):
10642 case BUILT_IN_FINITED32:
10643 case BUILT_IN_FINITED64:
10644 case BUILT_IN_FINITED128:
10645 case BUILT_IN_ISFINITE:
10646 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10648 CASE_FLT_FN (BUILT_IN_ISINF):
10649 case BUILT_IN_ISINFD32:
10650 case BUILT_IN_ISINFD64:
10651 case BUILT_IN_ISINFD128:
10652 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10654 case BUILT_IN_ISINF_SIGN:
10655 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10657 CASE_FLT_FN (BUILT_IN_ISNAN):
10658 case BUILT_IN_ISNAND32:
10659 case BUILT_IN_ISNAND64:
10660 case BUILT_IN_ISNAND128:
10661 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10663 case BUILT_IN_PRINTF:
10664 case BUILT_IN_PRINTF_UNLOCKED:
10665 case BUILT_IN_VPRINTF:
10666 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10668 default:
10669 break;
10672 return NULL_TREE;
10676 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10677 IGNORE is true if the result of the function call is ignored. This
10678 function returns NULL_TREE if no simplification was possible. */
10680 static tree
10681 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10683 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10684 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10686 switch (fcode)
10688 CASE_FLT_FN (BUILT_IN_JN):
10689 if (validate_arg (arg0, INTEGER_TYPE)
10690 && validate_arg (arg1, REAL_TYPE))
10691 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10692 break;
10694 CASE_FLT_FN (BUILT_IN_YN):
10695 if (validate_arg (arg0, INTEGER_TYPE)
10696 && validate_arg (arg1, REAL_TYPE))
10697 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10698 &dconst0, false);
10699 break;
10701 CASE_FLT_FN (BUILT_IN_DREM):
10702 CASE_FLT_FN (BUILT_IN_REMAINDER):
10703 if (validate_arg (arg0, REAL_TYPE)
10704 && validate_arg (arg1, REAL_TYPE))
10705 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10706 break;
10708 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10709 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10710 if (validate_arg (arg0, REAL_TYPE)
10711 && validate_arg (arg1, POINTER_TYPE))
10712 return do_mpfr_lgamma_r (arg0, arg1, type);
10713 break;
10715 CASE_FLT_FN (BUILT_IN_ATAN2):
10716 if (validate_arg (arg0, REAL_TYPE)
10717 && validate_arg (arg1, REAL_TYPE))
10718 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10719 break;
10721 CASE_FLT_FN (BUILT_IN_FDIM):
10722 if (validate_arg (arg0, REAL_TYPE)
10723 && validate_arg (arg1, REAL_TYPE))
10724 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10725 break;
10727 CASE_FLT_FN (BUILT_IN_HYPOT):
10728 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10730 #ifdef HAVE_mpc_pow
10731 CASE_FLT_FN (BUILT_IN_CPOW):
10732 if (validate_arg (arg0, COMPLEX_TYPE)
10733 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10734 && validate_arg (arg1, COMPLEX_TYPE)
10735 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10736 return do_mpc_arg2 (arg0, arg1, type, mpc_pow);
10737 break;
10738 #endif
10740 CASE_FLT_FN (BUILT_IN_LDEXP):
10741 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10742 CASE_FLT_FN (BUILT_IN_SCALBN):
10743 CASE_FLT_FN (BUILT_IN_SCALBLN):
10744 return fold_builtin_load_exponent (loc, arg0, arg1,
10745 type, /*ldexp=*/false);
10747 CASE_FLT_FN (BUILT_IN_FREXP):
10748 return fold_builtin_frexp (loc, arg0, arg1, type);
10750 CASE_FLT_FN (BUILT_IN_MODF):
10751 return fold_builtin_modf (loc, arg0, arg1, type);
10753 case BUILT_IN_BZERO:
10754 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10756 case BUILT_IN_FPUTS:
10757 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10759 case BUILT_IN_FPUTS_UNLOCKED:
10760 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10762 case BUILT_IN_STRSTR:
10763 return fold_builtin_strstr (loc, arg0, arg1, type);
10765 case BUILT_IN_STRCAT:
10766 return fold_builtin_strcat (loc, arg0, arg1);
10768 case BUILT_IN_STRSPN:
10769 return fold_builtin_strspn (loc, arg0, arg1);
10771 case BUILT_IN_STRCSPN:
10772 return fold_builtin_strcspn (loc, arg0, arg1);
10774 case BUILT_IN_STRCHR:
10775 case BUILT_IN_INDEX:
10776 return fold_builtin_strchr (loc, arg0, arg1, type);
10778 case BUILT_IN_STRRCHR:
10779 case BUILT_IN_RINDEX:
10780 return fold_builtin_strrchr (loc, arg0, arg1, type);
10782 case BUILT_IN_STRCPY:
10783 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10785 case BUILT_IN_STPCPY:
10786 if (ignore)
10788 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10789 if (!fn)
10790 break;
10792 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10794 break;
10796 case BUILT_IN_STRCMP:
10797 return fold_builtin_strcmp (loc, arg0, arg1);
10799 case BUILT_IN_STRPBRK:
10800 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10802 case BUILT_IN_EXPECT:
10803 return fold_builtin_expect (loc, arg0, arg1);
10805 CASE_FLT_FN (BUILT_IN_POW):
10806 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10808 CASE_FLT_FN (BUILT_IN_POWI):
10809 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10811 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10812 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10814 CASE_FLT_FN (BUILT_IN_FMIN):
10815 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10817 CASE_FLT_FN (BUILT_IN_FMAX):
10818 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10820 case BUILT_IN_ISGREATER:
10821 return fold_builtin_unordered_cmp (loc, fndecl,
10822 arg0, arg1, UNLE_EXPR, LE_EXPR);
10823 case BUILT_IN_ISGREATEREQUAL:
10824 return fold_builtin_unordered_cmp (loc, fndecl,
10825 arg0, arg1, UNLT_EXPR, LT_EXPR);
10826 case BUILT_IN_ISLESS:
10827 return fold_builtin_unordered_cmp (loc, fndecl,
10828 arg0, arg1, UNGE_EXPR, GE_EXPR);
10829 case BUILT_IN_ISLESSEQUAL:
10830 return fold_builtin_unordered_cmp (loc, fndecl,
10831 arg0, arg1, UNGT_EXPR, GT_EXPR);
10832 case BUILT_IN_ISLESSGREATER:
10833 return fold_builtin_unordered_cmp (loc, fndecl,
10834 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10835 case BUILT_IN_ISUNORDERED:
10836 return fold_builtin_unordered_cmp (loc, fndecl,
10837 arg0, arg1, UNORDERED_EXPR,
10838 NOP_EXPR);
10840 /* We do the folding for va_start in the expander. */
10841 case BUILT_IN_VA_START:
10842 break;
10844 case BUILT_IN_SPRINTF:
10845 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10847 case BUILT_IN_OBJECT_SIZE:
10848 return fold_builtin_object_size (arg0, arg1);
10850 case BUILT_IN_PRINTF:
10851 case BUILT_IN_PRINTF_UNLOCKED:
10852 case BUILT_IN_VPRINTF:
10853 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10855 case BUILT_IN_PRINTF_CHK:
10856 case BUILT_IN_VPRINTF_CHK:
10857 if (!validate_arg (arg0, INTEGER_TYPE)
10858 || TREE_SIDE_EFFECTS (arg0))
10859 return NULL_TREE;
10860 else
10861 return fold_builtin_printf (loc, fndecl,
10862 arg1, NULL_TREE, ignore, fcode);
10863 break;
10865 case BUILT_IN_FPRINTF:
10866 case BUILT_IN_FPRINTF_UNLOCKED:
10867 case BUILT_IN_VFPRINTF:
10868 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10869 ignore, fcode);
10871 default:
10872 break;
10874 return NULL_TREE;
10877 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10878 and ARG2. IGNORE is true if the result of the function call is ignored.
10879 This function returns NULL_TREE if no simplification was possible. */
10881 static tree
10882 fold_builtin_3 (location_t loc, tree fndecl,
10883 tree arg0, tree arg1, tree arg2, bool ignore)
10885 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10886 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10887 switch (fcode)
10890 CASE_FLT_FN (BUILT_IN_SINCOS):
10891 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10893 CASE_FLT_FN (BUILT_IN_FMA):
10894 if (validate_arg (arg0, REAL_TYPE)
10895 && validate_arg (arg1, REAL_TYPE)
10896 && validate_arg (arg2, REAL_TYPE))
10897 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10898 break;
10900 CASE_FLT_FN (BUILT_IN_REMQUO):
10901 if (validate_arg (arg0, REAL_TYPE)
10902 && validate_arg (arg1, REAL_TYPE)
10903 && validate_arg (arg2, POINTER_TYPE))
10904 return do_mpfr_remquo (arg0, arg1, arg2);
10905 break;
10907 case BUILT_IN_MEMSET:
10908 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10910 case BUILT_IN_BCOPY:
10911 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10912 void_type_node, true, /*endp=*/3);
10914 case BUILT_IN_MEMCPY:
10915 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10916 type, ignore, /*endp=*/0);
10918 case BUILT_IN_MEMPCPY:
10919 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10920 type, ignore, /*endp=*/1);
10922 case BUILT_IN_MEMMOVE:
10923 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10924 type, ignore, /*endp=*/3);
10926 case BUILT_IN_STRNCAT:
10927 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10929 case BUILT_IN_STRNCPY:
10930 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10932 case BUILT_IN_STRNCMP:
10933 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10935 case BUILT_IN_MEMCHR:
10936 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10938 case BUILT_IN_BCMP:
10939 case BUILT_IN_MEMCMP:
10940 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10942 case BUILT_IN_SPRINTF:
10943 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10945 case BUILT_IN_STRCPY_CHK:
10946 case BUILT_IN_STPCPY_CHK:
10947 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10948 ignore, fcode);
10950 case BUILT_IN_STRCAT_CHK:
10951 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10953 case BUILT_IN_PRINTF_CHK:
10954 case BUILT_IN_VPRINTF_CHK:
10955 if (!validate_arg (arg0, INTEGER_TYPE)
10956 || TREE_SIDE_EFFECTS (arg0))
10957 return NULL_TREE;
10958 else
10959 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10960 break;
10962 case BUILT_IN_FPRINTF:
10963 case BUILT_IN_FPRINTF_UNLOCKED:
10964 case BUILT_IN_VFPRINTF:
10965 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10966 ignore, fcode);
10968 case BUILT_IN_FPRINTF_CHK:
10969 case BUILT_IN_VFPRINTF_CHK:
10970 if (!validate_arg (arg1, INTEGER_TYPE)
10971 || TREE_SIDE_EFFECTS (arg1))
10972 return NULL_TREE;
10973 else
10974 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10975 ignore, fcode);
10977 default:
10978 break;
10980 return NULL_TREE;
10983 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10984 ARG2, and ARG3. IGNORE is true if the result of the function call is
10985 ignored. This function returns NULL_TREE if no simplification was
10986 possible. */
10988 static tree
10989 fold_builtin_4 (location_t loc, tree fndecl,
10990 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10992 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10994 switch (fcode)
10996 case BUILT_IN_MEMCPY_CHK:
10997 case BUILT_IN_MEMPCPY_CHK:
10998 case BUILT_IN_MEMMOVE_CHK:
10999 case BUILT_IN_MEMSET_CHK:
11000 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
11001 NULL_TREE, ignore,
11002 DECL_FUNCTION_CODE (fndecl));
11004 case BUILT_IN_STRNCPY_CHK:
11005 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
11007 case BUILT_IN_STRNCAT_CHK:
11008 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
11010 case BUILT_IN_FPRINTF_CHK:
11011 case BUILT_IN_VFPRINTF_CHK:
11012 if (!validate_arg (arg1, INTEGER_TYPE)
11013 || TREE_SIDE_EFFECTS (arg1))
11014 return NULL_TREE;
11015 else
11016 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
11017 ignore, fcode);
11018 break;
11020 default:
11021 break;
11023 return NULL_TREE;
11026 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11027 arguments, where NARGS <= 4. IGNORE is true if the result of the
11028 function call is ignored. This function returns NULL_TREE if no
11029 simplification was possible. Note that this only folds builtins with
11030 fixed argument patterns. Foldings that do varargs-to-varargs
11031 transformations, or that match calls with more than 4 arguments,
11032 need to be handled with fold_builtin_varargs instead. */
11034 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11036 static tree
11037 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11039 tree ret = NULL_TREE;
11041 switch (nargs)
11043 case 0:
11044 ret = fold_builtin_0 (loc, fndecl, ignore);
11045 break;
11046 case 1:
11047 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11048 break;
11049 case 2:
11050 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11051 break;
11052 case 3:
11053 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11054 break;
11055 case 4:
11056 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11057 ignore);
11058 break;
11059 default:
11060 break;
11062 if (ret)
11064 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11065 SET_EXPR_LOCATION (ret, loc);
11066 TREE_NO_WARNING (ret) = 1;
11067 return ret;
11069 return NULL_TREE;
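/* For illustration: a three-argument call such as

       memcpy (dst, src, 16)

   is dispatched to fold_builtin_3 above and from there to
   fold_builtin_memory_op.  Any non-NULL result is wrapped in a NOP_EXPR
   with TREE_NO_WARNING set so that later passes do not warn about the
   statement that replaced the call.  */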
11072 /* Builtins with folding operations that operate on "..." arguments
11073 need special handling; we need to store the arguments in a convenient
11074 data structure before attempting any folding. Fortunately there are
11075 only a few builtins that fall into this category. FNDECL is the
11076 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11077 result of the function call is ignored. */
11079 static tree
11080 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11081 bool ignore ATTRIBUTE_UNUSED)
11083 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11084 tree ret = NULL_TREE;
11086 switch (fcode)
11088 case BUILT_IN_SPRINTF_CHK:
11089 case BUILT_IN_VSPRINTF_CHK:
11090 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
11091 break;
11093 case BUILT_IN_SNPRINTF_CHK:
11094 case BUILT_IN_VSNPRINTF_CHK:
11095 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
11096 break;
11098 case BUILT_IN_FPCLASSIFY:
11099 ret = fold_builtin_fpclassify (loc, exp);
11100 break;
11102 default:
11103 break;
11105 if (ret)
11107 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11108 SET_EXPR_LOCATION (ret, loc);
11109 TREE_NO_WARNING (ret) = 1;
11110 return ret;
11112 return NULL_TREE;
11115 /* Return true if FNDECL shouldn't be folded right now.
11116 If a built-in function has an inline attribute always_inline
11117 wrapper, defer folding it until after always_inline functions have
11118 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11119 might not be performed. */
11121 static bool
11122 avoid_folding_inline_builtin (tree fndecl)
11124 return (DECL_DECLARED_INLINE_P (fndecl)
11125 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11126 && cfun
11127 && !cfun->always_inline_functions_inlined
11128 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
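/* For illustration (a hypothetical fortify wrapper, not taken from this
   file): the deferral above matters for -D_FORTIFY_SOURCE style headers
   that provide something like

       extern __inline __attribute__ ((__always_inline__, __gnu_inline__))
       void *
       memcpy (void *__d, const void *__s, size_t __n)
       {
         return __builtin___memcpy_chk (__d, __s, __n,
                                        __builtin_object_size (__d, 0));
       }

   Folding the builtin before such wrappers are inlined could bypass the
   object-size check, so folding waits until
   cfun->always_inline_functions_inlined is set.  */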
11131 /* A wrapper function for builtin folding that prevents warnings for
11132 "statement without effect" and the like, caused by removing the
11133 call node earlier than the warning is generated. */
11135 tree
11136 fold_call_expr (location_t loc, tree exp, bool ignore)
11138 tree ret = NULL_TREE;
11139 tree fndecl = get_callee_fndecl (exp);
11140 if (fndecl
11141 && TREE_CODE (fndecl) == FUNCTION_DECL
11142 && DECL_BUILT_IN (fndecl)
11143 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11144 yet. Defer folding until we see all the arguments
11145 (after inlining). */
11146 && !CALL_EXPR_VA_ARG_PACK (exp))
11148 int nargs = call_expr_nargs (exp);
11150 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11151 instead last argument is __builtin_va_arg_pack (). Defer folding
11152 even in that case, until arguments are finalized. */
11153 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11155 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11156 if (fndecl2
11157 && TREE_CODE (fndecl2) == FUNCTION_DECL
11158 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11159 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11160 return NULL_TREE;
11163 if (avoid_folding_inline_builtin (fndecl))
11164 return NULL_TREE;
11166 /* FIXME: Don't use a list in this interface. */
11167 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11168 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
11169 else
11171 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11173 tree *args = CALL_EXPR_ARGP (exp);
11174 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11176 if (!ret)
11177 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11178 if (ret)
11179 return ret;
11182 return NULL_TREE;
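/* For illustration (a hypothetical forwarder, not taken from this file):
   the __builtin_va_arg_pack () deferral above is what makes always_inline
   forwarders such as

       extern __inline __attribute__ ((__always_inline__)) int
       my_fprintf (FILE *__f, const char *__fmt, ...)
       {
         return __builtin___fprintf_chk (__f, 1, __fmt,
                                         __builtin_va_arg_pack ());
       }

   safe: until the forwarder is inlined into its callers the real argument
   list is unknown, so no simplification is attempted.  */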
11185 /* Conveniently construct a function call expression. FNDECL names the
11186 function to be called and ARGLIST is a TREE_LIST of arguments. */
11188 tree
11189 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
11191 tree fntype = TREE_TYPE (fndecl);
11192 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11193 int n = list_length (arglist);
11194 tree *argarray = (tree *) alloca (n * sizeof (tree));
11195 int i;
11197 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
11198 argarray[i] = TREE_VALUE (arglist);
11199 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11202 /* Conveniently construct a function call expression. FNDECL names the
11203 function to be called, N is the number of arguments, and the "..."
11204 parameters are the argument expressions. */
11206 tree
11207 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11209 va_list ap;
11210 tree fntype = TREE_TYPE (fndecl);
11211 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11212 tree *argarray = (tree *) alloca (n * sizeof (tree));
11213 int i;
11215 va_start (ap, n);
11216 for (i = 0; i < n; i++)
11217 argarray[i] = va_arg (ap, tree);
11218 va_end (ap);
11219 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11222 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11223 N arguments are passed in the array ARGARRAY. */
11225 tree
11226 fold_builtin_call_array (location_t loc, tree type,
11227 tree fn,
11228 int n,
11229 tree *argarray)
11231 tree ret = NULL_TREE;
11232 int i;
11233 tree exp;
11235 if (TREE_CODE (fn) == ADDR_EXPR)
11237 tree fndecl = TREE_OPERAND (fn, 0);
11238 if (TREE_CODE (fndecl) == FUNCTION_DECL
11239 && DECL_BUILT_IN (fndecl))
11241 /* If last argument is __builtin_va_arg_pack (), arguments to this
11242 function are not finalized yet. Defer folding until they are. */
11243 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11245 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11246 if (fndecl2
11247 && TREE_CODE (fndecl2) == FUNCTION_DECL
11248 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11249 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11250 return build_call_array_loc (loc, type, fn, n, argarray);
11252 if (avoid_folding_inline_builtin (fndecl))
11253 return build_call_array_loc (loc, type, fn, n, argarray);
11254 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11256 tree arglist = NULL_TREE;
11257 for (i = n - 1; i >= 0; i--)
11258 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
11259 ret = targetm.fold_builtin (fndecl, arglist, false);
11260 if (ret)
11261 return ret;
11262 return build_call_array_loc (loc, type, fn, n, argarray);
11264 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11266 /* First try the transformations that don't require consing up
11267 an exp. */
11268 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11269 if (ret)
11270 return ret;
11273 /* If we got this far, we need to build an exp. */
11274 exp = build_call_array_loc (loc, type, fn, n, argarray);
11275 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11276 return ret ? ret : exp;
11280 return build_call_array_loc (loc, type, fn, n, argarray);
11283 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11284 along with N new arguments specified as the "..." parameters. SKIP
11285 is the number of arguments in EXP to be omitted. This function is used
11286 to do varargs-to-varargs transformations. */
11288 static tree
11289 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11291 int oldnargs = call_expr_nargs (exp);
11292 int nargs = oldnargs - skip + n;
11293 tree fntype = TREE_TYPE (fndecl);
11294 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11295 tree *buffer;
11297 if (n > 0)
11299 int i, j;
11300 va_list ap;
11302 buffer = XALLOCAVEC (tree, nargs);
11303 va_start (ap, n);
11304 for (i = 0; i < n; i++)
11305 buffer[i] = va_arg (ap, tree);
11306 va_end (ap);
11307 for (j = skip; j < oldnargs; j++, i++)
11308 buffer[i] = CALL_EXPR_ARG (exp, j);
11310 else
11311 buffer = CALL_EXPR_ARGP (exp) + skip;
11313 return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
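/* For illustration (usage sketch, argument names assumed): a caller that
   wants to drop the flag and size arguments of a _chk variant while
   keeping the trailing varargs might write

       rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);

   turning __sprintf_chk (dest, flag, size, fmt, ...) into
   sprintf (dest, fmt, ...) with the remaining arguments copied over.  */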
11316 /* Validate a single argument ARG against a tree code CODE representing
11317 a type. */
11319 static bool
11320 validate_arg (const_tree arg, enum tree_code code)
11322 if (!arg)
11323 return false;
11324 else if (code == POINTER_TYPE)
11325 return POINTER_TYPE_P (TREE_TYPE (arg));
11326 else if (code == INTEGER_TYPE)
11327 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11328 return code == TREE_CODE (TREE_TYPE (arg));
11331 /* This function validates the types of a function call argument list
11332 against a specified list of tree_codes. If the last specifier is a 0,
11333 that represents an ellipsis, otherwise the last specifier must be a
11334 VOID_TYPE.
11336 This is the GIMPLE version of validate_arglist. Eventually we want to
11337 completely convert builtins.c to work from GIMPLEs and the tree based
11338 validate_arglist will then be removed. */
11340 bool
11341 validate_gimple_arglist (const_gimple call, ...)
11343 enum tree_code code;
11344 bool res = false;
11345 va_list ap;
11346 const_tree arg;
11347 size_t i;
11349 va_start (ap, call);
11350 i = 0;
11354 code = (enum tree_code) va_arg (ap, int);
11355 switch (code)
11357 case 0:
11358 /* This signifies an ellipsis; any further arguments are all ok. */
11359 res = true;
11360 goto end;
11361 case VOID_TYPE:
11362 /* This signifies an endlink, if no arguments remain, return
11363 true, otherwise return false. */
11364 res = (i == gimple_call_num_args (call));
11365 goto end;
11366 default:
11367 /* If no parameters remain or the parameter's code does not
11368 match the specified code, return false. Otherwise continue
11369 checking any remaining arguments. */
11370 arg = gimple_call_arg (call, i++);
11371 if (!validate_arg (arg, code))
11372 goto end;
11373 break;
11376 while (1);
11378 /* We need gotos here since we can only have one VA_CLOSE in a
11379 function. */
11380 end: ;
11381 va_end (ap);
11383 return res;
11386 /* This function validates the types of a function call argument list
11387 against a specified list of tree_codes. If the last specifier is a 0,
11388 that represents an ellipsis, otherwise the last specifier must be a
11389 VOID_TYPE. */
11391 bool
11392 validate_arglist (const_tree callexpr, ...)
11394 enum tree_code code;
11395 bool res = false;
11396 va_list ap;
11397 const_call_expr_arg_iterator iter;
11398 const_tree arg;
11400 va_start (ap, callexpr);
11401 init_const_call_expr_arg_iterator (callexpr, &iter);
11405 code = (enum tree_code) va_arg (ap, int);
11406 switch (code)
11408 case 0:
11409 /* This signifies an ellipsis; any further arguments are all ok. */
11410 res = true;
11411 goto end;
11412 case VOID_TYPE:
11413 /* This signifies an endlink, if no arguments remain, return
11414 true, otherwise return false. */
11415 res = !more_const_call_expr_args_p (&iter);
11416 goto end;
11417 default:
11418 /* If no parameters remain or the parameter's code does not
11419 match the specified code, return false. Otherwise continue
11420 checking any remaining arguments. */
11421 arg = next_const_call_expr_arg (&iter);
11422 if (!validate_arg (arg, code))
11423 goto end;
11424 break;
11427 while (1);
11429 /* We need gotos here since we can only have one VA_CLOSE in a
11430 function. */
11431 end: ;
11432 va_end (ap);
11434 return res;
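/* For illustration: a typical signature check, matching the pattern used
   later in this file, is

       if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
         return NULL_RTX;

   i.e. exactly one pointer followed by one integer.  Ending the list with
   0 instead of VOID_TYPE would accept any further arguments.  */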
11437 /* Default target-specific builtin expander that does nothing. */
11439 rtx
11440 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11441 rtx target ATTRIBUTE_UNUSED,
11442 rtx subtarget ATTRIBUTE_UNUSED,
11443 enum machine_mode mode ATTRIBUTE_UNUSED,
11444 int ignore ATTRIBUTE_UNUSED)
11446 return NULL_RTX;
11449 /* Returns true if EXP represents data that would potentially reside
11450 in a readonly section. */
11452 static bool
11453 readonly_data_expr (tree exp)
11455 STRIP_NOPS (exp);
11457 if (TREE_CODE (exp) != ADDR_EXPR)
11458 return false;
11460 exp = get_base_address (TREE_OPERAND (exp, 0));
11461 if (!exp)
11462 return false;
11464 /* Make sure we call decl_readonly_section only for trees it
11465 can handle (since it returns true for everything it doesn't
11466 understand). */
11467 if (TREE_CODE (exp) == STRING_CST
11468 || TREE_CODE (exp) == CONSTRUCTOR
11469 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11470 return decl_readonly_section (exp, 0);
11471 else
11472 return false;
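/* For illustration: in a call like

       memmove (dst, "constant string", n)

   the source operand is read-only data, so the __memmove_chk expander
   below can downgrade the move to __memcpy_chk; with a writable local
   buffer as the source this predicate returns false and no such downgrade
   happens.  */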
11475 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11476 to the call, and TYPE is its return type.
11478 Return NULL_TREE if no simplification was possible, otherwise return the
11479 simplified form of the call as a tree.
11481 The simplified form may be a constant or other expression which
11482 computes the same value, but in a more efficient manner (including
11483 calls to other builtin functions).
11485 The call may contain arguments which need to be evaluated, but
11486 which are not useful to determine the result of the call. In
11487 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11488 COMPOUND_EXPR will be an argument which must be evaluated.
11489 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11490 COMPOUND_EXPR in the chain will contain the tree for the simplified
11491 form of the builtin function call. */
11493 static tree
11494 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11496 if (!validate_arg (s1, POINTER_TYPE)
11497 || !validate_arg (s2, POINTER_TYPE))
11498 return NULL_TREE;
11499 else
11501 tree fn;
11502 const char *p1, *p2;
11504 p2 = c_getstr (s2);
11505 if (p2 == NULL)
11506 return NULL_TREE;
11508 p1 = c_getstr (s1);
11509 if (p1 != NULL)
11511 const char *r = strstr (p1, p2);
11512 tree tem;
11514 if (r == NULL)
11515 return build_int_cst (TREE_TYPE (s1), 0);
11517 /* Return an offset into the constant string argument. */
11518 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11519 s1, size_int (r - p1));
11520 return fold_convert_loc (loc, type, tem);
11523 /* The argument is const char *, and the result is char *, so we need
11524 a type conversion here to avoid a warning. */
11525 if (p2[0] == '\0')
11526 return fold_convert_loc (loc, type, s1);
11528 if (p2[1] != '\0')
11529 return NULL_TREE;
11531 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11532 if (!fn)
11533 return NULL_TREE;
11535 /* New argument list transforming strstr(s1, s2) to
11536 strchr(s1, s2[0]). */
11537 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
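/* For illustration: with both strings constant the result is computed at
   compile time,

       strstr ("hello world", "wor")   ->   (char *) "hello world" + 6

   a one-character needle is rewritten as

       strstr (s, "w")                 ->   strchr (s, 'w')

   and strstr (s, "") folds to s converted to the result type.  */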
11541 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11542 the call, and TYPE is its return type.
11544 Return NULL_TREE if no simplification was possible, otherwise return the
11545 simplified form of the call as a tree.
11547 The simplified form may be a constant or other expression which
11548 computes the same value, but in a more efficient manner (including
11549 calls to other builtin functions).
11551 The call may contain arguments which need to be evaluated, but
11552 which are not useful to determine the result of the call. In
11553 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11554 COMPOUND_EXPR will be an argument which must be evaluated.
11555 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11556 COMPOUND_EXPR in the chain will contain the tree for the simplified
11557 form of the builtin function call. */
11559 static tree
11560 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11562 if (!validate_arg (s1, POINTER_TYPE)
11563 || !validate_arg (s2, INTEGER_TYPE))
11564 return NULL_TREE;
11565 else
11567 const char *p1;
11569 if (TREE_CODE (s2) != INTEGER_CST)
11570 return NULL_TREE;
11572 p1 = c_getstr (s1);
11573 if (p1 != NULL)
11575 char c;
11576 const char *r;
11577 tree tem;
11579 if (target_char_cast (s2, &c))
11580 return NULL_TREE;
11582 r = strchr (p1, c);
11584 if (r == NULL)
11585 return build_int_cst (TREE_TYPE (s1), 0);
11587 /* Return an offset into the constant string argument. */
11588 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11589 s1, size_int (r - p1));
11590 return fold_convert_loc (loc, type, tem);
11592 return NULL_TREE;
11596 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11597 the call, and TYPE is its return type.
11599 Return NULL_TREE if no simplification was possible, otherwise return the
11600 simplified form of the call as a tree.
11602 The simplified form may be a constant or other expression which
11603 computes the same value, but in a more efficient manner (including
11604 calls to other builtin functions).
11606 The call may contain arguments which need to be evaluated, but
11607 which are not useful to determine the result of the call. In
11608 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11609 COMPOUND_EXPR will be an argument which must be evaluated.
11610 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11611 COMPOUND_EXPR in the chain will contain the tree for the simplified
11612 form of the builtin function call. */
11614 static tree
11615 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11617 if (!validate_arg (s1, POINTER_TYPE)
11618 || !validate_arg (s2, INTEGER_TYPE))
11619 return NULL_TREE;
11620 else
11622 tree fn;
11623 const char *p1;
11625 if (TREE_CODE (s2) != INTEGER_CST)
11626 return NULL_TREE;
11628 p1 = c_getstr (s1);
11629 if (p1 != NULL)
11631 char c;
11632 const char *r;
11633 tree tem;
11635 if (target_char_cast (s2, &c))
11636 return NULL_TREE;
11638 r = strrchr (p1, c);
11640 if (r == NULL)
11641 return build_int_cst (TREE_TYPE (s1), 0);
11643 /* Return an offset into the constant string argument. */
11644 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11645 s1, size_int (r - p1));
11646 return fold_convert_loc (loc, type, tem);
11649 if (! integer_zerop (s2))
11650 return NULL_TREE;
11652 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11653 if (!fn)
11654 return NULL_TREE;
11656 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11657 return build_call_expr_loc (loc, fn, 2, s1, s2);
11661 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11662 to the call, and TYPE is its return type.
11664 Return NULL_TREE if no simplification was possible, otherwise return the
11665 simplified form of the call as a tree.
11667 The simplified form may be a constant or other expression which
11668 computes the same value, but in a more efficient manner (including
11669 calls to other builtin functions).
11671 The call may contain arguments which need to be evaluated, but
11672 which are not useful to determine the result of the call. In
11673 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11674 COMPOUND_EXPR will be an argument which must be evaluated.
11675 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11676 COMPOUND_EXPR in the chain will contain the tree for the simplified
11677 form of the builtin function call. */
11679 static tree
11680 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11682 if (!validate_arg (s1, POINTER_TYPE)
11683 || !validate_arg (s2, POINTER_TYPE))
11684 return NULL_TREE;
11685 else
11687 tree fn;
11688 const char *p1, *p2;
11690 p2 = c_getstr (s2);
11691 if (p2 == NULL)
11692 return NULL_TREE;
11694 p1 = c_getstr (s1);
11695 if (p1 != NULL)
11697 const char *r = strpbrk (p1, p2);
11698 tree tem;
11700 if (r == NULL)
11701 return build_int_cst (TREE_TYPE (s1), 0);
11703 /* Return an offset into the constant string argument. */
11704 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11705 s1, size_int (r - p1));
11706 return fold_convert_loc (loc, type, tem);
11709 if (p2[0] == '\0')
11710 /* strpbrk(x, "") == NULL.
11711 Evaluate and ignore s1 in case it had side-effects. */
11712 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11714 if (p2[1] != '\0')
11715 return NULL_TREE; /* Really call strpbrk. */
11717 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11718 if (!fn)
11719 return NULL_TREE;
11721 /* New argument list transforming strpbrk(s1, s2) to
11722 strchr(s1, s2[0]). */
11723 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
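/* For illustration:

       strpbrk (s, "")    ->   NULL     (s is still evaluated)
       strpbrk (s, "a")   ->   strchr (s, 'a')

   while two constant arguments fold to an offset into S1 as above.  */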
11727 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11728 to the call.
11730 Return NULL_TREE if no simplification was possible, otherwise return the
11731 simplified form of the call as a tree.
11733 The simplified form may be a constant or other expression which
11734 computes the same value, but in a more efficient manner (including
11735 calls to other builtin functions).
11737 The call may contain arguments which need to be evaluated, but
11738 which are not useful to determine the result of the call. In
11739 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11740 COMPOUND_EXPR will be an argument which must be evaluated.
11741 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11742 COMPOUND_EXPR in the chain will contain the tree for the simplified
11743 form of the builtin function call. */
11745 static tree
11746 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11748 if (!validate_arg (dst, POINTER_TYPE)
11749 || !validate_arg (src, POINTER_TYPE))
11750 return NULL_TREE;
11751 else
11753 const char *p = c_getstr (src);
11755 /* If the string length is zero, return the dst parameter. */
11756 if (p && *p == '\0')
11757 return dst;
11759 return NULL_TREE;
11763 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11764 arguments to the call.
11766 Return NULL_TREE if no simplification was possible, otherwise return the
11767 simplified form of the call as a tree.
11769 The simplified form may be a constant or other expression which
11770 computes the same value, but in a more efficient manner (including
11771 calls to other builtin functions).
11773 The call may contain arguments which need to be evaluated, but
11774 which are not useful to determine the result of the call. In
11775 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11776 COMPOUND_EXPR will be an argument which must be evaluated.
11777 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11778 COMPOUND_EXPR in the chain will contain the tree for the simplified
11779 form of the builtin function call. */
11781 static tree
11782 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11784 if (!validate_arg (dst, POINTER_TYPE)
11785 || !validate_arg (src, POINTER_TYPE)
11786 || !validate_arg (len, INTEGER_TYPE))
11787 return NULL_TREE;
11788 else
11790 const char *p = c_getstr (src);
11792 /* If the requested length is zero, or the src parameter string
11793 length is zero, return the dst parameter. */
11794 if (integer_zerop (len) || (p && *p == '\0'))
11795 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11797 /* If the requested len is greater than or equal to the string
11798 length, call strcat. */
11799 if (TREE_CODE (len) == INTEGER_CST && p
11800 && compare_tree_int (len, strlen (p)) >= 0)
11802 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11804 /* If the replacement _DECL isn't initialized, don't do the
11805 transformation. */
11806 if (!fn)
11807 return NULL_TREE;
11809 return build_call_expr_loc (loc, fn, 2, dst, src);
11811 return NULL_TREE;
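/* For illustration:

       strncat (dst, src, 0)     ->   dst  (src and len still evaluated)
       strncat (dst, "abc", 8)   ->   strcat (dst, "abc")

   the second form applies because the bound 8 is at least strlen ("abc"),
   so the copy is limited by the source length anyway.  */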
11815 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11816 to the call.
11818 Return NULL_TREE if no simplification was possible, otherwise return the
11819 simplified form of the call as a tree.
11821 The simplified form may be a constant or other expression which
11822 computes the same value, but in a more efficient manner (including
11823 calls to other builtin functions).
11825 The call may contain arguments which need to be evaluated, but
11826 which are not useful to determine the result of the call. In
11827 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11828 COMPOUND_EXPR will be an argument which must be evaluated.
11829 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11830 COMPOUND_EXPR in the chain will contain the tree for the simplified
11831 form of the builtin function call. */
11833 static tree
11834 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11836 if (!validate_arg (s1, POINTER_TYPE)
11837 || !validate_arg (s2, POINTER_TYPE))
11838 return NULL_TREE;
11839 else
11841 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11843 /* If both arguments are constants, evaluate at compile-time. */
11844 if (p1 && p2)
11846 const size_t r = strspn (p1, p2);
11847 return size_int (r);
11850 /* If either argument is "", the result is zero. */
11851 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11852 /* Evaluate and ignore both arguments in case either one has
11853 side-effects. */
11854 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11855 s1, s2);
11856 return NULL_TREE;
11860 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11861 to the call.
11863 Return NULL_TREE if no simplification was possible, otherwise return the
11864 simplified form of the call as a tree.
11866 The simplified form may be a constant or other expression which
11867 computes the same value, but in a more efficient manner (including
11868 calls to other builtin functions).
11870 The call may contain arguments which need to be evaluated, but
11871 which are not useful to determine the result of the call. In
11872 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11873 COMPOUND_EXPR will be an argument which must be evaluated.
11874 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11875 COMPOUND_EXPR in the chain will contain the tree for the simplified
11876 form of the builtin function call. */
11878 static tree
11879 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11881 if (!validate_arg (s1, POINTER_TYPE)
11882 || !validate_arg (s2, POINTER_TYPE))
11883 return NULL_TREE;
11884 else
11886 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11888 /* If both arguments are constants, evaluate at compile-time. */
11889 if (p1 && p2)
11891 const size_t r = strcspn (p1, p2);
11892 return size_int (r);
11895 /* If the first argument is "", the result is zero. */
11896 if (p1 && *p1 == '\0')
11898 /* Evaluate and ignore argument s2 in case it has
11899 side-effects. */
11900 return omit_one_operand_loc (loc, size_type_node,
11901 size_zero_node, s2);
11904 /* If the second argument is "", return __builtin_strlen(s1). */
11905 if (p2 && *p2 == '\0')
11907 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11909 /* If the replacement _DECL isn't initialized, don't do the
11910 transformation. */
11911 if (!fn)
11912 return NULL_TREE;
11914 return build_call_expr_loc (loc, fn, 1, s1);
11916 return NULL_TREE;
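/* For illustration:

       strcspn ("abcde", "dx")   ->   3    (computed at compile time)
       strcspn (s, "")           ->   strlen (s)
       strcspn ("", s2)          ->   0    (s2 is still evaluated)

   mirroring the strspn folding above, which handles only the constant and
   empty-string cases.  */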
11920 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11921 to the call. IGNORE is true if the value returned
11922 by the builtin will be ignored. UNLOCKED is true if this is
11923 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11924 the known length of the string. Return NULL_TREE if no simplification
11925 was possible. */
11927 tree
11928 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11929 bool ignore, bool unlocked, tree len)
11931 /* If we're using an unlocked function, assume the other unlocked
11932 functions exist explicitly. */
11933 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11934 : implicit_built_in_decls[BUILT_IN_FPUTC];
11935 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11936 : implicit_built_in_decls[BUILT_IN_FWRITE];
11938 /* If the return value is used, don't do the transformation. */
11939 if (!ignore)
11940 return NULL_TREE;
11942 /* Verify the arguments in the original call. */
11943 if (!validate_arg (arg0, POINTER_TYPE)
11944 || !validate_arg (arg1, POINTER_TYPE))
11945 return NULL_TREE;
11947 if (! len)
11948 len = c_strlen (arg0, 0);
11950 /* Get the length of the string passed to fputs. If the length
11951 can't be determined, punt. */
11952 if (!len
11953 || TREE_CODE (len) != INTEGER_CST)
11954 return NULL_TREE;
11956 switch (compare_tree_int (len, 1))
11958 case -1: /* length is 0, delete the call entirely. */
11959 return omit_one_operand_loc (loc, integer_type_node,
11960 integer_zero_node, arg1);
11962 case 0: /* length is 1, call fputc. */
11964 const char *p = c_getstr (arg0);
11966 if (p != NULL)
11968 if (fn_fputc)
11969 return build_call_expr_loc (loc, fn_fputc, 2,
11970 build_int_cst (NULL_TREE, p[0]), arg1);
11971 else
11972 return NULL_TREE;
11975 /* FALLTHROUGH */
11976 case 1: /* length is greater than 1, call fwrite. */
11978 /* If optimizing for size keep fputs. */
11979 if (optimize_function_for_size_p (cfun))
11980 return NULL_TREE;
11981 /* New argument list transforming fputs(string, stream) to
11982 fwrite(string, 1, len, stream). */
11983 if (fn_fwrite)
11984 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11985 size_one_node, len, arg1);
11986 else
11987 return NULL_TREE;
11989 default:
11990 gcc_unreachable ();
11992 return NULL_TREE;
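/* For illustration, assuming the result of the call is ignored:

       fputs ("", f)        ->   0 (f is still evaluated)
       fputs ("x", f)       ->   fputc ('x', f)
       fputs ("hello", f)   ->   fwrite ("hello", 1, 5, f)

   the fwrite form is skipped when optimizing for size.  */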
11995 /* Fold the next_arg or va_start call EXP. Returns true if an error was
11996 produced, false otherwise. This is done so that we don't output the error
11997 or warning twice or three times. */
11999 bool
12000 fold_builtin_next_arg (tree exp, bool va_start_p)
12002 tree fntype = TREE_TYPE (current_function_decl);
12003 int nargs = call_expr_nargs (exp);
12004 tree arg;
12006 if (TYPE_ARG_TYPES (fntype) == 0
12007 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
12008 == void_type_node))
12010 error ("%<va_start%> used in function with fixed args");
12011 return true;
12014 if (va_start_p)
12016 if (va_start_p && (nargs != 2))
12018 error ("wrong number of arguments to function %<va_start%>");
12019 return true;
12021 arg = CALL_EXPR_ARG (exp, 1);
12023 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12024 when we checked the arguments and if needed issued a warning. */
12025 else
12027 if (nargs == 0)
12029 /* Evidently an out of date version of <stdarg.h>; can't validate
12030 va_start's second argument, but can still work as intended. */
12031 warning (0, "%<__builtin_next_arg%> called without an argument");
12032 return true;
12034 else if (nargs > 1)
12036 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12037 return true;
12039 arg = CALL_EXPR_ARG (exp, 0);
12042 if (TREE_CODE (arg) == SSA_NAME)
12043 arg = SSA_NAME_VAR (arg);
12045 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12046 or __builtin_next_arg (0) the first time we see it, after checking
12047 the arguments and if needed issuing a warning. */
12048 if (!integer_zerop (arg))
12050 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12052 /* Strip off all nops for the sake of the comparison. This
12053 is not quite the same as STRIP_NOPS. It does more.
12054 We must also strip off INDIRECT_EXPR for C++ reference
12055 parameters. */
12056 while (CONVERT_EXPR_P (arg)
12057 || TREE_CODE (arg) == INDIRECT_REF)
12058 arg = TREE_OPERAND (arg, 0);
12059 if (arg != last_parm)
12061 /* FIXME: Sometimes with the tree optimizers we can get something
12062 other than the last argument even though the user used the last
12063 argument. We just warn and set the arg to be the last
12064 argument so that we will get wrong-code because of
12065 it. */
12066 warning (0, "second parameter of %<va_start%> not last named argument");
12069 /* Undefined by C99 7.15.1.4p4 (va_start):
12070 "If the parameter parmN is declared with the register storage
12071 class, with a function or array type, or with a type that is
12072 not compatible with the type that results after application of
12073 the default argument promotions, the behavior is undefined."
12075 else if (DECL_REGISTER (arg))
12076 warning (0, "undefined behaviour when second parameter of "
12077 "%<va_start%> is declared with %<register%> storage");
12079 /* We want to verify the second parameter just once before the tree
12080 optimizers are run and then avoid keeping it in the tree,
12081 as otherwise we could warn even for correct code like:
12082 void foo (int i, ...)
12083 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12084 if (va_start_p)
12085 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12086 else
12087 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12089 return false;
12093 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12094 ORIG may be null if this is a 2-argument call. We don't attempt to
12095 simplify calls with more than 3 arguments.
12097 Return NULL_TREE if no simplification was possible, otherwise return the
12098 simplified form of the call as a tree. If IGNORED is true, it means that
12099 the caller does not use the returned value of the function. */
12101 static tree
12102 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12103 tree orig, int ignored)
12105 tree call, retval;
12106 const char *fmt_str = NULL;
12108 /* Verify the required arguments in the original call. We deal with two
12109 types of sprintf() calls: 'sprintf (str, fmt)' and
12110 'sprintf (dest, "%s", orig)'. */
12111 if (!validate_arg (dest, POINTER_TYPE)
12112 || !validate_arg (fmt, POINTER_TYPE))
12113 return NULL_TREE;
12114 if (orig && !validate_arg (orig, POINTER_TYPE))
12115 return NULL_TREE;
12117 /* Check whether the format is a literal string constant. */
12118 fmt_str = c_getstr (fmt);
12119 if (fmt_str == NULL)
12120 return NULL_TREE;
12122 call = NULL_TREE;
12123 retval = NULL_TREE;
12125 if (!init_target_chars ())
12126 return NULL_TREE;
12128 /* If the format doesn't contain % args or %%, use strcpy. */
12129 if (strchr (fmt_str, target_percent) == NULL)
12131 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
12133 if (!fn)
12134 return NULL_TREE;
12136 /* Don't optimize sprintf (buf, "abc", ptr++). */
12137 if (orig)
12138 return NULL_TREE;
12140 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12141 'format' is known to contain no % formats. */
12142 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12143 if (!ignored)
12144 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
12147 /* If the format is "%s", use strcpy if the result isn't used. */
12148 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12150 tree fn;
12151 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
12153 if (!fn)
12154 return NULL_TREE;
12156 /* Don't crash on sprintf (str1, "%s"). */
12157 if (!orig)
12158 return NULL_TREE;
12160 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12161 if (!ignored)
12163 retval = c_strlen (orig, 1);
12164 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12165 return NULL_TREE;
12167 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12170 if (call && retval)
12172 retval = fold_convert_loc
12173 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
12174 retval);
12175 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12177 else
12178 return call;
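/* For illustration:

       sprintf (buf, "hello")   ->   strcpy (buf, "hello")   [value 5]
       sprintf (buf, "%s", s)   ->   strcpy (buf, s)

   In the second form the return value is synthesized only when the length
   of S is a known constant; otherwise the transformation is done only if
   the result of the sprintf call is ignored.  */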
12181 /* Expand a call EXP to __builtin_object_size. */
12183 static rtx
12184 expand_builtin_object_size (tree exp)
12186 tree ost;
12187 int object_size_type;
12188 tree fndecl = get_callee_fndecl (exp);
12190 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12192 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12193 exp, fndecl);
12194 expand_builtin_trap ();
12195 return const0_rtx;
12198 ost = CALL_EXPR_ARG (exp, 1);
12199 STRIP_NOPS (ost);
12201 if (TREE_CODE (ost) != INTEGER_CST
12202 || tree_int_cst_sgn (ost) < 0
12203 || compare_tree_int (ost, 3) > 0)
12205 error ("%Klast argument of %D is not integer constant between 0 and 3",
12206 exp, fndecl);
12207 expand_builtin_trap ();
12208 return const0_rtx;
12211 object_size_type = tree_low_cst (ost, 0);
12213 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12216 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12217 FCODE is the BUILT_IN_* to use.
12218 Return NULL_RTX if we failed; the caller should emit a normal call,
12219 otherwise try to get the result in TARGET, if convenient (and in
12220 mode MODE if that's convenient). */
12222 static rtx
12223 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12224 enum built_in_function fcode)
12226 tree dest, src, len, size;
12228 if (!validate_arglist (exp,
12229 POINTER_TYPE,
12230 fcode == BUILT_IN_MEMSET_CHK
12231 ? INTEGER_TYPE : POINTER_TYPE,
12232 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12233 return NULL_RTX;
12235 dest = CALL_EXPR_ARG (exp, 0);
12236 src = CALL_EXPR_ARG (exp, 1);
12237 len = CALL_EXPR_ARG (exp, 2);
12238 size = CALL_EXPR_ARG (exp, 3);
12240 if (! host_integerp (size, 1))
12241 return NULL_RTX;
12243 if (host_integerp (len, 1) || integer_all_onesp (size))
12245 tree fn;
12247 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12249 warning_at (tree_nonartificial_location (exp),
12250 0, "%Kcall to %D will always overflow destination buffer",
12251 exp, get_callee_fndecl (exp));
12252 return NULL_RTX;
12255 fn = NULL_TREE;
12256 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12257 mem{cpy,pcpy,move,set} is available. */
12258 switch (fcode)
12260 case BUILT_IN_MEMCPY_CHK:
12261 fn = built_in_decls[BUILT_IN_MEMCPY];
12262 break;
12263 case BUILT_IN_MEMPCPY_CHK:
12264 fn = built_in_decls[BUILT_IN_MEMPCPY];
12265 break;
12266 case BUILT_IN_MEMMOVE_CHK:
12267 fn = built_in_decls[BUILT_IN_MEMMOVE];
12268 break;
12269 case BUILT_IN_MEMSET_CHK:
12270 fn = built_in_decls[BUILT_IN_MEMSET];
12271 break;
12272 default:
12273 break;
12276 if (! fn)
12277 return NULL_RTX;
12279 fn = build_call_expr (fn, 3, dest, src, len);
12280 STRIP_TYPE_NOPS (fn);
12281 while (TREE_CODE (fn) == COMPOUND_EXPR)
12283 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12284 EXPAND_NORMAL);
12285 fn = TREE_OPERAND (fn, 1);
12287 if (TREE_CODE (fn) == CALL_EXPR)
12288 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12289 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12291 else if (fcode == BUILT_IN_MEMSET_CHK)
12292 return NULL_RTX;
12293 else
12295 unsigned int dest_align
12296 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
12298 /* If DEST is not a pointer type, call the normal function. */
12299 if (dest_align == 0)
12300 return NULL_RTX;
12302 /* If SRC and DEST are the same (and not volatile), do nothing. */
12303 if (operand_equal_p (src, dest, 0))
12305 tree expr;
12307 if (fcode != BUILT_IN_MEMPCPY_CHK)
12309 /* Evaluate and ignore LEN in case it has side-effects. */
12310 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12311 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12314 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12315 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12318 /* __memmove_chk special case. */
12319 if (fcode == BUILT_IN_MEMMOVE_CHK)
12321 unsigned int src_align
12322 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
12324 if (src_align == 0)
12325 return NULL_RTX;
12327 /* If src is categorized for a readonly section we can use
12328 normal __memcpy_chk. */
12329 if (readonly_data_expr (src))
12331 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12332 if (!fn)
12333 return NULL_RTX;
12334 fn = build_call_expr (fn, 4, dest, src, len, size);
12335 STRIP_TYPE_NOPS (fn);
12336 while (TREE_CODE (fn) == COMPOUND_EXPR)
12338 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12339 EXPAND_NORMAL);
12340 fn = TREE_OPERAND (fn, 1);
12342 if (TREE_CODE (fn) == CALL_EXPR)
12343 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12344 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12347 return NULL_RTX;
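/* For illustration: with a constant length that fits the declared object
   size,

       __memcpy_chk (d, s, 16, 32)   ->   memcpy (d, s, 16)

   whereas a constant length larger than the size triggers the
   "will always overflow destination buffer" warning and the call is
   emitted unchanged so the run-time check can still fail.  */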
12351 /* Emit warning if a buffer overflow is detected at compile time. */
12353 static void
12354 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12356 int is_strlen = 0;
12357 tree len, size;
12358 location_t loc = tree_nonartificial_location (exp);
12360 switch (fcode)
12362 case BUILT_IN_STRCPY_CHK:
12363 case BUILT_IN_STPCPY_CHK:
12364 /* For __strcat_chk the warning will be emitted only if overflowing
12365 by at least strlen (dest) + 1 bytes. */
12366 case BUILT_IN_STRCAT_CHK:
12367 len = CALL_EXPR_ARG (exp, 1);
12368 size = CALL_EXPR_ARG (exp, 2);
12369 is_strlen = 1;
12370 break;
12371 case BUILT_IN_STRNCAT_CHK:
12372 case BUILT_IN_STRNCPY_CHK:
12373 len = CALL_EXPR_ARG (exp, 2);
12374 size = CALL_EXPR_ARG (exp, 3);
12375 break;
12376 case BUILT_IN_SNPRINTF_CHK:
12377 case BUILT_IN_VSNPRINTF_CHK:
12378 len = CALL_EXPR_ARG (exp, 1);
12379 size = CALL_EXPR_ARG (exp, 3);
12380 break;
12381 default:
12382 gcc_unreachable ();
12385 if (!len || !size)
12386 return;
12388 if (! host_integerp (size, 1) || integer_all_onesp (size))
12389 return;
12391 if (is_strlen)
12393 len = c_strlen (len, 1);
12394 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12395 return;
12397 else if (fcode == BUILT_IN_STRNCAT_CHK)
12399 tree src = CALL_EXPR_ARG (exp, 1);
12400 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12401 return;
12402 src = c_strlen (src, 1);
12403 if (! src || ! host_integerp (src, 1))
12405 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12406 exp, get_callee_fndecl (exp));
12407 return;
12409 else if (tree_int_cst_lt (src, size))
12410 return;
12412 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12413 return;
12415 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12416 exp, get_callee_fndecl (exp));
12419 /* Emit warning if a buffer overflow is detected at compile time
12420 in __sprintf_chk/__vsprintf_chk calls. */
12422 static void
12423 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12425 tree dest, size, len, fmt, flag;
12426 const char *fmt_str;
12427 int nargs = call_expr_nargs (exp);
12429 /* Verify the required arguments in the original call. */
12431 if (nargs < 4)
12432 return;
12433 dest = CALL_EXPR_ARG (exp, 0);
12434 flag = CALL_EXPR_ARG (exp, 1);
12435 size = CALL_EXPR_ARG (exp, 2);
12436 fmt = CALL_EXPR_ARG (exp, 3);
12438 if (! host_integerp (size, 1) || integer_all_onesp (size))
12439 return;
12441 /* Check whether the format is a literal string constant. */
12442 fmt_str = c_getstr (fmt);
12443 if (fmt_str == NULL)
12444 return;
12446 if (!init_target_chars ())
12447 return;
12449 /* If the format doesn't contain % args or %%, we know its size. */
12450 if (strchr (fmt_str, target_percent) == 0)
12451 len = build_int_cstu (size_type_node, strlen (fmt_str));
12452 /* If the format is "%s" and first ... argument is a string literal,
12453 we know it too. */
12454 else if (fcode == BUILT_IN_SPRINTF_CHK
12455 && strcmp (fmt_str, target_percent_s) == 0)
12457 tree arg;
12459 if (nargs < 5)
12460 return;
12461 arg = CALL_EXPR_ARG (exp, 4);
12462 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12463 return;
12465 len = c_strlen (arg, 1);
12466 if (!len || ! host_integerp (len, 1))
12467 return;
12469 else
12470 return;
12472 if (! tree_int_cst_lt (len, size))
12473 warning_at (tree_nonartificial_location (exp),
12474 0, "%Kcall to %D will always overflow destination buffer",
12475 exp, get_callee_fndecl (exp));
12478 /* Emit a warning if free is called with the address of a variable. */
12480 static void
12481 maybe_emit_free_warning (tree exp)
12483 tree arg = CALL_EXPR_ARG (exp, 0);
12485 STRIP_NOPS (arg);
12486 if (TREE_CODE (arg) != ADDR_EXPR)
12487 return;
12489 arg = get_base_address (TREE_OPERAND (arg, 0));
12490 if (arg == NULL || INDIRECT_REF_P (arg))
12491 return;
12493 if (SSA_VAR_P (arg))
12494 warning_at (tree_nonartificial_location (exp),
12495 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12496 else
12497 warning_at (tree_nonartificial_location (exp),
12498 0, "%Kattempt to free a non-heap object", exp);
12501 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12502 if possible. */
12504 tree
12505 fold_builtin_object_size (tree ptr, tree ost)
12507 tree ret = NULL_TREE;
12508 int object_size_type;
12510 if (!validate_arg (ptr, POINTER_TYPE)
12511 || !validate_arg (ost, INTEGER_TYPE))
12512 return NULL_TREE;
12514 STRIP_NOPS (ost);
12516 if (TREE_CODE (ost) != INTEGER_CST
12517 || tree_int_cst_sgn (ost) < 0
12518 || compare_tree_int (ost, 3) > 0)
12519 return NULL_TREE;
12521 object_size_type = tree_low_cst (ost, 0);
12523 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12524 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12525 and (size_t) 0 for types 2 and 3. */
12526 if (TREE_SIDE_EFFECTS (ptr))
12527 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12529 if (TREE_CODE (ptr) == ADDR_EXPR)
12530 ret = build_int_cstu (size_type_node,
12531 compute_builtin_object_size (ptr, object_size_type));
12533 else if (TREE_CODE (ptr) == SSA_NAME)
12535 unsigned HOST_WIDE_INT bytes;
12537 /* If object size is not known yet, delay folding until
12538 later. Maybe subsequent passes will help determine
12539 it. */
12540 bytes = compute_builtin_object_size (ptr, object_size_type);
12541 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12542 ? -1 : 0))
12543 ret = build_int_cstu (size_type_node, bytes);
12546 if (ret)
12548 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12549 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12550 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12551 ret = NULL_TREE;
12554 return ret;
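/* For illustration: given

       char buf[10];

   __builtin_object_size (&buf[2], 0) folds to 8.  An SSA_NAME pointer
   whose target is not known yet is left unfolded so later passes can
   retry; if nothing ever determines it, expansion above falls back to
   (size_t) -1 for types 0 and 1 and (size_t) 0 for types 2 and 3.  */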
12557 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12558 DEST, SRC, LEN, and SIZE are the arguments to the call.
12559 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12560 code of the builtin. If MAXLEN is not NULL, it is maximum length
12561 passed as third argument. */
12563 tree
12564 fold_builtin_memory_chk (location_t loc, tree fndecl,
12565 tree dest, tree src, tree len, tree size,
12566 tree maxlen, bool ignore,
12567 enum built_in_function fcode)
12569 tree fn;
12571 if (!validate_arg (dest, POINTER_TYPE)
12572 || !validate_arg (src,
12573 (fcode == BUILT_IN_MEMSET_CHK
12574 ? INTEGER_TYPE : POINTER_TYPE))
12575 || !validate_arg (len, INTEGER_TYPE)
12576 || !validate_arg (size, INTEGER_TYPE))
12577 return NULL_TREE;
12579 /* If SRC and DEST are the same (and not volatile), return DEST
12580 (resp. DEST+LEN for __mempcpy_chk). */
12581 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12583 if (fcode != BUILT_IN_MEMPCPY_CHK)
12584 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12585 dest, len);
12586 else
12588 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
12589 dest, len);
12590 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12594 if (! host_integerp (size, 1))
12595 return NULL_TREE;
12597 if (! integer_all_onesp (size))
12599 if (! host_integerp (len, 1))
12601 /* If LEN is not constant, try MAXLEN too.
12602 For MAXLEN only allow optimizing into non-_ocs function
12603 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12604 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12606 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12608 /* (void) __mempcpy_chk () can be optimized into
12609 (void) __memcpy_chk (). */
12610 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12611 if (!fn)
12612 return NULL_TREE;
12614 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12616 return NULL_TREE;
12619 else
12620 maxlen = len;
12622 if (tree_int_cst_lt (size, maxlen))
12623 return NULL_TREE;
12626 fn = NULL_TREE;
12627 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12628 mem{cpy,pcpy,move,set} is available. */
12629 switch (fcode)
12631 case BUILT_IN_MEMCPY_CHK:
12632 fn = built_in_decls[BUILT_IN_MEMCPY];
12633 break;
12634 case BUILT_IN_MEMPCPY_CHK:
12635 fn = built_in_decls[BUILT_IN_MEMPCPY];
12636 break;
12637 case BUILT_IN_MEMMOVE_CHK:
12638 fn = built_in_decls[BUILT_IN_MEMMOVE];
12639 break;
12640 case BUILT_IN_MEMSET_CHK:
12641 fn = built_in_decls[BUILT_IN_MEMSET];
12642 break;
12643 default:
12644 break;
12647 if (!fn)
12648 return NULL_TREE;
12650 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12653 /* Fold a call to the __st[rp]cpy_chk builtin.
12654 DEST, SRC, and SIZE are the arguments to the call.
12655 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12656 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12657 strings passed as second argument. */
12659 tree
12660 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12661 tree src, tree size,
12662 tree maxlen, bool ignore,
12663 enum built_in_function fcode)
12665 tree len, fn;
12667 if (!validate_arg (dest, POINTER_TYPE)
12668 || !validate_arg (src, POINTER_TYPE)
12669 || !validate_arg (size, INTEGER_TYPE))
12670 return NULL_TREE;
12672 /* If SRC and DEST are the same (and not volatile), return DEST. */
12673 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12674 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12676 if (! host_integerp (size, 1))
12677 return NULL_TREE;
12679 if (! integer_all_onesp (size))
12681 len = c_strlen (src, 1);
12682 if (! len || ! host_integerp (len, 1))
12684 /* If LEN is not constant, try MAXLEN too.
12685 For MAXLEN only allow optimizing into non-_ocs function
12686 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12687 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12689 if (fcode == BUILT_IN_STPCPY_CHK)
12691 if (! ignore)
12692 return NULL_TREE;
12694 /* If return value of __stpcpy_chk is ignored,
12695 optimize into __strcpy_chk. */
12696 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12697 if (!fn)
12698 return NULL_TREE;
12700 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12703 if (! len || TREE_SIDE_EFFECTS (len))
12704 return NULL_TREE;
12706 /* If c_strlen returned something, but not a constant,
12707 transform __strcpy_chk into __memcpy_chk. */
12708 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12709 if (!fn)
12710 return NULL_TREE;
12712 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12713 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12714 build_call_expr_loc (loc, fn, 4,
12715 dest, src, len, size));
12718 else
12719 maxlen = len;
12721 if (! tree_int_cst_lt (maxlen, size))
12722 return NULL_TREE;
12725 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12726 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12727 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12728 if (!fn)
12729 return NULL_TREE;
12731 return build_call_expr_loc (loc, fn, 2, dest, src);
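/* For illustration:

       __strcpy_chk (d, "abc", 16)   ->   strcpy (d, "abc")     [3 < 16]

   when c_strlen yields a non-constant length expression the call becomes
   __memcpy_chk (d, s, len + 1, size) instead, an ignored __stpcpy_chk
   with unknown length is downgraded to __strcpy_chk, and a size of
   (size_t) -1 drops the check in favor of a plain strcpy or stpcpy.  */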
12734 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12735 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12736 length passed as third argument. */
12738 tree
12739 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12740 tree len, tree size, tree maxlen)
12742 tree fn;
12744 if (!validate_arg (dest, POINTER_TYPE)
12745 || !validate_arg (src, POINTER_TYPE)
12746 || !validate_arg (len, INTEGER_TYPE)
12747 || !validate_arg (size, INTEGER_TYPE))
12748 return NULL_TREE;
12750 if (! host_integerp (size, 1))
12751 return NULL_TREE;
12753 if (! integer_all_onesp (size))
12755 if (! host_integerp (len, 1))
12757 /* If LEN is not constant, try MAXLEN too.
12758 For MAXLEN only allow optimizing into non-_ocs function
12759 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12760 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12761 return NULL_TREE;
12763 else
12764 maxlen = len;
12766 if (tree_int_cst_lt (size, maxlen))
12767 return NULL_TREE;
12770 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12771 fn = built_in_decls[BUILT_IN_STRNCPY];
12772 if (!fn)
12773 return NULL_TREE;
12775 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12778 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12779 are the arguments to the call. */
12781 static tree
12782 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12783 tree src, tree size)
12785 tree fn;
12786 const char *p;
12788 if (!validate_arg (dest, POINTER_TYPE)
12789 || !validate_arg (src, POINTER_TYPE)
12790 || !validate_arg (size, INTEGER_TYPE))
12791 return NULL_TREE;
12793 p = c_getstr (src);
12794 /* If the SRC parameter is "", return DEST. */
12795 if (p && *p == '\0')
12796 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12798 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12799 return NULL_TREE;
12801 /* If __builtin_strcat_chk is used, assume strcat is available. */
12802 fn = built_in_decls[BUILT_IN_STRCAT];
12803 if (!fn)
12804 return NULL_TREE;
12806 return build_call_expr_loc (loc, fn, 2, dest, src);
12809 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12810 LEN, and SIZE. */
12812 static tree
12813 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12814 tree dest, tree src, tree len, tree size)
12816 tree fn;
12817 const char *p;
12819 if (!validate_arg (dest, POINTER_TYPE)
12820 || !validate_arg (src, POINTER_TYPE)
12821 || !validate_arg (len, INTEGER_TYPE)
12822 || !validate_arg (size, INTEGER_TYPE))
12823 return NULL_TREE;
12825 p = c_getstr (src);
12826 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12827 if (p && *p == '\0')
12828 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12829 else if (integer_zerop (len))
12830 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12832 if (! host_integerp (size, 1))
12833 return NULL_TREE;
12835 if (! integer_all_onesp (size))
12837 tree src_len = c_strlen (src, 1);
12838 if (src_len
12839 && host_integerp (src_len, 1)
12840 && host_integerp (len, 1)
12841 && ! tree_int_cst_lt (len, src_len))
12843 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12844 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12845 if (!fn)
12846 return NULL_TREE;
12848 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12850 return NULL_TREE;
12853 /* If __builtin_strncat_chk is used, assume strncat is available. */
12854 fn = built_in_decls[BUILT_IN_STRNCAT];
12855 if (!fn)
12856 return NULL_TREE;
12858 return build_call_expr_loc (loc, fn, 3, dest, src, len);
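/* An illustrative sketch with hypothetical arguments: for
   __builtin___strncat_chk (d, "abc", 8, os), LEN (8) is at least
   strlen ("abc"), so the call is narrowed to
   __strcat_chk (d, "abc", os); a zero LEN or an empty SRC folds to D,
   and an unknown OS of (size_t) -1 gives plain strncat.  */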
12861 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12862 a normal call should be emitted rather than expanding the function
12863 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12865 static tree
12866 fold_builtin_sprintf_chk (location_t loc, tree exp,
12867 enum built_in_function fcode)
12869 tree dest, size, len, fn, fmt, flag;
12870 const char *fmt_str;
12871 int nargs = call_expr_nargs (exp);
12873 /* Verify the required arguments in the original call. */
12874 if (nargs < 4)
12875 return NULL_TREE;
12876 dest = CALL_EXPR_ARG (exp, 0);
12877 if (!validate_arg (dest, POINTER_TYPE))
12878 return NULL_TREE;
12879 flag = CALL_EXPR_ARG (exp, 1);
12880 if (!validate_arg (flag, INTEGER_TYPE))
12881 return NULL_TREE;
12882 size = CALL_EXPR_ARG (exp, 2);
12883 if (!validate_arg (size, INTEGER_TYPE))
12884 return NULL_TREE;
12885 fmt = CALL_EXPR_ARG (exp, 3);
12886 if (!validate_arg (fmt, POINTER_TYPE))
12887 return NULL_TREE;
12889 if (! host_integerp (size, 1))
12890 return NULL_TREE;
12892 len = NULL_TREE;
12894 if (!init_target_chars ())
12895 return NULL_TREE;
12897 /* Check whether the format is a literal string constant. */
12898 fmt_str = c_getstr (fmt);
12899 if (fmt_str != NULL)
12901 /* If the format doesn't contain % args or %%, we know the size. */
12902 if (strchr (fmt_str, target_percent) == 0)
12904 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12905 len = build_int_cstu (size_type_node, strlen (fmt_str));
12907 /* If the format is "%s" and first ... argument is a string literal,
12908 we know the size too. */
12909 else if (fcode == BUILT_IN_SPRINTF_CHK
12910 && strcmp (fmt_str, target_percent_s) == 0)
12912 tree arg;
12914 if (nargs == 5)
12916 arg = CALL_EXPR_ARG (exp, 4);
12917 if (validate_arg (arg, POINTER_TYPE))
12919 len = c_strlen (arg, 1);
12920 if (! len || ! host_integerp (len, 1))
12921 len = NULL_TREE;
12927 if (! integer_all_onesp (size))
12929 if (! len || ! tree_int_cst_lt (len, size))
12930 return NULL_TREE;
12933 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12934 or if format doesn't contain % chars or is "%s". */
12935 if (! integer_zerop (flag))
12937 if (fmt_str == NULL)
12938 return NULL_TREE;
12939 if (strchr (fmt_str, target_percent) != NULL
12940 && strcmp (fmt_str, target_percent_s))
12941 return NULL_TREE;
12944 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12945 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12946 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12947 if (!fn)
12948 return NULL_TREE;
12950 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
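/* An illustrative sketch with hypothetical arguments: for
   __builtin___sprintf_chk (buf, 1, os, "abc") the output length (3)
   is known from the literal format, so once it is smaller than OS
   (or OS is (size_t) -1) the call is rewritten to
   sprintf (buf, "abc").  A "%s" format with a string literal
   argument is handled the same way; other % directives keep the
   checking call.  */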
12953 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
12954 a normal call should be emitted rather than expanding the function
12955 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12956 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12957 passed as second argument. */
12959 tree
12960 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12961 enum built_in_function fcode)
12963 tree dest, size, len, fn, fmt, flag;
12964 const char *fmt_str;
12966 /* Verify the required arguments in the original call. */
12967 if (call_expr_nargs (exp) < 5)
12968 return NULL_TREE;
12969 dest = CALL_EXPR_ARG (exp, 0);
12970 if (!validate_arg (dest, POINTER_TYPE))
12971 return NULL_TREE;
12972 len = CALL_EXPR_ARG (exp, 1);
12973 if (!validate_arg (len, INTEGER_TYPE))
12974 return NULL_TREE;
12975 flag = CALL_EXPR_ARG (exp, 2);
12976 if (!validate_arg (flag, INTEGER_TYPE))
12977 return NULL_TREE;
12978 size = CALL_EXPR_ARG (exp, 3);
12979 if (!validate_arg (size, INTEGER_TYPE))
12980 return NULL_TREE;
12981 fmt = CALL_EXPR_ARG (exp, 4);
12982 if (!validate_arg (fmt, POINTER_TYPE))
12983 return NULL_TREE;
12985 if (! host_integerp (size, 1))
12986 return NULL_TREE;
12988 if (! integer_all_onesp (size))
12990 if (! host_integerp (len, 1))
12992 /* If LEN is not constant, try MAXLEN too.
12993 For MAXLEN only allow optimizing into non-_ocs function
12994 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12995 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12996 return NULL_TREE;
12998 else
12999 maxlen = len;
13001 if (tree_int_cst_lt (size, maxlen))
13002 return NULL_TREE;
13005 if (!init_target_chars ())
13006 return NULL_TREE;
13008 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13009 or if format doesn't contain % chars or is "%s". */
13010 if (! integer_zerop (flag))
13012 fmt_str = c_getstr (fmt);
13013 if (fmt_str == NULL)
13014 return NULL_TREE;
13015 if (strchr (fmt_str, target_percent) != NULL
13016 && strcmp (fmt_str, target_percent_s))
13017 return NULL_TREE;
13020 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13021 available. */
13022 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13023 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13024 if (!fn)
13025 return NULL_TREE;
13027 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
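/* An illustrative sketch with hypothetical arguments: a call such as
   __builtin___snprintf_chk (buf, 32, 0, os, "%s", s) becomes
   snprintf (buf, 32, "%s", s) once OS is known to be at least 32 or
   is the "unknown" value (size_t) -1; if OS could be smaller than
   the constant LEN, the checking variant is preserved.  */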
13030 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13031 FMT and ARG are the arguments to the call; we don't fold cases with
13032 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13034 Return NULL_TREE if no simplification was possible, otherwise return the
13035 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13036 code of the function to be simplified. */
13038 static tree
13039 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13040 tree arg, bool ignore,
13041 enum built_in_function fcode)
13043 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13044 const char *fmt_str = NULL;
13046 /* If the return value is used, don't do the transformation. */
13047 if (! ignore)
13048 return NULL_TREE;
13050 /* Verify the required arguments in the original call. */
13051 if (!validate_arg (fmt, POINTER_TYPE))
13052 return NULL_TREE;
13054 /* Check whether the format is a literal string constant. */
13055 fmt_str = c_getstr (fmt);
13056 if (fmt_str == NULL)
13057 return NULL_TREE;
13059 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13061 /* If we're using an unlocked function, assume the other
13062 unlocked functions exist explicitly. */
13063 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
13064 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
13066 else
13068 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
13069 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
13072 if (!init_target_chars ())
13073 return NULL_TREE;
13075 if (strcmp (fmt_str, target_percent_s) == 0
13076 || strchr (fmt_str, target_percent) == NULL)
13078 const char *str;
13080 if (strcmp (fmt_str, target_percent_s) == 0)
13082 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13083 return NULL_TREE;
13085 if (!arg || !validate_arg (arg, POINTER_TYPE))
13086 return NULL_TREE;
13088 str = c_getstr (arg);
13089 if (str == NULL)
13090 return NULL_TREE;
13092 else
13094 /* The format specifier doesn't contain any '%' characters. */
13095 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13096 && arg)
13097 return NULL_TREE;
13098 str = fmt_str;
13101 /* If the string was "", printf does nothing. */
13102 if (str[0] == '\0')
13103 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13105 /* If the string has length of 1, call putchar. */
13106 if (str[1] == '\0')
13108 /* Given printf("c"), (where c is any one character,)
13109 convert "c"[0] to an int and pass that to the replacement
13110 function. */
13111 newarg = build_int_cst (NULL_TREE, str[0]);
13112 if (fn_putchar)
13113 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13115 else
13117 /* If the string was "string\n", call puts("string"). */
13118 size_t len = strlen (str);
13119 if ((unsigned char)str[len - 1] == target_newline)
13121 /* Create a NUL-terminated string that's one char shorter
13122 than the original, stripping off the trailing '\n'. */
13123 char *newstr = XALLOCAVEC (char, len);
13124 memcpy (newstr, str, len - 1);
13125 newstr[len - 1] = 0;
13127 newarg = build_string_literal (len, newstr);
13128 if (fn_puts)
13129 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13131 else
13132 /* We'd like to arrange to call fputs(string,stdout) here,
13133 but we need stdout and don't have a way to get it yet. */
13134 return NULL_TREE;
13138 /* The other optimizations can be done only on the non-va_list variants. */
13139 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13140 return NULL_TREE;
13142 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13143 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13145 if (!arg || !validate_arg (arg, POINTER_TYPE))
13146 return NULL_TREE;
13147 if (fn_puts)
13148 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13151 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13152 else if (strcmp (fmt_str, target_percent_c) == 0)
13154 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13155 return NULL_TREE;
13156 if (fn_putchar)
13157 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13160 if (!call)
13161 return NULL_TREE;
13163 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
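/* An illustrative summary on hypothetical user code whose printf
   result is unused; the folds above correspond to:
     printf ("");           =>  0 (call removed)
     printf ("x");          =>  putchar ('x')
     printf ("hello\n");    =>  puts ("hello")
     printf ("%s\n", str);  =>  puts (str)
     printf ("%c", c);      =>  putchar (c)
   provided the matching putchar/puts declaration is available.  */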
13166 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
13167 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13168 more than 3 arguments, and ARG may be null in the 2-argument case.
13170 Return NULL_TREE if no simplification was possible, otherwise return the
13171 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13172 code of the function to be simplified. */
13174 static tree
13175 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13176 tree fmt, tree arg, bool ignore,
13177 enum built_in_function fcode)
13179 tree fn_fputc, fn_fputs, call = NULL_TREE;
13180 const char *fmt_str = NULL;
13182 /* If the return value is used, don't do the transformation. */
13183 if (! ignore)
13184 return NULL_TREE;
13186 /* Verify the required arguments in the original call. */
13187 if (!validate_arg (fp, POINTER_TYPE))
13188 return NULL_TREE;
13189 if (!validate_arg (fmt, POINTER_TYPE))
13190 return NULL_TREE;
13192 /* Check whether the format is a literal string constant. */
13193 fmt_str = c_getstr (fmt);
13194 if (fmt_str == NULL)
13195 return NULL_TREE;
13197 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13199 /* If we're using an unlocked function, assume the other
13200 unlocked functions exist explicitly. */
13201 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
13202 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
13204 else
13206 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
13207 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
13210 if (!init_target_chars ())
13211 return NULL_TREE;
13213 /* If the format doesn't contain % args or %%, use strcpy. */
13214 if (strchr (fmt_str, target_percent) == NULL)
13216 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13217 && arg)
13218 return NULL_TREE;
13220 /* If the format specifier was "", fprintf does nothing. */
13221 if (fmt_str[0] == '\0')
13223 /* If FP has side-effects, just wait until gimplification is
13224 done. */
13225 if (TREE_SIDE_EFFECTS (fp))
13226 return NULL_TREE;
13228 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13231 /* When "string" doesn't contain %, replace all cases of
13232 fprintf (fp, string) with fputs (string, fp). The fputs
13233 builtin will take care of special cases like length == 1. */
13234 if (fn_fputs)
13235 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13238 /* The other optimizations can be done only on the non-va_list variants. */
13239 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13240 return NULL_TREE;
13242 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13243 else if (strcmp (fmt_str, target_percent_s) == 0)
13245 if (!arg || !validate_arg (arg, POINTER_TYPE))
13246 return NULL_TREE;
13247 if (fn_fputs)
13248 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13251 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13252 else if (strcmp (fmt_str, target_percent_c) == 0)
13254 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13255 return NULL_TREE;
13256 if (fn_fputc)
13257 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13260 if (!call)
13261 return NULL_TREE;
13262 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
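/* An illustrative summary on hypothetical user code, mirroring the
   printf case above; when the result is unused:
     fprintf (fp, "");         =>  0 (dropped unless FP has side effects)
     fprintf (fp, "message");  =>  fputs ("message", fp)
     fprintf (fp, "%s", str);  =>  fputs (str, fp)
     fprintf (fp, "%c", c);    =>  fputc (c, fp)  */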
13265 /* Initialize format string characters in the target charset. */
13267 static bool
13268 init_target_chars (void)
13270 static bool init;
13271 if (!init)
13273 target_newline = lang_hooks.to_target_charset ('\n');
13274 target_percent = lang_hooks.to_target_charset ('%');
13275 target_c = lang_hooks.to_target_charset ('c');
13276 target_s = lang_hooks.to_target_charset ('s');
13277 if (target_newline == 0 || target_percent == 0 || target_c == 0
13278 || target_s == 0)
13279 return false;
13281 target_percent_c[0] = target_percent;
13282 target_percent_c[1] = target_c;
13283 target_percent_c[2] = '\0';
13285 target_percent_s[0] = target_percent;
13286 target_percent_s[1] = target_s;
13287 target_percent_s[2] = '\0';
13289 target_percent_s_newline[0] = target_percent;
13290 target_percent_s_newline[1] = target_s;
13291 target_percent_s_newline[2] = target_newline;
13292 target_percent_s_newline[3] = '\0';
13294 init = true;
13296 return true;
13299 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13300 and no overflow/underflow occurred. INEXACT is true if M was not
13301 exactly calculated. TYPE is the tree type for the result. This
13302 function assumes that the caller cleared the MPFR flags before
13303 calculating M, so that any flag set since then can be detected
13304 here. Return NULL_TREE if any checks fail. */
13306 static tree
13307 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13309 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13310 overflow/underflow occurred. If -frounding-math, proceed iff the
13311 result of calling FUNC was exact. */
13312 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13313 && (!flag_rounding_math || !inexact))
13315 REAL_VALUE_TYPE rr;
13317 real_from_mpfr (&rr, m, type, GMP_RNDN);
13318 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13319 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13320 but the mpfr_t is not, then we underflowed in the
13321 conversion. */
13322 if (real_isfinite (&rr)
13323 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13325 REAL_VALUE_TYPE rmode;
13327 real_convert (&rmode, TYPE_MODE (type), &rr);
13328 /* Proceed iff the specified mode can hold the value. */
13329 if (real_identical (&rmode, &rr))
13330 return build_real (type, rmode);
13333 return NULL_TREE;
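/* An illustrative usage sketch (roughly the pattern followed by the
   helpers below):
     mpfr_clear_flags ();
     inexact = mpfr_sin (m, m, GMP_RNDN);
     result = do_mpfr_ckconv (m, type, inexact);
   so that the overflow/underflow flags examined above reflect only
   the single MPFR operation being folded.  */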
13336 #ifdef HAVE_mpc
13337 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13338 number and no overflow/underflow occurred. INEXACT is true if M
13339 was not exactly calculated. TYPE is the tree type for the result.
13340 This function assumes that the caller cleared the MPFR flags
13341 before calculating M, so that any flag set since then can be
13342 detected here. Return NULL_TREE if any checks fail. */
13344 static tree
13345 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact)
13347 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13348 overflow/underflow occurred. If -frounding-math, proceed iff the
13349 result of calling FUNC was exact. */
13350 if (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13351 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13352 && (!flag_rounding_math || !inexact))
13354 REAL_VALUE_TYPE re, im;
13356 real_from_mpfr (&re, mpc_realref (m), type, GMP_RNDN);
13357 real_from_mpfr (&im, mpc_imagref (m), type, GMP_RNDN);
13358 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13359 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13360 but the mpfr_t is not, then we underflowed in the
13361 conversion. */
13362 if (real_isfinite (&re) && real_isfinite (&im)
13363 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13364 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0))
13366 REAL_VALUE_TYPE re_mode, im_mode;
13368 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13369 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13370 /* Proceed iff the specified mode can hold the value. */
13371 if (real_identical (&re_mode, &re) && real_identical (&im_mode, &im))
13372 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13373 build_real (TREE_TYPE (type), im_mode));
13376 return NULL_TREE;
13378 #endif /* HAVE_mpc */
13380 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13381 FUNC on it and return the resulting value as a tree with type TYPE.
13382 If MIN and/or MAX are not NULL, then the supplied ARG must be
13383 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13384 acceptable values, otherwise they are not. The mpfr precision is
13385 set to the precision of TYPE. We assume that function FUNC returns
13386 zero if the result could be calculated exactly within the requested
13387 precision. */
13389 static tree
13390 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13391 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13392 bool inclusive)
13394 tree result = NULL_TREE;
13396 STRIP_NOPS (arg);
13398 /* To proceed, MPFR must exactly represent the target floating point
13399 format, which only happens when the target base equals two. */
13400 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13401 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13403 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13405 if (real_isfinite (ra)
13406 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13407 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13409 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13410 const int prec = fmt->p;
13411 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13412 int inexact;
13413 mpfr_t m;
13415 mpfr_init2 (m, prec);
13416 mpfr_from_real (m, ra, GMP_RNDN);
13417 mpfr_clear_flags ();
13418 inexact = func (m, m, rnd);
13419 result = do_mpfr_ckconv (m, type, inexact);
13420 mpfr_clear (m);
13424 return result;
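/* An illustrative use (roughly how the folders earlier in this file
   call it):
     do_mpfr_arg1 (arg, type, mpfr_exp, NULL, NULL, 0);
   folds e.g. __builtin_exp (1.0) to a REAL_CST at compile time, with
   MIN/MAX supplying the valid domain for functions such as acos.  */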
13427 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13428 FUNC on it and return the resulting value as a tree with type TYPE.
13429 The mpfr precision is set to the precision of TYPE. We assume that
13430 function FUNC returns zero if the result could be calculated
13431 exactly within the requested precision. */
13433 static tree
13434 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13435 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13437 tree result = NULL_TREE;
13439 STRIP_NOPS (arg1);
13440 STRIP_NOPS (arg2);
13442 /* To proceed, MPFR must exactly represent the target floating point
13443 format, which only happens when the target base equals two. */
13444 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13445 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13446 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13448 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13449 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13451 if (real_isfinite (ra1) && real_isfinite (ra2))
13453 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13454 const int prec = fmt->p;
13455 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13456 int inexact;
13457 mpfr_t m1, m2;
13459 mpfr_inits2 (prec, m1, m2, NULL);
13460 mpfr_from_real (m1, ra1, GMP_RNDN);
13461 mpfr_from_real (m2, ra2, GMP_RNDN);
13462 mpfr_clear_flags ();
13463 inexact = func (m1, m1, m2, rnd);
13464 result = do_mpfr_ckconv (m1, type, inexact);
13465 mpfr_clears (m1, m2, NULL);
13469 return result;
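/* An illustrative use (roughly how earlier folders call it):
     do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
   folds e.g. __builtin_atan2 (1.0, 2.0) when both operands are
   REAL_CSTs; do_mpfr_arg3 below plays the same role for fma.  */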
13472 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13473 FUNC on it and return the resulting value as a tree with type TYPE.
13474 The mpfr precision is set to the precision of TYPE. We assume that
13475 function FUNC returns zero if the result could be calculated
13476 exactly within the requested precision. */
13478 static tree
13479 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13480 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13482 tree result = NULL_TREE;
13484 STRIP_NOPS (arg1);
13485 STRIP_NOPS (arg2);
13486 STRIP_NOPS (arg3);
13488 /* To proceed, MPFR must exactly represent the target floating point
13489 format, which only happens when the target base equals two. */
13490 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13491 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13492 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13493 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13495 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13496 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13497 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13499 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13501 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13502 const int prec = fmt->p;
13503 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13504 int inexact;
13505 mpfr_t m1, m2, m3;
13507 mpfr_inits2 (prec, m1, m2, m3, NULL);
13508 mpfr_from_real (m1, ra1, GMP_RNDN);
13509 mpfr_from_real (m2, ra2, GMP_RNDN);
13510 mpfr_from_real (m3, ra3, GMP_RNDN);
13511 mpfr_clear_flags ();
13512 inexact = func (m1, m1, m2, m3, rnd);
13513 result = do_mpfr_ckconv (m1, type, inexact);
13514 mpfr_clears (m1, m2, m3, NULL);
13518 return result;
13521 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13522 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13523 If ARG_SINP and ARG_COSP are NULL then the result is returned
13524 as a complex value.
13525 The type is taken from the type of ARG and is used for setting the
13526 precision of the calculation and results. */
13528 static tree
13529 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13531 tree const type = TREE_TYPE (arg);
13532 tree result = NULL_TREE;
13534 STRIP_NOPS (arg);
13536 /* To proceed, MPFR must exactly represent the target floating point
13537 format, which only happens when the target base equals two. */
13538 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13539 && TREE_CODE (arg) == REAL_CST
13540 && !TREE_OVERFLOW (arg))
13542 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13544 if (real_isfinite (ra))
13546 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13547 const int prec = fmt->p;
13548 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13549 tree result_s, result_c;
13550 int inexact;
13551 mpfr_t m, ms, mc;
13553 mpfr_inits2 (prec, m, ms, mc, NULL);
13554 mpfr_from_real (m, ra, GMP_RNDN);
13555 mpfr_clear_flags ();
13556 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13557 result_s = do_mpfr_ckconv (ms, type, inexact);
13558 result_c = do_mpfr_ckconv (mc, type, inexact);
13559 mpfr_clears (m, ms, mc, NULL);
13560 if (result_s && result_c)
13562 /* If we are to return in a complex value do so. */
13563 if (!arg_sinp && !arg_cosp)
13564 return build_complex (build_complex_type (type),
13565 result_c, result_s);
13567 /* Dereference the sin/cos pointer arguments. */
13568 arg_sinp = build_fold_indirect_ref (arg_sinp);
13569 arg_cosp = build_fold_indirect_ref (arg_cosp);
13570 /* Proceed iff valid pointer types were passed in. */
13571 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13572 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13574 /* Set the values. */
13575 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13576 result_s);
13577 TREE_SIDE_EFFECTS (result_s) = 1;
13578 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13579 result_c);
13580 TREE_SIDE_EFFECTS (result_c) = 1;
13581 /* Combine the assignments into a compound expr. */
13582 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13583 result_s, result_c));
13588 return result;
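/* An illustrative note: with a constant argument, sincos (x, &s, &c)
   folds into the COMPOUND_EXPR built above that stores the two
   constants through the pointers, while the ARG_SINP == ARG_COSP ==
   NULL case serves cexpi-style callers that want the sin/cos pair as
   a single complex constant.  */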
13591 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13592 two-argument mpfr order N Bessel function FUNC on them and return
13593 the resulting value as a tree with type TYPE. The mpfr precision
13594 is set to the precision of TYPE. We assume that function FUNC
13595 returns zero if the result could be calculated exactly within the
13596 requested precision. */
13597 static tree
13598 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13599 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13600 const REAL_VALUE_TYPE *min, bool inclusive)
13602 tree result = NULL_TREE;
13604 STRIP_NOPS (arg1);
13605 STRIP_NOPS (arg2);
13607 /* To proceed, MPFR must exactly represent the target floating point
13608 format, which only happens when the target base equals two. */
13609 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13610 && host_integerp (arg1, 0)
13611 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13613 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13614 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13616 if (n == (long)n
13617 && real_isfinite (ra)
13618 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13620 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13621 const int prec = fmt->p;
13622 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13623 int inexact;
13624 mpfr_t m;
13626 mpfr_init2 (m, prec);
13627 mpfr_from_real (m, ra, GMP_RNDN);
13628 mpfr_clear_flags ();
13629 inexact = func (m, n, m, rnd);
13630 result = do_mpfr_ckconv (m, type, inexact);
13631 mpfr_clear (m);
13635 return result;
13638 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13639 the pointer *(ARG_QUO) and return the result. The type is taken
13640 from the type of ARG0 and is used for setting the precision of the
13641 calculation and results. */
13643 static tree
13644 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13646 tree const type = TREE_TYPE (arg0);
13647 tree result = NULL_TREE;
13649 STRIP_NOPS (arg0);
13650 STRIP_NOPS (arg1);
13652 /* To proceed, MPFR must exactly represent the target floating point
13653 format, which only happens when the target base equals two. */
13654 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13655 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13656 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13658 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13659 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13661 if (real_isfinite (ra0) && real_isfinite (ra1))
13663 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13664 const int prec = fmt->p;
13665 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13666 tree result_rem;
13667 long integer_quo;
13668 mpfr_t m0, m1;
13670 mpfr_inits2 (prec, m0, m1, NULL);
13671 mpfr_from_real (m0, ra0, GMP_RNDN);
13672 mpfr_from_real (m1, ra1, GMP_RNDN);
13673 mpfr_clear_flags ();
13674 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13675 /* Remquo is independent of the rounding mode, so pass
13676 inexact=0 to do_mpfr_ckconv(). */
13677 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13678 mpfr_clears (m0, m1, NULL);
13679 if (result_rem)
13681 /* MPFR calculates quo in the host's long so it may
13682 return more bits in quo than the target int can hold
13683 if sizeof(host long) > sizeof(target int). This can
13684 happen even for native compilers in LP64 mode. In
13685 these cases, reduce the quo value modulo the largest
13686 number that the target int can hold, leaving one
13687 bit for the sign. */
13688 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13689 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13691 /* Dereference the quo pointer argument. */
13692 arg_quo = build_fold_indirect_ref (arg_quo);
13693 /* Proceed iff a valid pointer type was passed in. */
13694 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13696 /* Set the value. */
13697 tree result_quo = fold_build2 (MODIFY_EXPR,
13698 TREE_TYPE (arg_quo), arg_quo,
13699 build_int_cst (NULL, integer_quo));
13700 TREE_SIDE_EFFECTS (result_quo) = 1;
13701 /* Combine the quo assignment with the rem. */
13702 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13703 result_quo, result_rem));
13708 return result;
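/* An illustrative note: for constant operands, remquo (x, y, &q)
   folds into "*(&q) = <quotient>, <remainder constant>", i.e. the
   COMPOUND_EXPR assembled above, with the quotient value truncated
   to what the target int can represent.  */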
13711 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13712 resulting value as a tree with type TYPE. The mpfr precision is
13713 set to the precision of TYPE. We assume that this mpfr function
13714 returns zero if the result could be calculated exactly within the
13715 requested precision. In addition, the integer pointer represented
13716 by ARG_SG will be dereferenced and set to the appropriate signgam
13717 (-1,1) value. */
13719 static tree
13720 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13722 tree result = NULL_TREE;
13724 STRIP_NOPS (arg);
13726 /* To proceed, MPFR must exactly represent the target floating point
13727 format, which only happens when the target base equals two. Also
13728 verify ARG is a constant and that ARG_SG is an int pointer. */
13729 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13730 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13731 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13732 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13734 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13736 /* In addition to NaN and Inf, the argument cannot be zero or a
13737 negative integer. */
13738 if (real_isfinite (ra)
13739 && ra->cl != rvc_zero
13740 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13742 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13743 const int prec = fmt->p;
13744 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13745 int inexact, sg;
13746 mpfr_t m;
13747 tree result_lg;
13749 mpfr_init2 (m, prec);
13750 mpfr_from_real (m, ra, GMP_RNDN);
13751 mpfr_clear_flags ();
13752 inexact = mpfr_lgamma (m, &sg, m, rnd);
13753 result_lg = do_mpfr_ckconv (m, type, inexact);
13754 mpfr_clear (m);
13755 if (result_lg)
13757 tree result_sg;
13759 /* Dereference the arg_sg pointer argument. */
13760 arg_sg = build_fold_indirect_ref (arg_sg);
13761 /* Assign the signgam value into *arg_sg. */
13762 result_sg = fold_build2 (MODIFY_EXPR,
13763 TREE_TYPE (arg_sg), arg_sg,
13764 build_int_cst (NULL, sg));
13765 TREE_SIDE_EFFECTS (result_sg) = 1;
13766 /* Combine the signgam assignment with the lgamma result. */
13767 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13768 result_sg, result_lg));
13773 return result;
13776 #ifdef HAVE_mpc
13777 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13778 function FUNC on it and return the resulting value as a tree with
13779 type TYPE. The mpfr precision is set to the precision of TYPE. We
13780 assume that function FUNC returns zero if the result could be
13781 calculated exactly within the requested precision. */
13783 static tree
13784 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13786 tree result = NULL_TREE;
13788 STRIP_NOPS (arg);
13790 /* To proceed, MPFR must exactly represent the target floating point
13791 format, which only happens when the target base equals two. */
13792 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13793 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13794 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13796 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13797 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13799 if (real_isfinite (re) && real_isfinite (im))
13801 const struct real_format *const fmt =
13802 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13803 const int prec = fmt->p;
13804 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13805 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13806 int inexact;
13807 mpc_t m;
13809 mpc_init2 (m, prec);
13810 mpfr_from_real (mpc_realref(m), re, rnd);
13811 mpfr_from_real (mpc_imagref(m), im, rnd);
13812 mpfr_clear_flags ();
13813 inexact = func (m, m, crnd);
13814 result = do_mpc_ckconv (m, type, inexact);
13815 mpc_clear (m);
13819 return result;
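/* An illustrative use, assuming an MPC entry point such as mpc_cos
   (which has the required signature):
     do_mpc_arg1 (arg, type, mpc_cos);
   folds e.g. __builtin_ccos of a COMPLEX_CST into a COMPLEX_CST.  */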
13822 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13823 mpc function FUNC on it and return the resulting value as a tree
13824 with type TYPE. The mpfr precision is set to the precision of
13825 TYPE. We assume that function FUNC returns zero if the result
13826 could be calculated exactly within the requested precision. */
13828 #ifdef HAVE_mpc_pow
13829 static tree
13830 do_mpc_arg2 (tree arg0, tree arg1, tree type,
13831 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13833 tree result = NULL_TREE;
13835 STRIP_NOPS (arg0);
13836 STRIP_NOPS (arg1);
13838 /* To proceed, MPFR must exactly represent the target floating point
13839 format, which only happens when the target base equals two. */
13840 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13841 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13842 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13843 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13844 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13846 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13847 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13848 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13849 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13851 if (real_isfinite (re0) && real_isfinite (im0)
13852 && real_isfinite (re1) && real_isfinite (im1))
13854 const struct real_format *const fmt =
13855 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13856 const int prec = fmt->p;
13857 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13858 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13859 int inexact;
13860 mpc_t m0, m1;
13862 mpc_init2 (m0, prec);
13863 mpc_init2 (m1, prec);
13864 mpfr_from_real (mpc_realref(m0), re0, rnd);
13865 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13866 mpfr_from_real (mpc_realref(m1), re1, rnd);
13867 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13868 mpfr_clear_flags ();
13869 inexact = func (m0, m0, m1, crnd);
13870 result = do_mpc_ckconv (m0, type, inexact);
13871 mpc_clear (m0);
13872 mpc_clear (m1);
13876 return result;
13878 # endif
13879 #endif /* HAVE_mpc */
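/* An illustrative use: when mpc_pow is available, the two-argument
   helper above can be invoked roughly as
     do_mpc_arg2 (arg0, arg1, type, mpc_pow);
   turning __builtin_cpow of two COMPLEX_CSTs into a COMPLEX_CST.  */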
13881 /* FIXME tuples.
13882 The functions below provide an alternate interface for folding
13883 builtin function calls presented as GIMPLE_CALL statements rather
13884 than as CALL_EXPRs. The folded result is still expressed as a
13885 tree. There is too much code duplication in the handling of
13886 varargs functions, and a more intrusive re-factoring would permit
13887 better sharing of code between the tree and statement-based
13888 versions of these functions. */
13890 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13891 along with N new arguments specified as the "..." parameters. SKIP
13892 is the number of arguments in STMT to be omitted. This function is used
13893 to do varargs-to-varargs transformations. */
13895 static tree
13896 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13898 int oldnargs = gimple_call_num_args (stmt);
13899 int nargs = oldnargs - skip + n;
13900 tree fntype = TREE_TYPE (fndecl);
13901 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13902 tree *buffer;
13903 int i, j;
13904 va_list ap;
13905 location_t loc = gimple_location (stmt);
13907 buffer = XALLOCAVEC (tree, nargs);
13908 va_start (ap, n);
13909 for (i = 0; i < n; i++)
13910 buffer[i] = va_arg (ap, tree);
13911 va_end (ap);
13912 for (j = skip; j < oldnargs; j++, i++)
13913 buffer[i] = gimple_call_arg (stmt, j);
13915 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13918 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13919 a normal call should be emitted rather than expanding the function
13920 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13922 static tree
13923 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13925 tree dest, size, len, fn, fmt, flag;
13926 const char *fmt_str;
13927 int nargs = gimple_call_num_args (stmt);
13929 /* Verify the required arguments in the original call. */
13930 if (nargs < 4)
13931 return NULL_TREE;
13932 dest = gimple_call_arg (stmt, 0);
13933 if (!validate_arg (dest, POINTER_TYPE))
13934 return NULL_TREE;
13935 flag = gimple_call_arg (stmt, 1);
13936 if (!validate_arg (flag, INTEGER_TYPE))
13937 return NULL_TREE;
13938 size = gimple_call_arg (stmt, 2);
13939 if (!validate_arg (size, INTEGER_TYPE))
13940 return NULL_TREE;
13941 fmt = gimple_call_arg (stmt, 3);
13942 if (!validate_arg (fmt, POINTER_TYPE))
13943 return NULL_TREE;
13945 if (! host_integerp (size, 1))
13946 return NULL_TREE;
13948 len = NULL_TREE;
13950 if (!init_target_chars ())
13951 return NULL_TREE;
13953 /* Check whether the format is a literal string constant. */
13954 fmt_str = c_getstr (fmt);
13955 if (fmt_str != NULL)
13957 /* If the format doesn't contain % args or %%, we know the size. */
13958 if (strchr (fmt_str, target_percent) == 0)
13960 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13961 len = build_int_cstu (size_type_node, strlen (fmt_str));
13963 /* If the format is "%s" and first ... argument is a string literal,
13964 we know the size too. */
13965 else if (fcode == BUILT_IN_SPRINTF_CHK
13966 && strcmp (fmt_str, target_percent_s) == 0)
13968 tree arg;
13970 if (nargs == 5)
13972 arg = gimple_call_arg (stmt, 4);
13973 if (validate_arg (arg, POINTER_TYPE))
13975 len = c_strlen (arg, 1);
13976 if (! len || ! host_integerp (len, 1))
13977 len = NULL_TREE;
13983 if (! integer_all_onesp (size))
13985 if (! len || ! tree_int_cst_lt (len, size))
13986 return NULL_TREE;
13989 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13990 or if format doesn't contain % chars or is "%s". */
13991 if (! integer_zerop (flag))
13993 if (fmt_str == NULL)
13994 return NULL_TREE;
13995 if (strchr (fmt_str, target_percent) != NULL
13996 && strcmp (fmt_str, target_percent_s))
13997 return NULL_TREE;
14000 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
14001 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
14002 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
14003 if (!fn)
14004 return NULL_TREE;
14006 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
14009 /* Fold a call STMT to __{,v}snprintf_chk. Return NULL_TREE if
14010 a normal call should be emitted rather than expanding the function
14011 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14012 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14013 passed as second argument. */
14015 tree
14016 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14017 enum built_in_function fcode)
14019 tree dest, size, len, fn, fmt, flag;
14020 const char *fmt_str;
14022 /* Verify the required arguments in the original call. */
14023 if (gimple_call_num_args (stmt) < 5)
14024 return NULL_TREE;
14025 dest = gimple_call_arg (stmt, 0);
14026 if (!validate_arg (dest, POINTER_TYPE))
14027 return NULL_TREE;
14028 len = gimple_call_arg (stmt, 1);
14029 if (!validate_arg (len, INTEGER_TYPE))
14030 return NULL_TREE;
14031 flag = gimple_call_arg (stmt, 2);
14032 if (!validate_arg (flag, INTEGER_TYPE))
14033 return NULL_TREE;
14034 size = gimple_call_arg (stmt, 3);
14035 if (!validate_arg (size, INTEGER_TYPE))
14036 return NULL_TREE;
14037 fmt = gimple_call_arg (stmt, 4);
14038 if (!validate_arg (fmt, POINTER_TYPE))
14039 return NULL_TREE;
14041 if (! host_integerp (size, 1))
14042 return NULL_TREE;
14044 if (! integer_all_onesp (size))
14046 if (! host_integerp (len, 1))
14048 /* If LEN is not constant, try MAXLEN too.
14049 For MAXLEN only allow optimizing into non-_ocs function
14050 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
14051 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
14052 return NULL_TREE;
14054 else
14055 maxlen = len;
14057 if (tree_int_cst_lt (size, maxlen))
14058 return NULL_TREE;
14061 if (!init_target_chars ())
14062 return NULL_TREE;
14064 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
14065 or if format doesn't contain % chars or is "%s". */
14066 if (! integer_zerop (flag))
14068 fmt_str = c_getstr (fmt);
14069 if (fmt_str == NULL)
14070 return NULL_TREE;
14071 if (strchr (fmt_str, target_percent) != NULL
14072 && strcmp (fmt_str, target_percent_s))
14073 return NULL_TREE;
14076 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
14077 available. */
14078 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
14079 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
14080 if (!fn)
14081 return NULL_TREE;
14083 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
14086 /* Builtins with folding operations that operate on "..." arguments
14087 need special handling; we need to store the arguments in a convenient
14088 data structure before attempting any folding. Fortunately there are
14089 only a few builtins that fall into this category. FNDECL is the
14090 function, STMT is the GIMPLE_CALL statement for the call, and IGNORE
14091 is true if the result of the function call is ignored. */
14093 static tree
14094 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14095 bool ignore ATTRIBUTE_UNUSED)
14097 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14098 tree ret = NULL_TREE;
14100 switch (fcode)
14102 case BUILT_IN_SPRINTF_CHK:
14103 case BUILT_IN_VSPRINTF_CHK:
14104 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14105 break;
14107 case BUILT_IN_SNPRINTF_CHK:
14108 case BUILT_IN_VSNPRINTF_CHK:
14109 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
break;
14111 default:
14112 break;
14114 if (ret)
14116 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14117 TREE_NO_WARNING (ret) = 1;
14118 return ret;
14120 return NULL_TREE;
14123 /* A wrapper function for builtin folding that prevents warnings for
14124 "statement without effect" and the like, caused by removing the
14125 call node earlier than the warning is generated. */
14127 tree
14128 fold_call_stmt (gimple stmt, bool ignore)
14130 tree ret = NULL_TREE;
14131 tree fndecl = gimple_call_fndecl (stmt);
14132 location_t loc = gimple_location (stmt);
14133 if (fndecl
14134 && TREE_CODE (fndecl) == FUNCTION_DECL
14135 && DECL_BUILT_IN (fndecl)
14136 && !gimple_call_va_arg_pack_p (stmt))
14138 int nargs = gimple_call_num_args (stmt);
14140 if (avoid_folding_inline_builtin (fndecl))
14141 return NULL_TREE;
14142 /* FIXME: Don't use a list in this interface. */
14143 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14145 tree arglist = NULL_TREE;
14146 int i;
14147 for (i = nargs - 1; i >= 0; i--)
14148 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
14149 return targetm.fold_builtin (fndecl, arglist, ignore);
14151 else
14153 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
14155 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
14156 int i;
14157 for (i = 0; i < nargs; i++)
14158 args[i] = gimple_call_arg (stmt, i);
14159 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
14161 if (!ret)
14162 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
14163 if (ret)
14165 /* Propagate location information from original call to
14166 expansion of builtin. Otherwise things like
14167 maybe_emit_chk_warning, that operate on the expansion
14168 of a builtin, will use the wrong location information. */
14169 if (gimple_has_location (stmt))
14171 tree realret = ret;
14172 if (TREE_CODE (ret) == NOP_EXPR)
14173 realret = TREE_OPERAND (ret, 0);
14174 if (CAN_HAVE_LOCATION_P (realret)
14175 && !EXPR_HAS_LOCATION (realret))
14176 SET_EXPR_LOCATION (realret, loc);
14177 return realret;
14179 return ret;
14183 return NULL_TREE;