[official-gcc.git] / gcc / builtins.c
/* Expand builtin functions.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif

#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif

#ifdef HAVE_mpc
static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
#endif
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of _DECL trees, and make sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];

/* Declarations used when constructing the builtin implicitly in the compiler.
   An entry may be NULL_TREE when the builtin is not valid (for instance,
   when the runtime is not required to implement the function call in all
   cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];
84 static const char *c_getstr (tree);
85 static rtx c_readstr (const char *, enum machine_mode);
86 static int target_char_cast (tree, char *);
87 static rtx get_memory_rtx (tree, tree);
88 static int apply_args_size (void);
89 static int apply_result_size (void);
90 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
91 static rtx result_vector (int, rtx);
92 #endif
93 static void expand_builtin_update_setjmp_buf (rtx);
94 static void expand_builtin_prefetch (tree);
95 static rtx expand_builtin_apply_args (void);
96 static rtx expand_builtin_apply_args_1 (void);
97 static rtx expand_builtin_apply (rtx, rtx, rtx);
98 static void expand_builtin_return (rtx);
99 static enum type_class type_to_class (tree);
100 static rtx expand_builtin_classify_type (tree);
101 static void expand_errno_check (tree, rtx);
102 static rtx expand_builtin_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
105 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
106 static rtx expand_builtin_sincos (tree);
107 static rtx expand_builtin_cexpi (tree, rtx, rtx);
108 static rtx expand_builtin_int_roundingfn (tree, rtx);
109 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
110 static rtx expand_builtin_args_info (tree);
111 static rtx expand_builtin_next_arg (void);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
119 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
120 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
121 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
127 enum machine_mode, int);
128 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
129 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
130 enum machine_mode, int);
131 static rtx expand_builtin_bcopy (tree, int);
132 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
133 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
134 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
136 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
137 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
139 static rtx expand_builtin_bzero (tree);
140 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
141 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
142 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
143 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
144 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
145 static rtx expand_builtin_alloca (tree, rtx);
146 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
147 static rtx expand_builtin_frame_address (tree, tree);
148 static rtx expand_builtin_fputs (tree, rtx, bool);
149 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
150 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
151 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
152 static tree stabilize_va_list_loc (location_t, tree, int);
153 static rtx expand_builtin_expect (tree, rtx);
154 static tree fold_builtin_constant_p (tree);
155 static tree fold_builtin_expect (location_t, tree, tree);
156 static tree fold_builtin_classify_type (tree);
157 static tree fold_builtin_strlen (location_t, tree);
158 static tree fold_builtin_inf (location_t, tree, int);
159 static tree fold_builtin_nan (tree, tree, int);
160 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
161 static bool validate_arg (const_tree, enum tree_code code);
162 static bool integer_valued_real_p (tree);
163 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
164 static bool readonly_data_expr (tree);
165 static rtx expand_builtin_fabs (tree, rtx, rtx);
166 static rtx expand_builtin_signbit (tree, rtx);
167 static tree fold_builtin_sqrt (location_t, tree, tree);
168 static tree fold_builtin_cbrt (location_t, tree, tree);
169 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
170 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
171 static tree fold_builtin_cos (location_t, tree, tree, tree);
172 static tree fold_builtin_cosh (location_t, tree, tree, tree);
173 static tree fold_builtin_tan (tree, tree);
174 static tree fold_builtin_trunc (location_t, tree, tree);
175 static tree fold_builtin_floor (location_t, tree, tree);
176 static tree fold_builtin_ceil (location_t, tree, tree);
177 static tree fold_builtin_round (location_t, tree, tree);
178 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
179 static tree fold_builtin_bitop (tree, tree);
180 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
181 static tree fold_builtin_strchr (location_t, tree, tree, tree);
182 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
183 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
184 static tree fold_builtin_strcmp (location_t, tree, tree);
185 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
186 static tree fold_builtin_signbit (location_t, tree, tree);
187 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
188 static tree fold_builtin_isascii (location_t, tree);
189 static tree fold_builtin_toascii (location_t, tree);
190 static tree fold_builtin_isdigit (location_t, tree);
191 static tree fold_builtin_fabs (location_t, tree, tree);
192 static tree fold_builtin_abs (location_t, tree, tree);
193 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
194 enum tree_code);
195 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
196 static tree fold_builtin_0 (location_t, tree, bool);
197 static tree fold_builtin_1 (location_t, tree, tree, bool);
198 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
199 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
200 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
201 static tree fold_builtin_varargs (location_t, tree, tree, bool);
203 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
204 static tree fold_builtin_strstr (location_t, tree, tree, tree);
205 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
206 static tree fold_builtin_strcat (location_t, tree, tree);
207 static tree fold_builtin_strncat (location_t, tree, tree, tree);
208 static tree fold_builtin_strspn (location_t, tree, tree);
209 static tree fold_builtin_strcspn (location_t, tree, tree);
210 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
212 static rtx expand_builtin_object_size (tree);
213 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
214 enum built_in_function);
215 static void maybe_emit_chk_warning (tree, enum built_in_function);
216 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
217 static void maybe_emit_free_warning (tree);
218 static tree fold_builtin_object_size (tree, tree);
219 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
220 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
221 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
222 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
223 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
224 enum built_in_function);
225 static bool init_target_chars (void);
227 static unsigned HOST_WIDE_INT target_newline;
228 static unsigned HOST_WIDE_INT target_percent;
229 static unsigned HOST_WIDE_INT target_c;
230 static unsigned HOST_WIDE_INT target_s;
231 static char target_percent_c[3];
232 static char target_percent_s[3];
233 static char target_percent_s_newline[4];
234 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
235 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
236 static tree do_mpfr_arg2 (tree, tree, tree,
237 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
238 static tree do_mpfr_arg3 (tree, tree, tree, tree,
239 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
240 static tree do_mpfr_sincos (tree, tree, tree);
241 static tree do_mpfr_bessel_n (tree, tree, tree,
242 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
243 const REAL_VALUE_TYPE *, bool);
244 static tree do_mpfr_remquo (tree, tree, tree);
245 static tree do_mpfr_lgamma_r (tree, tree, tree);
/* Return true if NAME starts with the "__builtin_" or "__sync_" prefix.  */

bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  return false;
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
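
/* Illustrative sketch (expository addition, not part of the original file):
   the test above is purely textual, so for example

     is_builtin_name ("__builtin_memcpy")        evaluates to true
     is_builtin_name ("__sync_fetch_and_add_4")  evaluates to true
     is_builtin_name ("memcpy")                  evaluates to false

   and called_as_built_in (fndecl) asks the same question about DECL_NAME of
   FNDECL, i.e. the name the user wrote in the call, not the assembler name
   the definition will eventually get.  Whether the name actually maps to an
   enum built_in_function is decided elsewhere.  */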
/* Return the alignment in bits of EXP, an object.
   Don't return more than MAX_ALIGN no matter what.  ALIGN is the initial
   guessed alignment, e.g. from type alignment.  */

unsigned int
get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
{
  unsigned int inner;

  inner = max_align;
  if (handled_component_p (exp))
    {
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      enum machine_mode mode;
      int unsignedp, volatilep;

      exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                 &mode, &unsignedp, &volatilep, true);
      if (bitpos)
        inner = MIN (inner, (unsigned) (bitpos & -bitpos));
      while (offset)
        {
          tree next_offset;

          if (TREE_CODE (offset) == PLUS_EXPR)
            {
              next_offset = TREE_OPERAND (offset, 0);
              offset = TREE_OPERAND (offset, 1);
            }
          else
            next_offset = NULL;
          if (host_integerp (offset, 1))
            {
              /* Any overflow in calculating offset_bits won't change
                 the alignment.  */
              unsigned offset_bits
                = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

              if (offset_bits)
                inner = MIN (inner, (offset_bits & -offset_bits));
            }
          else if (TREE_CODE (offset) == MULT_EXPR
                   && host_integerp (TREE_OPERAND (offset, 1), 1))
            {
              /* Any overflow in calculating offset_factor won't change
                 the alignment.  */
              unsigned offset_factor
                = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
                   * BITS_PER_UNIT);

              if (offset_factor)
                inner = MIN (inner, (offset_factor & -offset_factor));
            }
          else
            {
              inner = MIN (inner, BITS_PER_UNIT);
              break;
            }
          offset = next_offset;
        }
    }
  if (DECL_P (exp))
    align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
  else if (CONSTANT_CLASS_P (exp))
    align = MIN (inner, (unsigned) CONSTANT_ALIGNMENT (exp, align));
#endif
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
           || TREE_CODE (exp) == INDIRECT_REF)
    align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
  else
    align = MIN (align, inner);
  return MIN (align, max_align);
}
/* Returns true iff we can trust that alignment information has been
   calculated properly.  */

bool
can_trust_pointer_alignment (void)
{
  /* We rely on TER to compute accurate alignment information.  */
  return (optimize && flag_tree_ter);
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  if (!can_trust_pointer_alignment ())
    return 0;

  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  while (1)
    {
      switch (TREE_CODE (exp))
        {
        CASE_CONVERT:
          exp = TREE_OPERAND (exp, 0);
          if (! POINTER_TYPE_P (TREE_TYPE (exp)))
            return align;

          inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
          align = MIN (inner, max_align);
          break;

        case POINTER_PLUS_EXPR:
          /* If sum of pointer + int, restrict our maximum alignment to that
             imposed by the integer.  If not, we can't do any better than
             ALIGN.  */
          if (! host_integerp (TREE_OPERAND (exp, 1), 1))
            return align;

          while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
                  & (max_align / BITS_PER_UNIT - 1))
                 != 0)
            max_align >>= 1;

          exp = TREE_OPERAND (exp, 0);
          break;

        case ADDR_EXPR:
          /* See what we are pointing at and look at its alignment.  */
          return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);

        default:
          return align;
        }
    }
}
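
/* Illustrative sketch (expository addition, not part of the original file;
   assumes a 32-bit int type and that alignment info can be trusted): for

     int a[16];
     ... &a[1] ...

   the ADDR_EXPR case forwards to get_object_alignment, where the 4-byte
   element offset contributes 32 alignment bits, so the result is 32 even if
   A itself is more strictly aligned.  For a plain variable of type int *
   only TYPE_ALIGN (int) is known, and the POINTER_PLUS_EXPR case can only
   shrink that bound.  When can_trust_pointer_alignment () is false the
   function simply returns 0.  */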
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (input_location, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
        {
          warning (0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
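
/* Illustrative examples (expository addition, not part of the original file):

     SRC is the constant "hello" with offset 0        yields ssize_int (5)
     SRC is "foo\0bar" with constant offset 4         yields ssize_int (3),
       the length of "bar"
     SRC is "foo\0bar" with a non-constant offset     yields NULL_TREE, since
       the internal zero byte makes the length depend on where the search
       would start
     SRC is i++ ? "foo" : "bar" and ONLY_VALUE != 0   yields ssize_int (3),
       because both arms have equal length (the side effect is not evaluated)

   A constant offset outside [0, TREE_STRING_LENGTH (src) - 1] triggers the
   "offset outside bounds of constant string" warning and yields NULL_TREE.  */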
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!host_integerp (offset_node, 1)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
}
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
        ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
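
/* Worked example (expository addition, not part of the original file):
   reading the string "abc" in SImode on a target with 8-bit units packs the
   bytes 'a' 'b' 'c' '\0' into a single constant.  With !WORDS_BIG_ENDIAN and
   !BYTES_BIG_ENDIAN byte I lands at bit position 8*I, giving 0x00636261;
   with WORDS_BIG_ENDIAN the same bytes give 0x61626300.  Once a zero byte
   has been read, CH stays zero, so the rest of the constant is zero-filled
   instead of reading past the end of STR.  */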
/* Cast a target constant CST to target CHAR.  If that value fits into the
   host char type, return zero and put the value into the variable pointed
   to by P; otherwise return one.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (!host_integerp (cst, 1)
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  val = tree_low_cst (cst, 1);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_ADDRESSABLE (exp) == 0
      && (TREE_CODE (exp) == PARM_DECL
          || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
    return exp;

  return save_expr (exp);
}
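
/* Illustrative sketch (expository addition, not part of the original file):
   when an argument will be expanded more than once, e.g. the SRC of a string
   builtin that is first measured and then copied, wrapping it as

     src = builtin_save_expr (src);

   guarantees a single evaluation.  A PARM_DECL or a non-static,
   non-addressable local VAR_DECL is returned unchanged because nothing run
   in between can modify it; anything else is wrapped in a SAVE_EXPR just as
   save_expr would do.  */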
613 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
614 times to get the address of either a higher stack frame, or a return
615 address located within it (depending on FNDECL_CODE). */
617 static rtx
618 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
620 int i;
622 #ifdef INITIAL_FRAME_ADDRESS_RTX
623 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
624 #else
625 rtx tem;
627 /* For a zero count with __builtin_return_address, we don't care what
628 frame address we return, because target-specific definitions will
629 override us. Therefore frame pointer elimination is OK, and using
630 the soft frame pointer is OK.
632 For a nonzero count, or a zero count with __builtin_frame_address,
633 we require a stable offset from the current frame pointer to the
634 previous one, so we must use the hard frame pointer, and
635 we must disable frame pointer elimination. */
636 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
637 tem = frame_pointer_rtx;
638 else
640 tem = hard_frame_pointer_rtx;
642 /* Tell reload not to eliminate the frame pointer. */
643 crtl->accesses_prior_frames = 1;
645 #endif
647 /* Some machines need special handling before we can access
648 arbitrary frames. For example, on the SPARC, we must first flush
649 all register windows to the stack. */
650 #ifdef SETUP_FRAME_ADDRESSES
651 if (count > 0)
652 SETUP_FRAME_ADDRESSES ();
653 #endif
655 /* On the SPARC, the return address is not in the frame, it is in a
656 register. There is no way to access it off of the current frame
657 pointer, but it can be accessed off the previous frame pointer by
658 reading the value from the register window save area. */
659 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
660 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
661 count--;
662 #endif
664 /* Scan back COUNT frames to the specified frame. */
665 for (i = 0; i < count; i++)
667 /* Assume the dynamic chain pointer is in the word that the
668 frame address points to, unless otherwise specified. */
669 #ifdef DYNAMIC_CHAIN_ADDRESS
670 tem = DYNAMIC_CHAIN_ADDRESS (tem);
671 #endif
672 tem = memory_address (Pmode, tem);
673 tem = gen_frame_mem (Pmode, tem);
674 tem = copy_to_reg (tem);
677 /* For __builtin_frame_address, return what we've got. But, on
678 the SPARC for example, we may have to add a bias. */
679 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
680 #ifdef FRAME_ADDR_RTX
681 return FRAME_ADDR_RTX (tem);
682 #else
683 return tem;
684 #endif
686 /* For __builtin_return_address, get the return address from that frame. */
687 #ifdef RETURN_ADDR_RTX
688 tem = RETURN_ADDR_RTX (count, tem);
689 #else
690 tem = memory_address (Pmode,
691 plus_constant (tem, GET_MODE_SIZE (Pmode)));
692 tem = gen_frame_mem (Pmode, tem);
693 #endif
694 return tem;
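
/* Illustrative user-level example (expository addition, not part of the
   original file): the machinery above implements calls such as

     void *ra = __builtin_return_address (0);  -- address this function
                                                  will return to
     void *fp = __builtin_frame_address (1);   -- frame address of our caller

   A COUNT of zero asks about the current frame; each increment follows the
   dynamic chain one frame further up, which is only reliable when the
   intervening frames keep a frame pointer.  */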
697 /* Alias set used for setjmp buffer. */
698 static alias_set_type setjmp_alias_set = -1;
700 /* Construct the leading half of a __builtin_setjmp call. Control will
701 return to RECEIVER_LABEL. This is also called directly by the SJLJ
702 exception handling code. */
704 void
705 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
707 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
708 rtx stack_save;
709 rtx mem;
711 if (setjmp_alias_set == -1)
712 setjmp_alias_set = new_alias_set ();
714 buf_addr = convert_memory_address (Pmode, buf_addr);
716 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
718 /* We store the frame pointer and the address of receiver_label in
719 the buffer and use the rest of it for the stack save area, which
720 is machine-dependent. */
722 mem = gen_rtx_MEM (Pmode, buf_addr);
723 set_mem_alias_set (mem, setjmp_alias_set);
724 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
726 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
727 set_mem_alias_set (mem, setjmp_alias_set);
729 emit_move_insn (validize_mem (mem),
730 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
732 stack_save = gen_rtx_MEM (sa_mode,
733 plus_constant (buf_addr,
734 2 * GET_MODE_SIZE (Pmode)));
735 set_mem_alias_set (stack_save, setjmp_alias_set);
736 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
738 /* If there is further processing to do, do it. */
739 #ifdef HAVE_builtin_setjmp_setup
740 if (HAVE_builtin_setjmp_setup)
741 emit_insn (gen_builtin_setjmp_setup (buf_addr));
742 #endif
744 /* Tell optimize_save_area_alloca that extra work is going to
745 need to go on during alloca. */
746 cfun->calls_setjmp = 1;
748 /* We have a nonlocal label. */
749 cfun->has_nonlocal_label = 1;
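
/* Layout sketch of the __builtin_setjmp buffer as written by the code above
   (expository addition, not part of the original file):

     word 0        targetm.builtin_setjmp_frame_value (), normally the
                   frame pointer
     word 1        address of RECEIVER_LABEL
     words 2 ...   stack save area in STACK_SAVEAREA_MODE (SAVE_NONLOCAL)

   which is why __builtin_setjmp and __builtin_longjmp are documented as
   taking a buffer of five words even though not every target uses all of
   them.  */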
752 /* Construct the trailing part of a __builtin_setjmp call. This is
753 also called directly by the SJLJ exception handling code. */
755 void
756 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
758 /* Clobber the FP when we get here, so we have to make sure it's
759 marked as used by this function. */
760 emit_use (hard_frame_pointer_rtx);
762 /* Mark the static chain as clobbered here so life information
763 doesn't get messed up for it. */
764 emit_clobber (static_chain_rtx);
766 /* Now put in the code to restore the frame pointer, and argument
767 pointer, if needed. */
768 #ifdef HAVE_nonlocal_goto
769 if (! HAVE_nonlocal_goto)
770 #endif
772 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
773 /* This might change the hard frame pointer in ways that aren't
774 apparent to early optimization passes, so force a clobber. */
775 emit_clobber (hard_frame_pointer_rtx);
778 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
779 if (fixed_regs[ARG_POINTER_REGNUM])
781 #ifdef ELIMINABLE_REGS
782 size_t i;
783 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
785 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
786 if (elim_regs[i].from == ARG_POINTER_REGNUM
787 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
788 break;
790 if (i == ARRAY_SIZE (elim_regs))
791 #endif
793 /* Now restore our arg pointer from the address at which it
794 was saved in our stack frame. */
795 emit_move_insn (crtl->args.internal_arg_pointer,
796 copy_to_reg (get_arg_pointer_save_area ()));
799 #endif
801 #ifdef HAVE_builtin_setjmp_receiver
802 if (HAVE_builtin_setjmp_receiver)
803 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
804 else
805 #endif
806 #ifdef HAVE_nonlocal_goto_receiver
807 if (HAVE_nonlocal_goto_receiver)
808 emit_insn (gen_nonlocal_goto_receiver ());
809 else
810 #endif
811 { /* Nothing */ }
813 /* We must not allow the code we just generated to be reordered by
814 scheduling. Specifically, the update of the frame pointer must
815 happen immediately, not later. */
816 emit_insn (gen_blockage ());
819 /* __builtin_longjmp is passed a pointer to an array of five words (not
820 all will be used on all machines). It operates similarly to the C
821 library function of the same name, but is more efficient. Much of
822 the code below is copied from the handling of non-local gotos. */
824 static void
825 expand_builtin_longjmp (rtx buf_addr, rtx value)
827 rtx fp, lab, stack, insn, last;
828 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
830 /* DRAP is needed for stack realign if longjmp is expanded to current
831 function */
832 if (SUPPORTS_STACK_ALIGNMENT)
833 crtl->need_drap = true;
835 if (setjmp_alias_set == -1)
836 setjmp_alias_set = new_alias_set ();
838 buf_addr = convert_memory_address (Pmode, buf_addr);
840 buf_addr = force_reg (Pmode, buf_addr);
842 /* We used to store value in static_chain_rtx, but that fails if pointers
843 are smaller than integers. We instead require that the user must pass
844 a second argument of 1, because that is what builtin_setjmp will
845 return. This also makes EH slightly more efficient, since we are no
846 longer copying around a value that we don't care about. */
847 gcc_assert (value == const1_rtx);
849 last = get_last_insn ();
850 #ifdef HAVE_builtin_longjmp
851 if (HAVE_builtin_longjmp)
852 emit_insn (gen_builtin_longjmp (buf_addr));
853 else
854 #endif
856 fp = gen_rtx_MEM (Pmode, buf_addr);
857 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
858 GET_MODE_SIZE (Pmode)));
860 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
861 2 * GET_MODE_SIZE (Pmode)));
862 set_mem_alias_set (fp, setjmp_alias_set);
863 set_mem_alias_set (lab, setjmp_alias_set);
864 set_mem_alias_set (stack, setjmp_alias_set);
866 /* Pick up FP, label, and SP from the block and jump. This code is
867 from expand_goto in stmt.c; see there for detailed comments. */
868 #ifdef HAVE_nonlocal_goto
869 if (HAVE_nonlocal_goto)
870 /* We have to pass a value to the nonlocal_goto pattern that will
871 get copied into the static_chain pointer, but it does not matter
872 what that value is, because builtin_setjmp does not use it. */
873 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
874 else
875 #endif
877 lab = copy_to_reg (lab);
879 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
880 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
882 emit_move_insn (hard_frame_pointer_rtx, fp);
883 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
885 emit_use (hard_frame_pointer_rtx);
886 emit_use (stack_pointer_rtx);
887 emit_indirect_jump (lab);
891 /* Search backwards and mark the jump insn as a non-local goto.
892 Note that this precludes the use of __builtin_longjmp to a
893 __builtin_setjmp target in the same function. However, we've
894 already cautioned the user that these functions are for
895 internal exception handling use only. */
896 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
898 gcc_assert (insn != last);
900 if (JUMP_P (insn))
902 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
903 break;
905 else if (CALL_P (insn))
906 break;
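
/* Illustrative user-level example (expository addition, not part of the
   original file), matching the assertion above that the second argument must
   be the constant 1:

     static void *buf[5];   -- five words, see the comment above

     if (__builtin_setjmp (buf) == 0)
       do_work ();          -- normal path
     else
       handle_unwind ();    -- reached via __builtin_longjmp
     ...
     __builtin_longjmp (buf, 1);

   These builtins are intended for the compiler's own exception-handling
   support; ordinary code should use setjmp/longjmp from <setjmp.h>.  */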
910 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
911 and the address of the save area. */
913 static rtx
914 expand_builtin_nonlocal_goto (tree exp)
916 tree t_label, t_save_area;
917 rtx r_label, r_save_area, r_fp, r_sp, insn;
919 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
920 return NULL_RTX;
922 t_label = CALL_EXPR_ARG (exp, 0);
923 t_save_area = CALL_EXPR_ARG (exp, 1);
925 r_label = expand_normal (t_label);
926 r_label = convert_memory_address (Pmode, r_label);
927 r_save_area = expand_normal (t_save_area);
928 r_save_area = convert_memory_address (Pmode, r_save_area);
929 /* Copy the address of the save location to a register just in case it was based
930 on the frame pointer. */
931 r_save_area = copy_to_reg (r_save_area);
932 r_fp = gen_rtx_MEM (Pmode, r_save_area);
933 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
934 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
936 crtl->has_nonlocal_goto = 1;
938 #ifdef HAVE_nonlocal_goto
939 /* ??? We no longer need to pass the static chain value, afaik. */
940 if (HAVE_nonlocal_goto)
941 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
942 else
943 #endif
945 r_label = copy_to_reg (r_label);
947 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
948 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
950 /* Restore frame pointer for containing function.
951 This sets the actual hard register used for the frame pointer
952 to the location of the function's incoming static chain info.
953 The non-local goto handler will then adjust it to contain the
954 proper value and reload the argument pointer, if needed. */
955 emit_move_insn (hard_frame_pointer_rtx, r_fp);
956 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
958 /* USE of hard_frame_pointer_rtx added for consistency;
959 not clear if really needed. */
960 emit_use (hard_frame_pointer_rtx);
961 emit_use (stack_pointer_rtx);
963 /* If the architecture is using a GP register, we must
964 conservatively assume that the target function makes use of it.
965 The prologue of functions with nonlocal gotos must therefore
966 initialize the GP register to the appropriate value, and we
967 must then make sure that this value is live at the point
968 of the jump. (Note that this doesn't necessarily apply
969 to targets with a nonlocal_goto pattern; they are free
970 to implement it in their own way. Note also that this is
971 a no-op if the GP register is a global invariant.) */
972 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
973 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
974 emit_use (pic_offset_table_rtx);
976 emit_indirect_jump (r_label);
979 /* Search backwards to the jump insn and mark it as a
980 non-local goto. */
981 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
983 if (JUMP_P (insn))
985 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
986 break;
988 else if (CALL_P (insn))
989 break;
992 return const0_rtx;
995 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
996 (not all will be used on all machines) that was passed to __builtin_setjmp.
997 It updates the stack pointer in that block to correspond to the current
998 stack pointer. */
1000 static void
1001 expand_builtin_update_setjmp_buf (rtx buf_addr)
1003 enum machine_mode sa_mode = Pmode;
1004 rtx stack_save;
1007 #ifdef HAVE_save_stack_nonlocal
1008 if (HAVE_save_stack_nonlocal)
1009 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1010 #endif
1011 #ifdef STACK_SAVEAREA_MODE
1012 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1013 #endif
1015 stack_save
1016 = gen_rtx_MEM (sa_mode,
1017 memory_address
1018 (sa_mode,
1019 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1021 #ifdef HAVE_setjmp
1022 if (HAVE_setjmp)
1023 emit_insn (gen_setjmp ());
1024 #endif
1026 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1029 /* Expand a call to __builtin_prefetch. For a target that does not support
1030 data prefetch, evaluate the memory address argument in case it has side
1031 effects. */
1033 static void
1034 expand_builtin_prefetch (tree exp)
1036 tree arg0, arg1, arg2;
1037 int nargs;
1038 rtx op0, op1, op2;
1040 if (!validate_arglist (exp, POINTER_TYPE, 0))
1041 return;
1043 arg0 = CALL_EXPR_ARG (exp, 0);
1045 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1046 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1047 locality). */
1048 nargs = call_expr_nargs (exp);
1049 if (nargs > 1)
1050 arg1 = CALL_EXPR_ARG (exp, 1);
1051 else
1052 arg1 = integer_zero_node;
1053 if (nargs > 2)
1054 arg2 = CALL_EXPR_ARG (exp, 2);
1055 else
1056 arg2 = build_int_cst (NULL_TREE, 3);
1058 /* Argument 0 is an address. */
1059 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1061 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1062 if (TREE_CODE (arg1) != INTEGER_CST)
1064 error ("second argument to %<__builtin_prefetch%> must be a constant");
1065 arg1 = integer_zero_node;
1067 op1 = expand_normal (arg1);
1068 /* Argument 1 must be either zero or one. */
1069 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1071 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1072 " using zero");
1073 op1 = const0_rtx;
1076 /* Argument 2 (locality) must be a compile-time constant int. */
1077 if (TREE_CODE (arg2) != INTEGER_CST)
1079 error ("third argument to %<__builtin_prefetch%> must be a constant");
1080 arg2 = integer_zero_node;
1082 op2 = expand_normal (arg2);
1083 /* Argument 2 must be 0, 1, 2, or 3. */
1084 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1086 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1087 op2 = const0_rtx;
1090 #ifdef HAVE_prefetch
1091 if (HAVE_prefetch)
1093 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1094 (op0,
1095 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1096 || (GET_MODE (op0) != Pmode))
1098 op0 = convert_memory_address (Pmode, op0);
1099 op0 = force_reg (Pmode, op0);
1101 emit_insn (gen_prefetch (op0, op1, op2));
1103 #endif
1105 /* Don't do anything with direct references to volatile memory, but
1106 generate code to handle other side effects. */
1107 if (!MEM_P (op0) && side_effects_p (op0))
1108 emit_insn (op0);
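
/* Illustrative user-level example (expository addition, not part of the
   original file): the checks above accept calls such as

     __builtin_prefetch (&a[i + 8]);        -- rw defaults to 0, locality to 3
     __builtin_prefetch (&a[i + 8], 1, 0);  -- prefetch for write, no locality

   The second argument must be the constant 0 or 1 and the third the constant
   0, 1, 2 or 3; out-of-range constants are diagnosed and replaced with zero,
   and on targets without a prefetch pattern only the side effects of the
   address expression are expanded.  */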
1111 /* Get a MEM rtx for expression EXP which is the address of an operand
1112 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1113 the maximum length of the block of memory that might be accessed or
1114 NULL if unknown. */
1116 static rtx
1117 get_memory_rtx (tree exp, tree len)
1119 tree orig_exp = exp;
1120 rtx addr, mem;
1121 HOST_WIDE_INT off;
1123 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1124 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1125 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1126 exp = TREE_OPERAND (exp, 0);
1128 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1129 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1131 /* Get an expression we can use to find the attributes to assign to MEM.
1132 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1133 we can. First remove any nops. */
1134 while (CONVERT_EXPR_P (exp)
1135 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1136 exp = TREE_OPERAND (exp, 0);
1138 off = 0;
1139 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1140 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1141 && host_integerp (TREE_OPERAND (exp, 1), 0)
1142 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1143 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1144 else if (TREE_CODE (exp) == ADDR_EXPR)
1145 exp = TREE_OPERAND (exp, 0);
1146 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1147 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1148 else
1149 exp = NULL;
1151 /* Honor attributes derived from exp, except for the alias set
1152 (as builtin stringops may alias with anything) and the size
1153 (as stringops may access multiple array elements). */
1154 if (exp)
1156 set_mem_attributes (mem, exp, 0);
1158 if (off)
1159 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1161 /* Allow the string and memory builtins to overflow from one
1162 field into another, see http://gcc.gnu.org/PR23561.
1163 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1164 memory accessed by the string or memory builtin will fit
1165 within the field. */
1166 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1168 tree mem_expr = MEM_EXPR (mem);
1169 HOST_WIDE_INT offset = -1, length = -1;
1170 tree inner = exp;
1172 while (TREE_CODE (inner) == ARRAY_REF
1173 || CONVERT_EXPR_P (inner)
1174 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1175 || TREE_CODE (inner) == SAVE_EXPR)
1176 inner = TREE_OPERAND (inner, 0);
1178 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1180 if (MEM_OFFSET (mem)
1181 && CONST_INT_P (MEM_OFFSET (mem)))
1182 offset = INTVAL (MEM_OFFSET (mem));
1184 if (offset >= 0 && len && host_integerp (len, 0))
1185 length = tree_low_cst (len, 0);
1187 while (TREE_CODE (inner) == COMPONENT_REF)
1189 tree field = TREE_OPERAND (inner, 1);
1190 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1191 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1193 /* Bitfields are generally not byte-addressable. */
1194 gcc_assert (!DECL_BIT_FIELD (field)
1195 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1196 % BITS_PER_UNIT) == 0
1197 && host_integerp (DECL_SIZE (field), 0)
1198 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1199 % BITS_PER_UNIT) == 0));
1201 /* If we can prove that the memory starting at XEXP (mem, 0) and
1202 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1203 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1204 fields without DECL_SIZE_UNIT like flexible array members. */
1205 if (length >= 0
1206 && DECL_SIZE_UNIT (field)
1207 && host_integerp (DECL_SIZE_UNIT (field), 0))
1209 HOST_WIDE_INT size
1210 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1211 if (offset <= size
1212 && length <= size
1213 && offset + length <= size)
1214 break;
1217 if (offset >= 0
1218 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1219 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1220 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1221 / BITS_PER_UNIT;
1222 else
1224 offset = -1;
1225 length = -1;
1228 mem_expr = TREE_OPERAND (mem_expr, 0);
1229 inner = TREE_OPERAND (inner, 0);
1232 if (mem_expr == NULL)
1233 offset = -1;
1234 if (mem_expr != MEM_EXPR (mem))
1236 set_mem_expr (mem, mem_expr);
1237 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1240 set_mem_alias_set (mem, 0);
1241 set_mem_size (mem, NULL_RTX);
1244 return mem;
1247 /* Built-in functions to perform an untyped call and return. */
1249 /* For each register that may be used for calling a function, this
1250 gives a mode used to copy the register's value. VOIDmode indicates
1251 the register is not used for calling a function. If the machine
1252 has register windows, this gives only the outbound registers.
1253 INCOMING_REGNO gives the corresponding inbound register. */
1254 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1256 /* For each register that may be used for returning values, this gives
1257 a mode used to copy the register's value. VOIDmode indicates the
1258 register is not used for returning values. If the machine has
1259 register windows, this gives only the outbound registers.
1260 INCOMING_REGNO gives the corresponding inbound register. */
1261 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1263 /* For each register that may be used for calling a function, this
1264 gives the offset of that register into the block returned by
1265 __builtin_apply_args. 0 indicates that the register is not
1266 used for calling a function. */
1267 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1269 /* Return the size required for the block returned by __builtin_apply_args,
1270 and initialize apply_args_mode. */
1272 static int
1273 apply_args_size (void)
1275 static int size = -1;
1276 int align;
1277 unsigned int regno;
1278 enum machine_mode mode;
1280 /* The values computed by this function never change. */
1281 if (size < 0)
1283 /* The first value is the incoming arg-pointer. */
1284 size = GET_MODE_SIZE (Pmode);
1286 /* The second value is the structure value address unless this is
1287 passed as an "invisible" first argument. */
1288 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1289 size += GET_MODE_SIZE (Pmode);
1291 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1292 if (FUNCTION_ARG_REGNO_P (regno))
1294 mode = reg_raw_mode[regno];
1296 gcc_assert (mode != VOIDmode);
1298 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1299 if (size % align != 0)
1300 size = CEIL (size, align) * align;
1301 apply_args_reg_offset[regno] = size;
1302 size += GET_MODE_SIZE (mode);
1303 apply_args_mode[regno] = mode;
1305 else
1307 apply_args_mode[regno] = VOIDmode;
1308 apply_args_reg_offset[regno] = 0;
1311 return size;
1314 /* Return the size required for the block returned by __builtin_apply,
1315 and initialize apply_result_mode. */
1317 static int
1318 apply_result_size (void)
1320 static int size = -1;
1321 int align, regno;
1322 enum machine_mode mode;
1324 /* The values computed by this function never change. */
1325 if (size < 0)
1327 size = 0;
1329 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1330 if (FUNCTION_VALUE_REGNO_P (regno))
1332 mode = reg_raw_mode[regno];
1334 gcc_assert (mode != VOIDmode);
1336 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1337 if (size % align != 0)
1338 size = CEIL (size, align) * align;
1339 size += GET_MODE_SIZE (mode);
1340 apply_result_mode[regno] = mode;
1342 else
1343 apply_result_mode[regno] = VOIDmode;
1345 /* Allow targets that use untyped_call and untyped_return to override
1346 the size so that machine-specific information can be stored here. */
1347 #ifdef APPLY_RESULT_SIZE
1348 size = APPLY_RESULT_SIZE;
1349 #endif
1351 return size;
1354 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1355 /* Create a vector describing the result block RESULT. If SAVEP is true,
1356 the result block is used to save the values; otherwise it is used to
1357 restore the values. */
1359 static rtx
1360 result_vector (int savep, rtx result)
1362 int regno, size, align, nelts;
1363 enum machine_mode mode;
1364 rtx reg, mem;
1365 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1367 size = nelts = 0;
1368 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1369 if ((mode = apply_result_mode[regno]) != VOIDmode)
1371 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1372 if (size % align != 0)
1373 size = CEIL (size, align) * align;
1374 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1375 mem = adjust_address (result, mode, size);
1376 savevec[nelts++] = (savep
1377 ? gen_rtx_SET (VOIDmode, mem, reg)
1378 : gen_rtx_SET (VOIDmode, reg, mem));
1379 size += GET_MODE_SIZE (mode);
1381 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1383 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1385 /* Save the state required to perform an untyped call with the same
1386 arguments as were passed to the current function. */
1388 static rtx
1389 expand_builtin_apply_args_1 (void)
1391 rtx registers, tem;
1392 int size, align, regno;
1393 enum machine_mode mode;
1394 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1396 /* Create a block where the arg-pointer, structure value address,
1397 and argument registers can be saved. */
1398 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1400 /* Walk past the arg-pointer and structure value address. */
1401 size = GET_MODE_SIZE (Pmode);
1402 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1403 size += GET_MODE_SIZE (Pmode);
1405 /* Save each register used in calling a function to the block. */
1406 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1407 if ((mode = apply_args_mode[regno]) != VOIDmode)
1409 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1410 if (size % align != 0)
1411 size = CEIL (size, align) * align;
1413 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1415 emit_move_insn (adjust_address (registers, mode, size), tem);
1416 size += GET_MODE_SIZE (mode);
1419 /* Save the arg pointer to the block. */
1420 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1421 #ifdef STACK_GROWS_DOWNWARD
1422 /* We need the pointer as the caller actually passed them to us, not
1423 as we might have pretended they were passed. Make sure it's a valid
1424 operand, as emit_move_insn isn't expected to handle a PLUS. */
1426 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1427 NULL_RTX);
1428 #endif
1429 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1431 size = GET_MODE_SIZE (Pmode);
1433 /* Save the structure value address unless this is passed as an
1434 "invisible" first argument. */
1435 if (struct_incoming_value)
1437 emit_move_insn (adjust_address (registers, Pmode, size),
1438 copy_to_reg (struct_incoming_value));
1439 size += GET_MODE_SIZE (Pmode);
1442 /* Return the address of the block. */
1443 return copy_addr_to_reg (XEXP (registers, 0));
1446 /* __builtin_apply_args returns block of memory allocated on
1447 the stack into which is stored the arg pointer, structure
1448 value address, static chain, and all the registers that might
1449 possibly be used in performing a function call. The code is
1450 moved to the start of the function so the incoming values are
1451 saved. */
1453 static rtx
1454 expand_builtin_apply_args (void)
1456 /* Don't do __builtin_apply_args more than once in a function.
1457 Save the result of the first call and reuse it. */
1458 if (apply_args_value != 0)
1459 return apply_args_value;
1461 /* When this function is called, it means that registers must be
1462 saved on entry to this function. So we migrate the
1463 call to the first insn of this function. */
1464 rtx temp;
1465 rtx seq;
1467 start_sequence ();
1468 temp = expand_builtin_apply_args_1 ();
1469 seq = get_insns ();
1470 end_sequence ();
1472 apply_args_value = temp;
1474 /* Put the insns after the NOTE that starts the function.
1475 If this is inside a start_sequence, make the outer-level insn
1476 chain current, so the code is placed at the start of the
1477 function. If internal_arg_pointer is a non-virtual pseudo,
1478 it needs to be placed after the function that initializes
1479 that pseudo. */
1480 push_topmost_sequence ();
1481 if (REG_P (crtl->args.internal_arg_pointer)
1482 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1483 emit_insn_before (seq, parm_birth_insn);
1484 else
1485 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1486 pop_topmost_sequence ();
1487 return temp;
1491 /* Perform an untyped call and save the state required to perform an
1492 untyped return of whatever value was returned by the given function. */
1494 static rtx
1495 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1497 int size, align, regno;
1498 enum machine_mode mode;
1499 rtx incoming_args, result, reg, dest, src, call_insn;
1500 rtx old_stack_level = 0;
1501 rtx call_fusage = 0;
1502 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1504 arguments = convert_memory_address (Pmode, arguments);
1506 /* Create a block where the return registers can be saved. */
1507 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1509 /* Fetch the arg pointer from the ARGUMENTS block. */
1510 incoming_args = gen_reg_rtx (Pmode);
1511 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1512 #ifndef STACK_GROWS_DOWNWARD
1513 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1514 incoming_args, 0, OPTAB_LIB_WIDEN);
1515 #endif
1517 /* Push a new argument block and copy the arguments. Do not allow
1518 the (potential) memcpy call below to interfere with our stack
1519 manipulations. */
1520 do_pending_stack_adjust ();
1521 NO_DEFER_POP;
1523 /* Save the stack with nonlocal if available. */
1524 #ifdef HAVE_save_stack_nonlocal
1525 if (HAVE_save_stack_nonlocal)
1526 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1527 else
1528 #endif
1529 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1531 /* Allocate a block of memory onto the stack and copy the memory
1532 arguments to the outgoing arguments address. */
1533 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1535 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1536 may have already set current_function_calls_alloca to true.
1537 current_function_calls_alloca won't be set if argsize is zero,
1538 so we have to guarantee need_drap is true here. */
1539 if (SUPPORTS_STACK_ALIGNMENT)
1540 crtl->need_drap = true;
1542 dest = virtual_outgoing_args_rtx;
1543 #ifndef STACK_GROWS_DOWNWARD
1544 if (CONST_INT_P (argsize))
1545 dest = plus_constant (dest, -INTVAL (argsize));
1546 else
1547 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1548 #endif
1549 dest = gen_rtx_MEM (BLKmode, dest);
1550 set_mem_align (dest, PARM_BOUNDARY);
1551 src = gen_rtx_MEM (BLKmode, incoming_args);
1552 set_mem_align (src, PARM_BOUNDARY);
1553 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1555 /* Refer to the argument block. */
1556 apply_args_size ();
1557 arguments = gen_rtx_MEM (BLKmode, arguments);
1558 set_mem_align (arguments, PARM_BOUNDARY);
1560 /* Walk past the arg-pointer and structure value address. */
1561 size = GET_MODE_SIZE (Pmode);
1562 if (struct_value)
1563 size += GET_MODE_SIZE (Pmode);
1565 /* Restore each of the registers previously saved. Make USE insns
1566 for each of these registers for use in making the call. */
1567 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1568 if ((mode = apply_args_mode[regno]) != VOIDmode)
1570 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1571 if (size % align != 0)
1572 size = CEIL (size, align) * align;
1573 reg = gen_rtx_REG (mode, regno);
1574 emit_move_insn (reg, adjust_address (arguments, mode, size));
1575 use_reg (&call_fusage, reg);
1576 size += GET_MODE_SIZE (mode);
1579 /* Restore the structure value address unless this is passed as an
1580 "invisible" first argument. */
1581 size = GET_MODE_SIZE (Pmode);
1582 if (struct_value)
1584 rtx value = gen_reg_rtx (Pmode);
1585 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1586 emit_move_insn (struct_value, value);
1587 if (REG_P (struct_value))
1588 use_reg (&call_fusage, struct_value);
1589 size += GET_MODE_SIZE (Pmode);
1592 /* All arguments and registers used for the call are set up by now! */
1593 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1595 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1596 and we don't want to load it into a register as an optimization,
1597 because prepare_call_address already did it if it should be done. */
1598 if (GET_CODE (function) != SYMBOL_REF)
1599 function = memory_address (FUNCTION_MODE, function);
1601 /* Generate the actual call instruction and save the return value. */
1602 #ifdef HAVE_untyped_call
1603 if (HAVE_untyped_call)
1604 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1605 result, result_vector (1, result)));
1606 else
1607 #endif
1608 #ifdef HAVE_call_value
1609 if (HAVE_call_value)
1611 rtx valreg = 0;
1613 /* Locate the unique return register. It is not possible to
1614 express a call that sets more than one return register using
1615 call_value; use untyped_call for that. In fact, untyped_call
1616 only needs to save the return registers in the given block. */
1617 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1618 if ((mode = apply_result_mode[regno]) != VOIDmode)
1620 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1622 valreg = gen_rtx_REG (mode, regno);
1625 emit_call_insn (GEN_CALL_VALUE (valreg,
1626 gen_rtx_MEM (FUNCTION_MODE, function),
1627 const0_rtx, NULL_RTX, const0_rtx));
1629 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1631 else
1632 #endif
1633 gcc_unreachable ();
1635 /* Find the CALL insn we just emitted, and attach the register usage
1636 information. */
1637 call_insn = last_call_insn ();
1638 add_function_usage_to (call_insn, call_fusage);
1640 /* Restore the stack. */
1641 #ifdef HAVE_save_stack_nonlocal
1642 if (HAVE_save_stack_nonlocal)
1643 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1644 else
1645 #endif
1646 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1648 OK_DEFER_POP;
1650 /* Return the address of the result block. */
1651 result = copy_addr_to_reg (XEXP (result, 0));
1652 return convert_memory_address (ptr_mode, result);
1655 /* Perform an untyped return. */
1657 static void
1658 expand_builtin_return (rtx result)
1660 int size, align, regno;
1661 enum machine_mode mode;
1662 rtx reg;
1663 rtx call_fusage = 0;
1665 result = convert_memory_address (Pmode, result);
1667 apply_result_size ();
1668 result = gen_rtx_MEM (BLKmode, result);
1670 #ifdef HAVE_untyped_return
1671 if (HAVE_untyped_return)
1673 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1674 emit_barrier ();
1675 return;
1677 #endif
1679 /* Restore the return value and note that each value is used. */
1680 size = 0;
1681 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1682 if ((mode = apply_result_mode[regno]) != VOIDmode)
1684 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1685 if (size % align != 0)
1686 size = CEIL (size, align) * align;
1687 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1688 emit_move_insn (reg, adjust_address (result, mode, size));
1690 push_to_sequence (call_fusage);
1691 emit_use (reg);
1692 call_fusage = get_insns ();
1693 end_sequence ();
1694 size += GET_MODE_SIZE (mode);
1697 /* Put the USE insns before the return. */
1698 emit_insn (call_fusage);
1700 /* Return whatever values were restored by jumping directly to the end
1701 of the function. */
1702 expand_naked_return ();
1705 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1707 static enum type_class
1708 type_to_class (tree type)
1710 switch (TREE_CODE (type))
1712 case VOID_TYPE: return void_type_class;
1713 case INTEGER_TYPE: return integer_type_class;
1714 case ENUMERAL_TYPE: return enumeral_type_class;
1715 case BOOLEAN_TYPE: return boolean_type_class;
1716 case POINTER_TYPE: return pointer_type_class;
1717 case REFERENCE_TYPE: return reference_type_class;
1718 case OFFSET_TYPE: return offset_type_class;
1719 case REAL_TYPE: return real_type_class;
1720 case COMPLEX_TYPE: return complex_type_class;
1721 case FUNCTION_TYPE: return function_type_class;
1722 case METHOD_TYPE: return method_type_class;
1723 case RECORD_TYPE: return record_type_class;
1724 case UNION_TYPE:
1725 case QUAL_UNION_TYPE: return union_type_class;
1726 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1727 ? string_type_class : array_type_class);
1728 case LANG_TYPE: return lang_type_class;
1729 default: return no_type_class;
1733 /* Expand a call EXP to __builtin_classify_type. */
1735 static rtx
1736 expand_builtin_classify_type (tree exp)
1738 if (call_expr_nargs (exp))
1739 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1740 return GEN_INT (no_type_class);
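/* For illustration: __builtin_classify_type (0) folds to
   integer_type_class, __builtin_classify_type (0.0) to real_type_class,
   and a pointer argument to pointer_type_class, matching the switch in
   type_to_class above.  */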
1743 /* This helper macro, meant to be used in mathfn_built_in below,
1744 determines which among a set of three builtin math functions is
1745 appropriate for a given type mode. The `F' and `L' cases are
1746 automatically generated from the `double' case. */
1747 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1748 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1749 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1750 fcodel = BUILT_IN_MATHFN##L ; break;
1751 /* Similar to above, but appends _R after any F/L suffix. */
1752 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1753 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1754 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1755 fcodel = BUILT_IN_MATHFN##L_R ; break;
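/* For example, CASE_MATHFN (BUILT_IN_SIN) expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so a single line in the switch below covers the double, float and
   long double variants of each math builtin.  */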
1757 /* Return the mathematical function equivalent to FN but operating directly
1758 on TYPE, if available. If IMPLICIT is true, find the function in
1759 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1760 can't do the conversion, return zero. */
1762 static tree
1763 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1765 tree const *const fn_arr
1766 = implicit ? implicit_built_in_decls : built_in_decls;
1767 enum built_in_function fcode, fcodef, fcodel;
1769 switch (fn)
1771 CASE_MATHFN (BUILT_IN_ACOS)
1772 CASE_MATHFN (BUILT_IN_ACOSH)
1773 CASE_MATHFN (BUILT_IN_ASIN)
1774 CASE_MATHFN (BUILT_IN_ASINH)
1775 CASE_MATHFN (BUILT_IN_ATAN)
1776 CASE_MATHFN (BUILT_IN_ATAN2)
1777 CASE_MATHFN (BUILT_IN_ATANH)
1778 CASE_MATHFN (BUILT_IN_CBRT)
1779 CASE_MATHFN (BUILT_IN_CEIL)
1780 CASE_MATHFN (BUILT_IN_CEXPI)
1781 CASE_MATHFN (BUILT_IN_COPYSIGN)
1782 CASE_MATHFN (BUILT_IN_COS)
1783 CASE_MATHFN (BUILT_IN_COSH)
1784 CASE_MATHFN (BUILT_IN_DREM)
1785 CASE_MATHFN (BUILT_IN_ERF)
1786 CASE_MATHFN (BUILT_IN_ERFC)
1787 CASE_MATHFN (BUILT_IN_EXP)
1788 CASE_MATHFN (BUILT_IN_EXP10)
1789 CASE_MATHFN (BUILT_IN_EXP2)
1790 CASE_MATHFN (BUILT_IN_EXPM1)
1791 CASE_MATHFN (BUILT_IN_FABS)
1792 CASE_MATHFN (BUILT_IN_FDIM)
1793 CASE_MATHFN (BUILT_IN_FLOOR)
1794 CASE_MATHFN (BUILT_IN_FMA)
1795 CASE_MATHFN (BUILT_IN_FMAX)
1796 CASE_MATHFN (BUILT_IN_FMIN)
1797 CASE_MATHFN (BUILT_IN_FMOD)
1798 CASE_MATHFN (BUILT_IN_FREXP)
1799 CASE_MATHFN (BUILT_IN_GAMMA)
1800 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1801 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1802 CASE_MATHFN (BUILT_IN_HYPOT)
1803 CASE_MATHFN (BUILT_IN_ILOGB)
1804 CASE_MATHFN (BUILT_IN_INF)
1805 CASE_MATHFN (BUILT_IN_ISINF)
1806 CASE_MATHFN (BUILT_IN_J0)
1807 CASE_MATHFN (BUILT_IN_J1)
1808 CASE_MATHFN (BUILT_IN_JN)
1809 CASE_MATHFN (BUILT_IN_LCEIL)
1810 CASE_MATHFN (BUILT_IN_LDEXP)
1811 CASE_MATHFN (BUILT_IN_LFLOOR)
1812 CASE_MATHFN (BUILT_IN_LGAMMA)
1813 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1814 CASE_MATHFN (BUILT_IN_LLCEIL)
1815 CASE_MATHFN (BUILT_IN_LLFLOOR)
1816 CASE_MATHFN (BUILT_IN_LLRINT)
1817 CASE_MATHFN (BUILT_IN_LLROUND)
1818 CASE_MATHFN (BUILT_IN_LOG)
1819 CASE_MATHFN (BUILT_IN_LOG10)
1820 CASE_MATHFN (BUILT_IN_LOG1P)
1821 CASE_MATHFN (BUILT_IN_LOG2)
1822 CASE_MATHFN (BUILT_IN_LOGB)
1823 CASE_MATHFN (BUILT_IN_LRINT)
1824 CASE_MATHFN (BUILT_IN_LROUND)
1825 CASE_MATHFN (BUILT_IN_MODF)
1826 CASE_MATHFN (BUILT_IN_NAN)
1827 CASE_MATHFN (BUILT_IN_NANS)
1828 CASE_MATHFN (BUILT_IN_NEARBYINT)
1829 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1830 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1831 CASE_MATHFN (BUILT_IN_POW)
1832 CASE_MATHFN (BUILT_IN_POWI)
1833 CASE_MATHFN (BUILT_IN_POW10)
1834 CASE_MATHFN (BUILT_IN_REMAINDER)
1835 CASE_MATHFN (BUILT_IN_REMQUO)
1836 CASE_MATHFN (BUILT_IN_RINT)
1837 CASE_MATHFN (BUILT_IN_ROUND)
1838 CASE_MATHFN (BUILT_IN_SCALB)
1839 CASE_MATHFN (BUILT_IN_SCALBLN)
1840 CASE_MATHFN (BUILT_IN_SCALBN)
1841 CASE_MATHFN (BUILT_IN_SIGNBIT)
1842 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1843 CASE_MATHFN (BUILT_IN_SIN)
1844 CASE_MATHFN (BUILT_IN_SINCOS)
1845 CASE_MATHFN (BUILT_IN_SINH)
1846 CASE_MATHFN (BUILT_IN_SQRT)
1847 CASE_MATHFN (BUILT_IN_TAN)
1848 CASE_MATHFN (BUILT_IN_TANH)
1849 CASE_MATHFN (BUILT_IN_TGAMMA)
1850 CASE_MATHFN (BUILT_IN_TRUNC)
1851 CASE_MATHFN (BUILT_IN_Y0)
1852 CASE_MATHFN (BUILT_IN_Y1)
1853 CASE_MATHFN (BUILT_IN_YN)
1855 default:
1856 return NULL_TREE;
1859 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1860 return fn_arr[fcode];
1861 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1862 return fn_arr[fcodef];
1863 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1864 return fn_arr[fcodel];
1865 else
1866 return NULL_TREE;
1869 /* Like mathfn_built_in_1(), but always use the implicit array. */
1871 tree
1872 mathfn_built_in (tree type, enum built_in_function fn)
1874 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
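/* For example, mathfn_built_in (long_double_type_node, BUILT_IN_SIN)
   returns the implicit declaration of sinl, which may be NULL_TREE when
   the target runtime is not expected to provide it.  */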
1877 /* If errno must be maintained, expand the RTL to check if the result,
1878 TARGET, of a built-in function call, EXP, is NaN, and if so set
1879 errno to EDOM. */
1881 static void
1882 expand_errno_check (tree exp, rtx target)
1884 rtx lab = gen_label_rtx ();
1886 /* Test the result; if it is NaN, set errno=EDOM because
1887 the argument was not in the domain. */
1888 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1889 NULL_RTX, NULL_RTX, lab);
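  /* Only a NaN is unequal to itself, so the EQ branch to LAB is taken
     for every in-domain result and the errno store below is skipped.  */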
1891 #ifdef TARGET_EDOM
1892 /* If this built-in doesn't throw an exception, set errno directly. */
1893 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1895 #ifdef GEN_ERRNO_RTX
1896 rtx errno_rtx = GEN_ERRNO_RTX;
1897 #else
1898 rtx errno_rtx
1899 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1900 #endif
1901 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1902 emit_label (lab);
1903 return;
1905 #endif
1907 /* Make sure the library call isn't expanded as a tail call. */
1908 CALL_EXPR_TAILCALL (exp) = 0;
1910 /* We can't set errno=EDOM directly; let the library call do it.
1911 Pop the arguments right away in case the call gets deleted. */
1912 NO_DEFER_POP;
1913 expand_call (exp, target, 0);
1914 OK_DEFER_POP;
1915 emit_label (lab);
1918 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1919 Return NULL_RTX if a normal call should be emitted rather than expanding
1920 the function in-line. EXP is the expression that is a call to the builtin
1921 function; if convenient, the result should be placed in TARGET.
1922 SUBTARGET may be used as the target for computing one of EXP's operands. */
1924 static rtx
1925 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1927 optab builtin_optab;
1928 rtx op0, insns, before_call;
1929 tree fndecl = get_callee_fndecl (exp);
1930 enum machine_mode mode;
1931 bool errno_set = false;
1932 tree arg;
1934 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1935 return NULL_RTX;
1937 arg = CALL_EXPR_ARG (exp, 0);
1939 switch (DECL_FUNCTION_CODE (fndecl))
1941 CASE_FLT_FN (BUILT_IN_SQRT):
1942 errno_set = ! tree_expr_nonnegative_p (arg);
1943 builtin_optab = sqrt_optab;
1944 break;
1945 CASE_FLT_FN (BUILT_IN_EXP):
1946 errno_set = true; builtin_optab = exp_optab; break;
1947 CASE_FLT_FN (BUILT_IN_EXP10):
1948 CASE_FLT_FN (BUILT_IN_POW10):
1949 errno_set = true; builtin_optab = exp10_optab; break;
1950 CASE_FLT_FN (BUILT_IN_EXP2):
1951 errno_set = true; builtin_optab = exp2_optab; break;
1952 CASE_FLT_FN (BUILT_IN_EXPM1):
1953 errno_set = true; builtin_optab = expm1_optab; break;
1954 CASE_FLT_FN (BUILT_IN_LOGB):
1955 errno_set = true; builtin_optab = logb_optab; break;
1956 CASE_FLT_FN (BUILT_IN_LOG):
1957 errno_set = true; builtin_optab = log_optab; break;
1958 CASE_FLT_FN (BUILT_IN_LOG10):
1959 errno_set = true; builtin_optab = log10_optab; break;
1960 CASE_FLT_FN (BUILT_IN_LOG2):
1961 errno_set = true; builtin_optab = log2_optab; break;
1962 CASE_FLT_FN (BUILT_IN_LOG1P):
1963 errno_set = true; builtin_optab = log1p_optab; break;
1964 CASE_FLT_FN (BUILT_IN_ASIN):
1965 builtin_optab = asin_optab; break;
1966 CASE_FLT_FN (BUILT_IN_ACOS):
1967 builtin_optab = acos_optab; break;
1968 CASE_FLT_FN (BUILT_IN_TAN):
1969 builtin_optab = tan_optab; break;
1970 CASE_FLT_FN (BUILT_IN_ATAN):
1971 builtin_optab = atan_optab; break;
1972 CASE_FLT_FN (BUILT_IN_FLOOR):
1973 builtin_optab = floor_optab; break;
1974 CASE_FLT_FN (BUILT_IN_CEIL):
1975 builtin_optab = ceil_optab; break;
1976 CASE_FLT_FN (BUILT_IN_TRUNC):
1977 builtin_optab = btrunc_optab; break;
1978 CASE_FLT_FN (BUILT_IN_ROUND):
1979 builtin_optab = round_optab; break;
1980 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1981 builtin_optab = nearbyint_optab;
1982 if (flag_trapping_math)
1983 break;
1984 /* Else fall through and expand as rint. */
1985 CASE_FLT_FN (BUILT_IN_RINT):
1986 builtin_optab = rint_optab; break;
1987 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1988 builtin_optab = significand_optab; break;
1989 default:
1990 gcc_unreachable ();
1993 /* Make a suitable register to place result in. */
1994 mode = TYPE_MODE (TREE_TYPE (exp));
1996 if (! flag_errno_math || ! HONOR_NANS (mode))
1997 errno_set = false;
1999 /* Before working hard, check whether the instruction is available. */
2000 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2002 target = gen_reg_rtx (mode);
2004 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2005 need to expand the argument again. This way, we will not perform
2006 side-effects more than once. */
2007 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2009 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2011 start_sequence ();
2013 /* Compute into TARGET.
2014 Set TARGET to wherever the result comes back. */
2015 target = expand_unop (mode, builtin_optab, op0, target, 0);
2017 if (target != 0)
2019 if (errno_set)
2020 expand_errno_check (exp, target);
2022 /* Output the entire sequence. */
2023 insns = get_insns ();
2024 end_sequence ();
2025 emit_insn (insns);
2026 return target;
2029 /* If we were unable to expand via the builtin, stop the sequence
2030 (without outputting the insns) and call to the library function
2031 with the stabilized argument list. */
2032 end_sequence ();
2035 before_call = get_last_insn ();
2037 return expand_call (exp, target, target == const0_rtx);
2040 /* Expand a call to the builtin binary math functions (pow and atan2).
2041 Return NULL_RTX if a normal call should be emitted rather than expanding the
2042 function in-line. EXP is the expression that is a call to the builtin
2043 function; if convenient, the result should be placed in TARGET.
2044 SUBTARGET may be used as the target for computing one of EXP's
2045 operands. */
2047 static rtx
2048 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2050 optab builtin_optab;
2051 rtx op0, op1, insns;
2052 int op1_type = REAL_TYPE;
2053 tree fndecl = get_callee_fndecl (exp);
2054 tree arg0, arg1;
2055 enum machine_mode mode;
2056 bool errno_set = true;
2058 switch (DECL_FUNCTION_CODE (fndecl))
2060 CASE_FLT_FN (BUILT_IN_SCALBN):
2061 CASE_FLT_FN (BUILT_IN_SCALBLN):
2062 CASE_FLT_FN (BUILT_IN_LDEXP):
2063 op1_type = INTEGER_TYPE;
2064 default:
2065 break;
2068 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2069 return NULL_RTX;
2071 arg0 = CALL_EXPR_ARG (exp, 0);
2072 arg1 = CALL_EXPR_ARG (exp, 1);
2074 switch (DECL_FUNCTION_CODE (fndecl))
2076 CASE_FLT_FN (BUILT_IN_POW):
2077 builtin_optab = pow_optab; break;
2078 CASE_FLT_FN (BUILT_IN_ATAN2):
2079 builtin_optab = atan2_optab; break;
2080 CASE_FLT_FN (BUILT_IN_SCALB):
2081 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2082 return 0;
2083 builtin_optab = scalb_optab; break;
2084 CASE_FLT_FN (BUILT_IN_SCALBN):
2085 CASE_FLT_FN (BUILT_IN_SCALBLN):
2086 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2087 return 0;
2088 /* Fall through... */
2089 CASE_FLT_FN (BUILT_IN_LDEXP):
2090 builtin_optab = ldexp_optab; break;
2091 CASE_FLT_FN (BUILT_IN_FMOD):
2092 builtin_optab = fmod_optab; break;
2093 CASE_FLT_FN (BUILT_IN_REMAINDER):
2094 CASE_FLT_FN (BUILT_IN_DREM):
2095 builtin_optab = remainder_optab; break;
2096 default:
2097 gcc_unreachable ();
2100 /* Make a suitable register to place result in. */
2101 mode = TYPE_MODE (TREE_TYPE (exp));
2103 /* Before working hard, check whether the instruction is available. */
2104 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2105 return NULL_RTX;
2107 target = gen_reg_rtx (mode);
2109 if (! flag_errno_math || ! HONOR_NANS (mode))
2110 errno_set = false;
2112 /* Always stabilize the argument list. */
2113 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2114 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2116 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2117 op1 = expand_normal (arg1);
2119 start_sequence ();
2121 /* Compute into TARGET.
2122 Set TARGET to wherever the result comes back. */
2123 target = expand_binop (mode, builtin_optab, op0, op1,
2124 target, 0, OPTAB_DIRECT);
2126 /* If we were unable to expand via the builtin, stop the sequence
2127 (without outputting the insns) and call to the library function
2128 with the stabilized argument list. */
2129 if (target == 0)
2131 end_sequence ();
2132 return expand_call (exp, target, target == const0_rtx);
2135 if (errno_set)
2136 expand_errno_check (exp, target);
2138 /* Output the entire sequence. */
2139 insns = get_insns ();
2140 end_sequence ();
2141 emit_insn (insns);
2143 return target;
2146 /* Expand a call to the builtin sin and cos math functions.
2147 Return NULL_RTX if a normal call should be emitted rather than expanding the
2148 function in-line. EXP is the expression that is a call to the builtin
2149 function; if convenient, the result should be placed in TARGET.
2150 SUBTARGET may be used as the target for computing one of EXP's
2151 operands. */
2153 static rtx
2154 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2156 optab builtin_optab;
2157 rtx op0, insns;
2158 tree fndecl = get_callee_fndecl (exp);
2159 enum machine_mode mode;
2160 tree arg;
2162 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2163 return NULL_RTX;
2165 arg = CALL_EXPR_ARG (exp, 0);
2167 switch (DECL_FUNCTION_CODE (fndecl))
2169 CASE_FLT_FN (BUILT_IN_SIN):
2170 CASE_FLT_FN (BUILT_IN_COS):
2171 builtin_optab = sincos_optab; break;
2172 default:
2173 gcc_unreachable ();
2176 /* Make a suitable register to place result in. */
2177 mode = TYPE_MODE (TREE_TYPE (exp));
2179 /* Check if the sincos insn is available; otherwise fall back
2180 to the sin or cos insn. */
2181 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2182 switch (DECL_FUNCTION_CODE (fndecl))
2184 CASE_FLT_FN (BUILT_IN_SIN):
2185 builtin_optab = sin_optab; break;
2186 CASE_FLT_FN (BUILT_IN_COS):
2187 builtin_optab = cos_optab; break;
2188 default:
2189 gcc_unreachable ();
2192 /* Before working hard, check whether the instruction is available. */
2193 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2195 target = gen_reg_rtx (mode);
2197 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2198 need to expand the argument again. This way, we will not perform
2199 side-effects more than once. */
2200 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2202 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2204 start_sequence ();
2206 /* Compute into TARGET.
2207 Set TARGET to wherever the result comes back. */
2208 if (builtin_optab == sincos_optab)
2210 int result;
2212 switch (DECL_FUNCTION_CODE (fndecl))
2214 CASE_FLT_FN (BUILT_IN_SIN):
2215 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2216 break;
2217 CASE_FLT_FN (BUILT_IN_COS):
2218 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2219 break;
2220 default:
2221 gcc_unreachable ();
2223 gcc_assert (result);
2225 else
2227 target = expand_unop (mode, builtin_optab, op0, target, 0);
2230 if (target != 0)
2232 /* Output the entire sequence. */
2233 insns = get_insns ();
2234 end_sequence ();
2235 emit_insn (insns);
2236 return target;
2239 /* If we were unable to expand via the builtin, stop the sequence
2240 (without outputting the insns) and call to the library function
2241 with the stabilized argument list. */
2242 end_sequence ();
2245 target = expand_call (exp, target, target == const0_rtx);
2247 return target;
2250 /* Expand a call to one of the builtin math functions that operate on
2251 a floating point argument and produce an integer result (ilogb, isinf,
2252 isnan, etc).
2253 Return 0 if a normal call should be emitted rather than expanding the
2254 function in-line. EXP is the expression that is a call to the builtin
2255 function; if convenient, the result should be placed in TARGET.
2256 SUBTARGET may be used as the target for computing one of EXP's operands. */
2258 static rtx
2259 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2261 optab builtin_optab = 0;
2262 enum insn_code icode = CODE_FOR_nothing;
2263 rtx op0;
2264 tree fndecl = get_callee_fndecl (exp);
2265 enum machine_mode mode;
2266 bool errno_set = false;
2267 tree arg;
2268 location_t loc = EXPR_LOCATION (exp);
2270 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2271 return NULL_RTX;
2273 arg = CALL_EXPR_ARG (exp, 0);
2275 switch (DECL_FUNCTION_CODE (fndecl))
2277 CASE_FLT_FN (BUILT_IN_ILOGB):
2278 errno_set = true; builtin_optab = ilogb_optab; break;
2279 CASE_FLT_FN (BUILT_IN_ISINF):
2280 builtin_optab = isinf_optab; break;
2281 case BUILT_IN_ISNORMAL:
2282 case BUILT_IN_ISFINITE:
2283 CASE_FLT_FN (BUILT_IN_FINITE):
2284 /* These builtins have no optabs (yet). */
2285 break;
2286 default:
2287 gcc_unreachable ();
2290 /* There's no easy way to detect the case we need to set EDOM. */
2291 if (flag_errno_math && errno_set)
2292 return NULL_RTX;
2294 /* Optab mode depends on the mode of the input argument. */
2295 mode = TYPE_MODE (TREE_TYPE (arg));
2297 if (builtin_optab)
2298 icode = optab_handler (builtin_optab, mode)->insn_code;
2300 /* Before working hard, check whether the instruction is available. */
2301 if (icode != CODE_FOR_nothing)
2303 /* Make a suitable register to place result in. */
2304 if (!target
2305 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2306 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2308 gcc_assert (insn_data[icode].operand[0].predicate
2309 (target, GET_MODE (target)));
2311 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2312 need to expand the argument again. This way, we will not perform
2313 side-effects more than once. */
2314 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2316 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2318 if (mode != GET_MODE (op0))
2319 op0 = convert_to_mode (mode, op0, 0);
2321 /* Compute into TARGET.
2322 Set TARGET to wherever the result comes back. */
2323 emit_unop_insn (icode, target, op0, UNKNOWN);
2324 return target;
2327 /* If there is no optab, try generic code. */
2328 switch (DECL_FUNCTION_CODE (fndecl))
2330 tree result;
2332 CASE_FLT_FN (BUILT_IN_ISINF):
2334 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2335 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2336 tree const type = TREE_TYPE (arg);
2337 REAL_VALUE_TYPE r;
2338 char buf[128];
2340 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2341 real_from_string (&r, buf);
2342 result = build_call_expr (isgr_fn, 2,
2343 fold_build1_loc (loc, ABS_EXPR, type, arg),
2344 build_real (type, r));
2345 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2347 CASE_FLT_FN (BUILT_IN_FINITE):
2348 case BUILT_IN_ISFINITE:
2350 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2351 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2352 tree const type = TREE_TYPE (arg);
2353 REAL_VALUE_TYPE r;
2354 char buf[128];
2356 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2357 real_from_string (&r, buf);
2358 result = build_call_expr (isle_fn, 2,
2359 fold_build1_loc (loc, ABS_EXPR, type, arg),
2360 build_real (type, r));
2361 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2363 case BUILT_IN_ISNORMAL:
2365 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2366 islessequal(fabs(x),DBL_MAX). */
2367 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2368 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2369 tree const type = TREE_TYPE (arg);
2370 REAL_VALUE_TYPE rmax, rmin;
2371 char buf[128];
2373 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2374 real_from_string (&rmax, buf);
2375 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2376 real_from_string (&rmin, buf);
2377 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
2378 result = build_call_expr (isle_fn, 2, arg,
2379 build_real (type, rmax));
2380 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2381 build_call_expr (isge_fn, 2, arg,
2382 build_real (type, rmin)));
2383 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2385 default:
2386 break;
2389 target = expand_call (exp, target, target == const0_rtx);
2391 return target;
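/* A minimal source-level sketch of the generic lowerings above, assuming
   a double argument and the C99 <math.h> comparison macros; the *_sketch
   names are illustrative only and this is not code GCC emits.  */

#include <math.h>
#include <float.h>

static int
isinf_sketch (double x)
{
  /* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
  return isgreater (fabs (x), DBL_MAX);
}

static int
isfinite_sketch (double x)
{
  /* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
  return islessequal (fabs (x), DBL_MAX);
}

static int
isnormal_sketch (double x)
{
  /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN)
                    & islessequal(fabs(x),DBL_MAX); the bitwise AND
     mirrors the BIT_AND_EXPR used in the expansion above.  */
  double ax = fabs (x);
  return islessequal (ax, DBL_MAX) & isgreaterequal (ax, DBL_MIN);
}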
2394 /* Expand a call to the builtin sincos math function.
2395 Return NULL_RTX if a normal call should be emitted rather than expanding the
2396 function in-line. EXP is the expression that is a call to the builtin
2397 function. */
2399 static rtx
2400 expand_builtin_sincos (tree exp)
2402 rtx op0, op1, op2, target1, target2;
2403 enum machine_mode mode;
2404 tree arg, sinp, cosp;
2405 int result;
2406 location_t loc = EXPR_LOCATION (exp);
2408 if (!validate_arglist (exp, REAL_TYPE,
2409 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2410 return NULL_RTX;
2412 arg = CALL_EXPR_ARG (exp, 0);
2413 sinp = CALL_EXPR_ARG (exp, 1);
2414 cosp = CALL_EXPR_ARG (exp, 2);
2416 /* Make a suitable register to place result in. */
2417 mode = TYPE_MODE (TREE_TYPE (arg));
2419 /* Check if sincos insn is available, otherwise emit the call. */
2420 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2421 return NULL_RTX;
2423 target1 = gen_reg_rtx (mode);
2424 target2 = gen_reg_rtx (mode);
2426 op0 = expand_normal (arg);
2427 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2428 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2430 /* Compute into target1 and target2.
2431 Set TARGET to wherever the result comes back. */
2432 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2433 gcc_assert (result);
2435 /* Move target1 and target2 to the memory locations indicated
2436 by op1 and op2. */
2437 emit_move_insn (op1, target1);
2438 emit_move_insn (op2, target2);
2440 return const0_rtx;
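/* In source terms this handles calls such as sincos (x, &s, &c),
   computing both results with one sincos instruction when the target
   provides it; otherwise NULL_RTX is returned and the library call is
   emitted as usual.  */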
2443 /* Expand a call to the internal cexpi builtin to the sincos math function.
2444 EXP is the expression that is a call to the builtin function; if convenient,
2445 the result should be placed in TARGET. SUBTARGET may be used as the target
2446 for computing one of EXP's operands. */
2448 static rtx
2449 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2451 tree fndecl = get_callee_fndecl (exp);
2452 tree arg, type;
2453 enum machine_mode mode;
2454 rtx op0, op1, op2;
2455 location_t loc = EXPR_LOCATION (exp);
2457 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2458 return NULL_RTX;
2460 arg = CALL_EXPR_ARG (exp, 0);
2461 type = TREE_TYPE (arg);
2462 mode = TYPE_MODE (TREE_TYPE (arg));
2464 /* Try expanding via a sincos optab; fall back to emitting a libcall
2465 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2466 is only generated from sincos or cexp, or when either of them is available. */
2467 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2469 op1 = gen_reg_rtx (mode);
2470 op2 = gen_reg_rtx (mode);
2472 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2474 /* Compute into op1 and op2. */
2475 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2477 else if (TARGET_HAS_SINCOS)
2479 tree call, fn = NULL_TREE;
2480 tree top1, top2;
2481 rtx op1a, op2a;
2483 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2484 fn = built_in_decls[BUILT_IN_SINCOSF];
2485 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2486 fn = built_in_decls[BUILT_IN_SINCOS];
2487 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2488 fn = built_in_decls[BUILT_IN_SINCOSL];
2489 else
2490 gcc_unreachable ();
2492 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2493 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2494 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2495 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2496 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2497 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2499 /* Make sure not to fold the sincos call again. */
2500 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2501 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2502 call, 3, arg, top1, top2));
2504 else
2506 tree call, fn = NULL_TREE, narg;
2507 tree ctype = build_complex_type (type);
2509 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2510 fn = built_in_decls[BUILT_IN_CEXPF];
2511 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2512 fn = built_in_decls[BUILT_IN_CEXP];
2513 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2514 fn = built_in_decls[BUILT_IN_CEXPL];
2515 else
2516 gcc_unreachable ();
2518 /* If we don't have a decl for cexp, create one. This is the
2519 friendliest fallback if the user calls __builtin_cexpi
2520 on a target without full C99 function support. */
2521 if (fn == NULL_TREE)
2523 tree fntype;
2524 const char *name = NULL;
2526 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2527 name = "cexpf";
2528 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2529 name = "cexp";
2530 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2531 name = "cexpl";
2533 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2534 fn = build_fn_decl (name, fntype);
2537 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2538 build_real (type, dconst0), arg);
2540 /* Make sure not to fold the cexp call again. */
2541 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2542 return expand_expr (build_call_nary (ctype, call, 1, narg),
2543 target, VOIDmode, EXPAND_NORMAL);
2546 /* Now build the proper return type. */
2547 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2548 make_tree (TREE_TYPE (arg), op2),
2549 make_tree (TREE_TYPE (arg), op1)),
2550 target, VOIDmode, EXPAND_NORMAL);
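/* The identity behind the cexp fallback above is Euler's formula:
   cexp (0.0 + arg * I) == cos (arg) + sin (arg) * I, which is exactly
   the value __builtin_cexpi (arg) is defined to produce.  */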
2553 /* Expand a call to one of the builtin rounding functions gcc defines
2554 as an extension (lfloor and lceil). As these are gcc extensions we
2555 do not need to worry about setting errno to EDOM.
2556 If expanding via optab fails, lower expression to (int)(floor(x)).
2557 EXP is the expression that is a call to the builtin function;
2558 if convenient, the result should be placed in TARGET. */
2560 static rtx
2561 expand_builtin_int_roundingfn (tree exp, rtx target)
2563 convert_optab builtin_optab;
2564 rtx op0, insns, tmp;
2565 tree fndecl = get_callee_fndecl (exp);
2566 enum built_in_function fallback_fn;
2567 tree fallback_fndecl;
2568 enum machine_mode mode;
2569 tree arg;
2571 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2572 gcc_unreachable ();
2574 arg = CALL_EXPR_ARG (exp, 0);
2576 switch (DECL_FUNCTION_CODE (fndecl))
2578 CASE_FLT_FN (BUILT_IN_LCEIL):
2579 CASE_FLT_FN (BUILT_IN_LLCEIL):
2580 builtin_optab = lceil_optab;
2581 fallback_fn = BUILT_IN_CEIL;
2582 break;
2584 CASE_FLT_FN (BUILT_IN_LFLOOR):
2585 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2586 builtin_optab = lfloor_optab;
2587 fallback_fn = BUILT_IN_FLOOR;
2588 break;
2590 default:
2591 gcc_unreachable ();
2594 /* Make a suitable register to place result in. */
2595 mode = TYPE_MODE (TREE_TYPE (exp));
2597 target = gen_reg_rtx (mode);
2599 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2600 need to expand the argument again. This way, we will not perform
2601 side-effects more than once. */
2602 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2604 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2606 start_sequence ();
2608 /* Compute into TARGET. */
2609 if (expand_sfix_optab (target, op0, builtin_optab))
2611 /* Output the entire sequence. */
2612 insns = get_insns ();
2613 end_sequence ();
2614 emit_insn (insns);
2615 return target;
2618 /* If we were unable to expand via the builtin, stop the sequence
2619 (without outputting the insns). */
2620 end_sequence ();
2622 /* Fall back to floating point rounding optab. */
2623 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2625 /* For non-C99 targets we may end up without a fallback fndecl here
2626 if the user called __builtin_lfloor directly. In this case emit
2627 a call to the floor/ceil variants nevertheless. This should give
2628 the best behavior on targets without full C99 support. */
2629 if (fallback_fndecl == NULL_TREE)
2631 tree fntype;
2632 const char *name = NULL;
2634 switch (DECL_FUNCTION_CODE (fndecl))
2636 case BUILT_IN_LCEIL:
2637 case BUILT_IN_LLCEIL:
2638 name = "ceil";
2639 break;
2640 case BUILT_IN_LCEILF:
2641 case BUILT_IN_LLCEILF:
2642 name = "ceilf";
2643 break;
2644 case BUILT_IN_LCEILL:
2645 case BUILT_IN_LLCEILL:
2646 name = "ceill";
2647 break;
2648 case BUILT_IN_LFLOOR:
2649 case BUILT_IN_LLFLOOR:
2650 name = "floor";
2651 break;
2652 case BUILT_IN_LFLOORF:
2653 case BUILT_IN_LLFLOORF:
2654 name = "floorf";
2655 break;
2656 case BUILT_IN_LFLOORL:
2657 case BUILT_IN_LLFLOORL:
2658 name = "floorl";
2659 break;
2660 default:
2661 gcc_unreachable ();
2664 fntype = build_function_type_list (TREE_TYPE (arg),
2665 TREE_TYPE (arg), NULL_TREE);
2666 fallback_fndecl = build_fn_decl (name, fntype);
2669 exp = build_call_expr (fallback_fndecl, 1, arg);
2671 tmp = expand_normal (exp);
2673 /* Truncate the result of the floating point optab to an integer
2674 via expand_fix (). */
2675 target = gen_reg_rtx (mode);
2676 expand_fix (target, tmp, 0);
2678 return target;
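/* Roughly speaking, the fallback path above turns a call such as
   lfloor (x) into (long) floor (x), with floor possibly declared by hand
   on targets whose runtime lacks the C99 functions.  */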
2681 /* Expand a call to one of the builtin math functions doing integer
2682 conversion (lrint).
2683 Return 0 if a normal call should be emitted rather than expanding the
2684 function in-line. EXP is the expression that is a call to the builtin
2685 function; if convenient, the result should be placed in TARGET. */
2687 static rtx
2688 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2690 convert_optab builtin_optab;
2691 rtx op0, insns;
2692 tree fndecl = get_callee_fndecl (exp);
2693 tree arg;
2694 enum machine_mode mode;
2696 /* There's no easy way to detect the case we need to set EDOM. */
2697 if (flag_errno_math)
2698 return NULL_RTX;
2700 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2701 gcc_unreachable ();
2703 arg = CALL_EXPR_ARG (exp, 0);
2705 switch (DECL_FUNCTION_CODE (fndecl))
2707 CASE_FLT_FN (BUILT_IN_LRINT):
2708 CASE_FLT_FN (BUILT_IN_LLRINT):
2709 builtin_optab = lrint_optab; break;
2710 CASE_FLT_FN (BUILT_IN_LROUND):
2711 CASE_FLT_FN (BUILT_IN_LLROUND):
2712 builtin_optab = lround_optab; break;
2713 default:
2714 gcc_unreachable ();
2717 /* Make a suitable register to place result in. */
2718 mode = TYPE_MODE (TREE_TYPE (exp));
2720 target = gen_reg_rtx (mode);
2722 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2723 need to expand the argument again. This way, we will not perform
2724 side-effects more than once. */
2725 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2727 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2729 start_sequence ();
2731 if (expand_sfix_optab (target, op0, builtin_optab))
2733 /* Output the entire sequence. */
2734 insns = get_insns ();
2735 end_sequence ();
2736 emit_insn (insns);
2737 return target;
2740 /* If we were unable to expand via the builtin, stop the sequence
2741 (without outputting the insns) and call to the library function
2742 with the stabilized argument list. */
2743 end_sequence ();
2745 target = expand_call (exp, target, target == const0_rtx);
2747 return target;
2750 /* To evaluate powi(x,n), the floating point value x raised to the
2751 constant integer exponent n, we use a hybrid algorithm that
2752 combines the "window method" with look-up tables. For an
2753 introduction to exponentiation algorithms and "addition chains",
2754 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2755 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2756 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2757 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2759 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2760 multiplications to inline before calling the system library's pow
2761 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2762 so this default never requires calling pow, powf or powl. */
2764 #ifndef POWI_MAX_MULTS
2765 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2766 #endif
2768 /* The size of the "optimal power tree" lookup table. All
2769 exponents less than this value are simply looked up in the
2770 powi_table below. This threshold is also used to size the
2771 cache of pseudo registers that hold intermediate results. */
2772 #define POWI_TABLE_SIZE 256
2774 /* The size, in bits, of the window used in the "window method"
2775 exponentiation algorithm. This is equivalent to a radix of
2776 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2777 #define POWI_WINDOW_SIZE 3
2779 /* The following table is an efficient representation of an
2780 "optimal power tree". For each value, i, the corresponding
2781 value, j, in the table states that an optimal evaluation
2782 sequence for calculating pow(x,i) can be found by evaluating
2783 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2784 100 integers is given in Knuth's "Seminumerical algorithms". */
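/* For example (see the table below), powi_table[7] == 4, so x**7 is
   evaluated as x**4 * x**3; recursively x**4 = x**2 * x**2 and
   x**3 = x**2 * x, for a total of four multiplications once x**2 is
   cached.  */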
2786 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2788 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2789 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2790 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2791 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2792 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2793 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2794 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2795 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2796 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2797 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2798 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2799 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2800 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2801 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2802 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2803 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2804 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2805 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2806 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2807 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2808 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2809 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2810 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2811 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2812 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2813 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2814 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2815 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2816 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2817 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2818 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2819 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2823 /* Return the number of multiplications required to calculate
2824 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2825 subroutine of powi_cost. CACHE is an array indicating
2826 which exponents have already been calculated. */
2828 static int
2829 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2831 /* If we've already calculated this exponent, then this evaluation
2832 doesn't require any additional multiplications. */
2833 if (cache[n])
2834 return 0;
2836 cache[n] = true;
2837 return powi_lookup_cost (n - powi_table[n], cache)
2838 + powi_lookup_cost (powi_table[n], cache) + 1;
2841 /* Return the number of multiplications required to calculate
2842 powi(x,n) for an arbitrary x, given the exponent N. This
2843 function needs to be kept in sync with expand_powi below. */
2845 static int
2846 powi_cost (HOST_WIDE_INT n)
2848 bool cache[POWI_TABLE_SIZE];
2849 unsigned HOST_WIDE_INT digit;
2850 unsigned HOST_WIDE_INT val;
2851 int result;
2853 if (n == 0)
2854 return 0;
2856 /* Ignore the reciprocal when calculating the cost. */
2857 val = (n < 0) ? -n : n;
2859 /* Initialize the exponent cache. */
2860 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2861 cache[1] = true;
2863 result = 0;
2865 while (val >= POWI_TABLE_SIZE)
2867 if (val & 1)
2869 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2870 result += powi_lookup_cost (digit, cache)
2871 + POWI_WINDOW_SIZE + 1;
2872 val >>= POWI_WINDOW_SIZE;
2874 else
2876 val >>= 1;
2877 result++;
2881 return result + powi_lookup_cost (val, cache);
2884 /* Recursive subroutine of expand_powi. This function takes the array,
2885 CACHE, of already calculated exponents and an exponent N and returns
2886 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2888 static rtx
2889 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2891 unsigned HOST_WIDE_INT digit;
2892 rtx target, result;
2893 rtx op0, op1;
2895 if (n < POWI_TABLE_SIZE)
2897 if (cache[n])
2898 return cache[n];
2900 target = gen_reg_rtx (mode);
2901 cache[n] = target;
2903 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2904 op1 = expand_powi_1 (mode, powi_table[n], cache);
2906 else if (n & 1)
2908 target = gen_reg_rtx (mode);
2909 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2910 op0 = expand_powi_1 (mode, n - digit, cache);
2911 op1 = expand_powi_1 (mode, digit, cache);
2913 else
2915 target = gen_reg_rtx (mode);
2916 op0 = expand_powi_1 (mode, n >> 1, cache);
2917 op1 = op0;
2920 result = expand_mult (mode, op0, op1, target, 0);
2921 if (result != target)
2922 emit_move_insn (target, result);
2923 return target;
2926 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2927 floating point operand in mode MODE, and N is the exponent. This
2928 function needs to be kept in sync with powi_cost above. */
2930 static rtx
2931 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2933 unsigned HOST_WIDE_INT val;
2934 rtx cache[POWI_TABLE_SIZE];
2935 rtx result;
2937 if (n == 0)
2938 return CONST1_RTX (mode);
2940 val = (n < 0) ? -n : n;
2942 memset (cache, 0, sizeof (cache));
2943 cache[1] = x;
2945 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2947 /* If the original exponent was negative, reciprocate the result. */
2948 if (n < 0)
2949 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2950 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2952 return result;
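/* A minimal sketch of the same evaluation strategy on plain doubles,
   assuming the powi_table, POWI_TABLE_SIZE and POWI_WINDOW_SIZE defined
   above; the powi_sketch names are illustrative and this is not the RTL
   sequence actually emitted.  */

#include <string.h>

static double
powi_sketch_1 (unsigned long n, double *cache, char *cached)
{
  double op0, op1;

  if (n < POWI_TABLE_SIZE)
    {
      if (cached[n])
	return cache[n];

      /* Split the exponent as suggested by the optimal power tree.  */
      op0 = powi_sketch_1 (n - powi_table[n], cache, cached);
      op1 = powi_sketch_1 (powi_table[n], cache, cached);
      cached[n] = 1;
      return cache[n] = op0 * op1;
    }
  else if (n & 1)
    {
      /* Window method: peel off the low POWI_WINDOW_SIZE bits.  */
      unsigned long digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      op0 = powi_sketch_1 (n - digit, cache, cached);
      op1 = powi_sketch_1 (digit, cache, cached);
      return op0 * op1;
    }
  else
    {
      /* Even exponent: square the half power.  */
      op0 = powi_sketch_1 (n >> 1, cache, cached);
      return op0 * op0;
    }
}

static double
powi_sketch (double x, long n)
{
  double cache[POWI_TABLE_SIZE];
  char cached[POWI_TABLE_SIZE];
  double result;

  if (n == 0)
    return 1.0;

  memset (cached, 0, sizeof (cached));
  cache[1] = x;
  cached[1] = 1;

  result = powi_sketch_1 (n < 0 ? -(unsigned long) n : (unsigned long) n,
			  cache, cached);

  /* A negative exponent reciprocates the result, as in expand_powi.  */
  return n < 0 ? 1.0 / result : result;
}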
2955 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2956 a normal call should be emitted rather than expanding the function
2957 in-line. EXP is the expression that is a call to the builtin
2958 function; if convenient, the result should be placed in TARGET. */
2960 static rtx
2961 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2963 tree arg0, arg1;
2964 tree fn, narg0;
2965 tree type = TREE_TYPE (exp);
2966 REAL_VALUE_TYPE cint, c, c2;
2967 HOST_WIDE_INT n;
2968 rtx op, op2;
2969 enum machine_mode mode = TYPE_MODE (type);
2971 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2972 return NULL_RTX;
2974 arg0 = CALL_EXPR_ARG (exp, 0);
2975 arg1 = CALL_EXPR_ARG (exp, 1);
2977 if (TREE_CODE (arg1) != REAL_CST
2978 || TREE_OVERFLOW (arg1))
2979 return expand_builtin_mathfn_2 (exp, target, subtarget);
2981 /* Handle constant exponents. */
2983 /* For integer valued exponents we can expand to an optimal multiplication
2984 sequence using expand_powi. */
2985 c = TREE_REAL_CST (arg1);
2986 n = real_to_integer (&c);
2987 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2988 if (real_identical (&c, &cint)
2989 && ((n >= -1 && n <= 2)
2990 || (flag_unsafe_math_optimizations
2991 && optimize_insn_for_speed_p ()
2992 && powi_cost (n) <= POWI_MAX_MULTS)))
2994 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2995 if (n != 1)
2997 op = force_reg (mode, op);
2998 op = expand_powi (op, mode, n);
3000 return op;
3003 narg0 = builtin_save_expr (arg0);
3005 /* If the exponent is not integer valued, check if it is half of an integer.
3006 In this case we can expand to sqrt (x) * x**(n/2). */
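/* For example, with unsafe math optimizations pow (x, 3.5) can be
   expanded as sqrt (x) * x**3 (n == 7 below, so x**(n/2) == x**3).  */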
3007 fn = mathfn_built_in (type, BUILT_IN_SQRT);
3008 if (fn != NULL_TREE)
3010 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
3011 n = real_to_integer (&c2);
3012 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3013 if (real_identical (&c2, &cint)
3014 && ((flag_unsafe_math_optimizations
3015 && optimize_insn_for_speed_p ()
3016 && powi_cost (n/2) <= POWI_MAX_MULTS)
3017 || n == 1))
3019 tree call_expr = build_call_expr (fn, 1, narg0);
3020 /* Use expand_expr in case the newly built call expression
3021 was folded to a non-call. */
3022 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3023 if (n != 1)
3025 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3026 op2 = force_reg (mode, op2);
3027 op2 = expand_powi (op2, mode, abs (n / 2));
3028 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3029 0, OPTAB_LIB_WIDEN);
3030 /* If the original exponent was negative, reciprocate the
3031 result. */
3032 if (n < 0)
3033 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3034 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3036 return op;
3040 /* Check whether the exponent is a third of an integer. In this case
3041 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3042 different from pow (x, 1./3.) due to rounding and behavior
3043 with negative x, we need to constrain this transformation to
3044 unsafe math and positive x or finite math. */
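/* For example, with unsafe math optimizations and x known nonnegative,
   pow (x, 4./3.) can be expanded as cbrt (x) * x (n == 4 below).  */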
3045 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3046 if (fn != NULL_TREE
3047 && flag_unsafe_math_optimizations
3048 && (tree_expr_nonnegative_p (arg0)
3049 || !HONOR_NANS (mode)))
3051 REAL_VALUE_TYPE dconst3;
3052 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3053 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3054 real_round (&c2, mode, &c2);
3055 n = real_to_integer (&c2);
3056 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3057 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3058 real_convert (&c2, mode, &c2);
3059 if (real_identical (&c2, &c)
3060 && ((optimize_insn_for_speed_p ()
3061 && powi_cost (n/3) <= POWI_MAX_MULTS)
3062 || n == 1))
3064 tree call_expr = build_call_expr (fn, 1,narg0);
3065 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3066 if (abs (n) % 3 == 2)
3067 op = expand_simple_binop (mode, MULT, op, op, op,
3068 0, OPTAB_LIB_WIDEN);
3069 if (n != 1)
3071 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3072 op2 = force_reg (mode, op2);
3073 op2 = expand_powi (op2, mode, abs (n / 3));
3074 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3075 0, OPTAB_LIB_WIDEN);
3076 /* If the original exponent was negative, reciprocate the
3077 result. */
3078 if (n < 0)
3079 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3080 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3082 return op;
3086 /* Fall back to optab expansion. */
3087 return expand_builtin_mathfn_2 (exp, target, subtarget);
3090 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3091 a normal call should be emitted rather than expanding the function
3092 in-line. EXP is the expression that is a call to the builtin
3093 function; if convenient, the result should be placed in TARGET. */
3095 static rtx
3096 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3098 tree arg0, arg1;
3099 rtx op0, op1;
3100 enum machine_mode mode;
3101 enum machine_mode mode2;
3103 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3104 return NULL_RTX;
3106 arg0 = CALL_EXPR_ARG (exp, 0);
3107 arg1 = CALL_EXPR_ARG (exp, 1);
3108 mode = TYPE_MODE (TREE_TYPE (exp));
3110 /* Handle constant power. */
3112 if (TREE_CODE (arg1) == INTEGER_CST
3113 && !TREE_OVERFLOW (arg1))
3115 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3117 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3118 Otherwise, check the number of multiplications required. */
3119 if ((TREE_INT_CST_HIGH (arg1) == 0
3120 || TREE_INT_CST_HIGH (arg1) == -1)
3121 && ((n >= -1 && n <= 2)
3122 || (optimize_insn_for_speed_p ()
3123 && powi_cost (n) <= POWI_MAX_MULTS)))
3125 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3126 op0 = force_reg (mode, op0);
3127 return expand_powi (op0, mode, n);
3131 /* Emit a libcall to libgcc. */
3133 /* Mode of the 2nd argument must match that of an int. */
3134 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3136 if (target == NULL_RTX)
3137 target = gen_reg_rtx (mode);
3139 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3140 if (GET_MODE (op0) != mode)
3141 op0 = convert_to_mode (mode, op0, 0);
3142 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3143 if (GET_MODE (op1) != mode2)
3144 op1 = convert_to_mode (mode2, op1, 0);
3146 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3147 target, LCT_CONST, mode, 2,
3148 op0, mode, op1, mode2);
3150 return target;
3153 /* Expand expression EXP, which is a call to the strlen builtin. Return
3154 NULL_RTX if we failed, in which case the caller should emit a normal
3155 call; otherwise try to get the result in TARGET, if convenient. */
3157 static rtx
3158 expand_builtin_strlen (tree exp, rtx target,
3159 enum machine_mode target_mode)
3161 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3162 return NULL_RTX;
3163 else
3165 rtx pat;
3166 tree len;
3167 tree src = CALL_EXPR_ARG (exp, 0);
3168 rtx result, src_reg, char_rtx, before_strlen;
3169 enum machine_mode insn_mode = target_mode, char_mode;
3170 enum insn_code icode = CODE_FOR_nothing;
3171 int align;
3173 /* If the length can be computed at compile-time, return it. */
3174 len = c_strlen (src, 0);
3175 if (len)
3176 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3178 /* If the length can be computed at compile-time and is a constant
3179 integer, but there are side-effects in src, evaluate
3180 src for side-effects, then return len.
3181 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3182 can be optimized into: i++; x = 3; */
3183 len = c_strlen (src, 1);
3184 if (len && TREE_CODE (len) == INTEGER_CST)
3186 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3187 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3190 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3192 /* If SRC is not a pointer type, don't do this operation inline. */
3193 if (align == 0)
3194 return NULL_RTX;
3196 /* Bail out if we can't compute strlen in the right mode. */
3197 while (insn_mode != VOIDmode)
3199 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3200 if (icode != CODE_FOR_nothing)
3201 break;
3203 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3205 if (insn_mode == VOIDmode)
3206 return NULL_RTX;
3208 /* Make a place to write the result of the instruction. */
3209 result = target;
3210 if (! (result != 0
3211 && REG_P (result)
3212 && GET_MODE (result) == insn_mode
3213 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3214 result = gen_reg_rtx (insn_mode);
3216 /* Make a place to hold the source address. We will not expand
3217 the actual source until we are sure that the expansion will
3218 not fail -- there are trees that cannot be expanded twice. */
3219 src_reg = gen_reg_rtx (Pmode);
3221 /* Mark the beginning of the strlen sequence so we can emit the
3222 source operand later. */
3223 before_strlen = get_last_insn ();
3225 char_rtx = const0_rtx;
3226 char_mode = insn_data[(int) icode].operand[2].mode;
3227 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3228 char_mode))
3229 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3231 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3232 char_rtx, GEN_INT (align));
3233 if (! pat)
3234 return NULL_RTX;
3235 emit_insn (pat);
3237 /* Now that we are assured of success, expand the source. */
3238 start_sequence ();
3239 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3240 if (pat != src_reg)
3241 emit_move_insn (src_reg, pat);
3242 pat = get_insns ();
3243 end_sequence ();
3245 if (before_strlen)
3246 emit_insn_after (pat, before_strlen);
3247 else
3248 emit_insn_before (pat, get_insns ());
3250 /* Return the value in the proper mode for this function. */
3251 if (GET_MODE (result) == target_mode)
3252 target = result;
3253 else if (target != 0)
3254 convert_move (target, result, 0);
3255 else
3256 target = convert_to_mode (target_mode, result, 0);
3258 return target;
3262 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed;
3263 the caller should emit a normal call. Otherwise try to get the result
3264 in TARGET, if convenient (and in mode MODE if that's convenient). */
3266 static rtx
3267 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3269 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3271 tree type = TREE_TYPE (exp);
3272 tree result = fold_builtin_strstr (EXPR_LOCATION (exp),
3273 CALL_EXPR_ARG (exp, 0),
3274 CALL_EXPR_ARG (exp, 1), type);
3275 if (result)
3276 return expand_expr (result, target, mode, EXPAND_NORMAL);
3278 return NULL_RTX;
3281 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed;
3282 the caller should emit a normal call. Otherwise try to get the result
3283 in TARGET, if convenient (and in mode MODE if that's convenient). */
3285 static rtx
3286 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3288 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3290 tree type = TREE_TYPE (exp);
3291 tree result = fold_builtin_strchr (EXPR_LOCATION (exp),
3292 CALL_EXPR_ARG (exp, 0),
3293 CALL_EXPR_ARG (exp, 1), type);
3294 if (result)
3295 return expand_expr (result, target, mode, EXPAND_NORMAL);
3297 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3299 return NULL_RTX;
3302 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed;
3303 the caller should emit a normal call. Otherwise try to get the result
3304 in TARGET, if convenient (and in mode MODE if that's convenient). */
3306 static rtx
3307 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3309 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3311 tree type = TREE_TYPE (exp);
3312 tree result = fold_builtin_strrchr (EXPR_LOCATION (exp),
3313 CALL_EXPR_ARG (exp, 0),
3314 CALL_EXPR_ARG (exp, 1), type);
3315 if (result)
3316 return expand_expr (result, target, mode, EXPAND_NORMAL);
3318 return NULL_RTX;
3321 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed;
3322 the caller should emit a normal call. Otherwise try to get the result
3323 in TARGET, if convenient (and in mode MODE if that's convenient). */
3325 static rtx
3326 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3328 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3330 tree type = TREE_TYPE (exp);
3331 tree result = fold_builtin_strpbrk (EXPR_LOCATION (exp),
3332 CALL_EXPR_ARG (exp, 0),
3333 CALL_EXPR_ARG (exp, 1), type);
3334 if (result)
3335 return expand_expr (result, target, mode, EXPAND_NORMAL);
3337 return NULL_RTX;
3340 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3341 bits from constant string DATA + OFFSET and return it as a target
3342 constant. */
3344 static rtx
3345 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3346 enum machine_mode mode)
3348 const char *str = (const char *) data;
3350 gcc_assert (offset >= 0
3351 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3352 <= strlen (str) + 1));
3354 return c_readstr (str + offset, mode);
3357 /* Expand a call EXP to the memcpy builtin.
3358 Return NULL_RTX if we failed; the caller should emit a normal call,
3359 otherwise try to get the result in TARGET, if convenient (and in
3360 mode MODE if that's convenient). */
3362 static rtx
3363 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3365 tree fndecl = get_callee_fndecl (exp);
3367 if (!validate_arglist (exp,
3368 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3369 return NULL_RTX;
3370 else
3372 tree dest = CALL_EXPR_ARG (exp, 0);
3373 tree src = CALL_EXPR_ARG (exp, 1);
3374 tree len = CALL_EXPR_ARG (exp, 2);
3375 const char *src_str;
3376 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3377 unsigned int dest_align
3378 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3379 rtx dest_mem, src_mem, dest_addr, len_rtx;
3380 tree result = fold_builtin_memory_op (EXPR_LOCATION (exp),
3381 dest, src, len,
3382 TREE_TYPE (TREE_TYPE (fndecl)),
3383 false, /*endp=*/0);
3384 HOST_WIDE_INT expected_size = -1;
3385 unsigned int expected_align = 0;
3386 tree_ann_common_t ann;
3388 if (result)
3390 while (TREE_CODE (result) == COMPOUND_EXPR)
3392 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3393 EXPAND_NORMAL);
3394 result = TREE_OPERAND (result, 1);
3396 return expand_expr (result, target, mode, EXPAND_NORMAL);
3399 /* If DEST is not a pointer type, call the normal function. */
3400 if (dest_align == 0)
3401 return NULL_RTX;
3403 /* If either SRC is not a pointer type, don't do this
3404 operation in-line. */
3405 if (src_align == 0)
3406 return NULL_RTX;
3408 ann = tree_common_ann (exp);
3409 if (ann)
3410 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3412 if (expected_align < dest_align)
3413 expected_align = dest_align;
3414 dest_mem = get_memory_rtx (dest, len);
3415 set_mem_align (dest_mem, dest_align);
3416 len_rtx = expand_normal (len);
3417 src_str = c_getstr (src);
3419 /* If SRC is a string constant and block move would be done
3420 by pieces, we can avoid loading the string from memory
3421 and only store the computed constants. */
3422 if (src_str
3423 && CONST_INT_P (len_rtx)
3424 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3425 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3426 CONST_CAST (char *, src_str),
3427 dest_align, false))
3429 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3430 builtin_memcpy_read_str,
3431 CONST_CAST (char *, src_str),
3432 dest_align, false, 0);
3433 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3434 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3435 return dest_mem;
3438 src_mem = get_memory_rtx (src, len);
3439 set_mem_align (src_mem, src_align);
3441 /* Copy word part most expediently. */
3442 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3443 CALL_EXPR_TAILCALL (exp)
3444 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3445 expected_align, expected_size);
3447 if (dest_addr == 0)
3449 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3450 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3452 return dest_addr;
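/* Illustrative sketch, not part of GCC: what the store-by-pieces fast path
   above amounts to at the source level when SRC is a constant string and
   LEN is a small constant.  The helper name below is hypothetical.  */
#if 0
#include <stdint.h>

/* Copying the 8-byte literal "builtin" (7 chars plus NUL) into a
   4-byte-aligned buffer can be open-coded as two constant word stores.
   The constants below are the little-endian encodings; c_readstr is what
   produces the byte-order-correct value for the real target.  */
static void
example_copy_by_pieces (uint32_t *dst)
{
  dst[0] = 0x6c697562u;  /* "buil" */
  dst[1] = 0x006e6974u;  /* "tin\0" */
}
#endif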
3456 /* Expand a call EXP to the mempcpy builtin.
3457 Return NULL_RTX if we failed; the caller should emit a normal call,
3458 otherwise try to get the result in TARGET, if convenient (and in
3459 mode MODE if that's convenient). If ENDP is 0 return the
3460 destination pointer, if ENDP is 1 return the end pointer ala
3461 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3462 stpcpy. */
3464 static rtx
3465 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3467 if (!validate_arglist (exp,
3468 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3469 return NULL_RTX;
3470 else
3472 tree dest = CALL_EXPR_ARG (exp, 0);
3473 tree src = CALL_EXPR_ARG (exp, 1);
3474 tree len = CALL_EXPR_ARG (exp, 2);
3475 return expand_builtin_mempcpy_args (dest, src, len,
3476 TREE_TYPE (exp),
3477 target, mode, /*endp=*/ 1);
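/* Illustrative sketch, not part of GCC: the ENDP convention in terms of
   the library calls being expanded.  The helper name is hypothetical;
   mempcpy and stpcpy are GNU/POSIX extensions.  */
#if 0
#include <string.h>

static void
example_endp_values (char *buf, const char *src, size_t n)
{
  void *p0 = memcpy (buf, src, n);    /* ENDP == 0: returns BUF.            */
  void *p1 = mempcpy (buf, src, n);   /* ENDP == 1: returns BUF + N.        */
  char *p2 = stpcpy (buf, src);       /* ENDP == 2: returns the end pointer
                                         minus one, i.e. the NUL's address. */
  (void) p0; (void) p1; (void) p2;
}
#endif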
3481 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3482 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3483 so that this can also be called without constructing an actual CALL_EXPR.
3484 TYPE is the return type of the call. The other arguments and return value
3485 are the same as for expand_builtin_mempcpy. */
3487 static rtx
3488 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3489 rtx target, enum machine_mode mode, int endp)
3491 /* If return value is ignored, transform mempcpy into memcpy. */
3492 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3494 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3495 tree result = build_call_expr (fn, 3, dest, src, len);
3497 while (TREE_CODE (result) == COMPOUND_EXPR)
3499 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3500 EXPAND_NORMAL);
3501 result = TREE_OPERAND (result, 1);
3503 return expand_expr (result, target, mode, EXPAND_NORMAL);
3505 else
3507 const char *src_str;
3508 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3509 unsigned int dest_align
3510 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3511 rtx dest_mem, src_mem, len_rtx;
3512 tree result = fold_builtin_memory_op (UNKNOWN_LOCATION,
3513 dest, src, len, type, false, endp);
3515 if (result)
3517 while (TREE_CODE (result) == COMPOUND_EXPR)
3519 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3520 EXPAND_NORMAL);
3521 result = TREE_OPERAND (result, 1);
3523 return expand_expr (result, target, mode, EXPAND_NORMAL);
3526 /* If either SRC or DEST is not a pointer type, don't do this
3527 operation in-line. */
3528 if (dest_align == 0 || src_align == 0)
3529 return NULL_RTX;
3531 /* If LEN is not constant, call the normal function. */
3532 if (! host_integerp (len, 1))
3533 return NULL_RTX;
3535 len_rtx = expand_normal (len);
3536 src_str = c_getstr (src);
3538 /* If SRC is a string constant and block move would be done
3539 by pieces, we can avoid loading the string from memory
3540 and only store the computed constants. */
3541 if (src_str
3542 && CONST_INT_P (len_rtx)
3543 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3544 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3545 CONST_CAST (char *, src_str),
3546 dest_align, false))
3548 dest_mem = get_memory_rtx (dest, len);
3549 set_mem_align (dest_mem, dest_align);
3550 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3551 builtin_memcpy_read_str,
3552 CONST_CAST (char *, src_str),
3553 dest_align, false, endp);
3554 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3555 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3556 return dest_mem;
3559 if (CONST_INT_P (len_rtx)
3560 && can_move_by_pieces (INTVAL (len_rtx),
3561 MIN (dest_align, src_align)))
3563 dest_mem = get_memory_rtx (dest, len);
3564 set_mem_align (dest_mem, dest_align);
3565 src_mem = get_memory_rtx (src, len);
3566 set_mem_align (src_mem, src_align);
3567 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3568 MIN (dest_align, src_align), endp);
3569 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3570 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3571 return dest_mem;
3574 return NULL_RTX;
3578 /* Expand expression EXP, which is a call to the memmove builtin. Return
3579 NULL_RTX if we failed; the caller should emit a normal call. */
3581 static rtx
3582 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3584 if (!validate_arglist (exp,
3585 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3586 return NULL_RTX;
3587 else
3589 tree dest = CALL_EXPR_ARG (exp, 0);
3590 tree src = CALL_EXPR_ARG (exp, 1);
3591 tree len = CALL_EXPR_ARG (exp, 2);
3592 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3593 target, mode, ignore);
3597 /* Helper function to do the actual work for expand_builtin_memmove. The
3598 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3599 so that this can also be called without constructing an actual CALL_EXPR.
3600 TYPE is the return type of the call. The other arguments and return value
3601 are the same as for expand_builtin_memmove. */
3603 static rtx
3604 expand_builtin_memmove_args (tree dest, tree src, tree len,
3605 tree type, rtx target, enum machine_mode mode,
3606 int ignore)
3608 tree result = fold_builtin_memory_op (UNKNOWN_LOCATION,
3609 dest, src, len, type, ignore, /*endp=*/3);
3611 if (result)
3613 STRIP_TYPE_NOPS (result);
3614 while (TREE_CODE (result) == COMPOUND_EXPR)
3616 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3617 EXPAND_NORMAL);
3618 result = TREE_OPERAND (result, 1);
3620 return expand_expr (result, target, mode, EXPAND_NORMAL);
3623 /* Otherwise, call the normal function. */
3624 return NULL_RTX;
3627 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3628 NULL_RTX if we failed; the caller should emit a normal call. */
3630 static rtx
3631 expand_builtin_bcopy (tree exp, int ignore)
3633 tree type = TREE_TYPE (exp);
3634 tree src, dest, size;
3635 location_t loc = EXPR_LOCATION (exp);
3637 if (!validate_arglist (exp,
3638 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3639 return NULL_RTX;
3641 src = CALL_EXPR_ARG (exp, 0);
3642 dest = CALL_EXPR_ARG (exp, 1);
3643 size = CALL_EXPR_ARG (exp, 2);
3645 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3646 This is done this way so that if it isn't expanded inline, we fall
3647 back to calling bcopy instead of memmove. */
3648 return expand_builtin_memmove_args (dest, src,
3649 fold_convert_loc (loc, sizetype, size),
3650 type, const0_rtx, VOIDmode,
3651 ignore);
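/* Illustrative sketch, not part of GCC: the source-level effect of the
   argument rewrite above.  The helper name is hypothetical.  */
#if 0
#include <string.h>
#include <strings.h>

static void
example_bcopy_as_memmove (void *x, void *y, unsigned int z)
{
  bcopy (x, y, z);              /* what the program wrote                 */
  memmove (y, x, (size_t) z);   /* the equivalent form actually expanded  */
}
#endif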
3654 #ifndef HAVE_movstr
3655 # define HAVE_movstr 0
3656 # define CODE_FOR_movstr CODE_FOR_nothing
3657 #endif
3659 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3660 we failed; the caller should emit a normal call. Otherwise try to
3661 get the result in TARGET, if convenient. If ENDP is 0 return the
3662 destination pointer, if ENDP is 1 return the end pointer ala
3663 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3664 stpcpy. */
3666 static rtx
3667 expand_movstr (tree dest, tree src, rtx target, int endp)
3669 rtx end;
3670 rtx dest_mem;
3671 rtx src_mem;
3672 rtx insn;
3673 const struct insn_data * data;
3675 if (!HAVE_movstr)
3676 return NULL_RTX;
3678 dest_mem = get_memory_rtx (dest, NULL);
3679 src_mem = get_memory_rtx (src, NULL);
3680 if (!endp)
3682 target = force_reg (Pmode, XEXP (dest_mem, 0));
3683 dest_mem = replace_equiv_address (dest_mem, target);
3684 end = gen_reg_rtx (Pmode);
3686 else
3688 if (target == 0 || target == const0_rtx)
3690 end = gen_reg_rtx (Pmode);
3691 if (target == 0)
3692 target = end;
3694 else
3695 end = target;
3698 data = insn_data + CODE_FOR_movstr;
3700 if (data->operand[0].mode != VOIDmode)
3701 end = gen_lowpart (data->operand[0].mode, end);
3703 insn = data->genfun (end, dest_mem, src_mem);
3705 gcc_assert (insn);
3707 emit_insn (insn);
3709 /* movstr is supposed to set end to the address of the NUL
3710 terminator. If the caller requested a mempcpy-like return value,
3711 adjust it. */
3712 if (endp == 1 && target != const0_rtx)
3714 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3715 emit_move_insn (target, force_operand (tem, NULL_RTX));
3718 return target;
3721 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3722 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3723 try to get the result in TARGET, if convenient (and in mode MODE if that's
3724 convenient). */
3726 static rtx
3727 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3729 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3731 tree dest = CALL_EXPR_ARG (exp, 0);
3732 tree src = CALL_EXPR_ARG (exp, 1);
3733 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3735 return NULL_RTX;
3738 /* Helper function to do the actual work for expand_builtin_strcpy. The
3739 arguments to the builtin_strcpy call DEST and SRC are broken out
3740 so that this can also be called without constructing an actual CALL_EXPR.
3741 The other arguments and return value are the same as for
3742 expand_builtin_strcpy. */
3744 static rtx
3745 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3746 rtx target, enum machine_mode mode)
3748 tree result = fold_builtin_strcpy (UNKNOWN_LOCATION,
3749 fndecl, dest, src, 0);
3750 if (result)
3751 return expand_expr (result, target, mode, EXPAND_NORMAL);
3752 return expand_movstr (dest, src, target, /*endp=*/0);
3756 /* Expand a call EXP to the stpcpy builtin.
3757 Return NULL_RTX if we failed; the caller should emit a normal call.
3758 Otherwise try to get the result in TARGET, if convenient (and in
3759 mode MODE if that's convenient). */
3761 static rtx
3762 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3764 tree dst, src;
3765 location_t loc = EXPR_LOCATION (exp);
3767 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3768 return NULL_RTX;
3770 dst = CALL_EXPR_ARG (exp, 0);
3771 src = CALL_EXPR_ARG (exp, 1);
3773 /* If return value is ignored, transform stpcpy into strcpy. */
3774 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3776 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3777 tree result = build_call_expr (fn, 2, dst, src);
3779 STRIP_NOPS (result);
3780 while (TREE_CODE (result) == COMPOUND_EXPR)
3782 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3783 EXPAND_NORMAL);
3784 result = TREE_OPERAND (result, 1);
3786 return expand_expr (result, target, mode, EXPAND_NORMAL);
3788 else
3790 tree len, lenp1;
3791 rtx ret;
3793 /* Ensure we get an actual string whose length can be evaluated at
3794 compile-time, not an expression containing a string. This is
3795 because the latter will potentially produce pessimized code
3796 when used to produce the return value. */
3797 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3798 return expand_movstr (dst, src, target, /*endp=*/2);
3800 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3801 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3802 target, mode, /*endp=*/2);
3804 if (ret)
3805 return ret;
3807 if (TREE_CODE (len) == INTEGER_CST)
3809 rtx len_rtx = expand_normal (len);
3811 if (CONST_INT_P (len_rtx))
3813 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3814 dst, src, target, mode);
3816 if (ret)
3818 if (! target)
3820 if (mode != VOIDmode)
3821 target = gen_reg_rtx (mode);
3822 else
3823 target = gen_reg_rtx (GET_MODE (ret));
3825 if (GET_MODE (target) != GET_MODE (ret))
3826 ret = gen_lowpart (GET_MODE (target), ret);
3828 ret = plus_constant (ret, INTVAL (len_rtx));
3829 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3830 gcc_assert (ret);
3832 return target;
3837 return expand_movstr (dst, src, target, /*endp=*/2);
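/* Illustrative sketch, not part of GCC: the stpcpy strategy used above
   when SRC is a literal of known length.  The helper name is hypothetical;
   mempcpy and stpcpy are GNU/POSIX extensions.  */
#if 0
#include <string.h>

static char *
example_stpcpy_via_mempcpy (char *d)
{
  /* stpcpy (d, "abc") returns d + 3, the address of the copied NUL;
     with LEN == 3 known at compile time this is a mempcpy of LEN + 1
     bytes, adjusted back by one (ENDP == 2).  */
  return (char *) mempcpy (d, "abc", 3 + 1) - 1;
}
#endif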
3841 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3842 bytes from constant string DATA + OFFSET and return it as target
3843 constant. */
3845 static rtx
3846 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3847 enum machine_mode mode)
3849 const char *str = (const char *) data;
3851 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3852 return const0_rtx;
3854 return c_readstr (str + offset, mode);
3857 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3858 NULL_RTX if we failed; the caller should emit a normal call. */
3860 static rtx
3861 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3863 tree fndecl = get_callee_fndecl (exp);
3864 location_t loc = EXPR_LOCATION (exp);
3866 if (validate_arglist (exp,
3867 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3869 tree dest = CALL_EXPR_ARG (exp, 0);
3870 tree src = CALL_EXPR_ARG (exp, 1);
3871 tree len = CALL_EXPR_ARG (exp, 2);
3872 tree slen = c_strlen (src, 1);
3873 tree result = fold_builtin_strncpy (EXPR_LOCATION (exp),
3874 fndecl, dest, src, len, slen);
3876 if (result)
3878 while (TREE_CODE (result) == COMPOUND_EXPR)
3880 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3881 EXPAND_NORMAL);
3882 result = TREE_OPERAND (result, 1);
3884 return expand_expr (result, target, mode, EXPAND_NORMAL);
3887 /* We must be passed a constant len and src parameter. */
3888 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3889 return NULL_RTX;
3891 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3893 /* We're required to pad with trailing zeros if the requested
3894 len is greater than strlen(s2)+1. In that case try to
3895 use store_by_pieces; if that fails, punt. */
3896 if (tree_int_cst_lt (slen, len))
3898 unsigned int dest_align
3899 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3900 const char *p = c_getstr (src);
3901 rtx dest_mem;
3903 if (!p || dest_align == 0 || !host_integerp (len, 1)
3904 || !can_store_by_pieces (tree_low_cst (len, 1),
3905 builtin_strncpy_read_str,
3906 CONST_CAST (char *, p),
3907 dest_align, false))
3908 return NULL_RTX;
3910 dest_mem = get_memory_rtx (dest, len);
3911 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3912 builtin_strncpy_read_str,
3913 CONST_CAST (char *, p), dest_align, false, 0);
3914 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3915 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3916 return dest_mem;
3919 return NULL_RTX;
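/* Illustrative sketch, not part of GCC: the padding rule handled above.
   The buffer contents shown assume an 8-byte destination.  */
#if 0
#include <string.h>

static void
example_strncpy_padding (char buf[8])
{
  /* Because the requested length (8) exceeds strlen ("ab") + 1, strncpy
     must NUL-pad the remainder: buf becomes 'a','b',0,0,0,0,0,0.  The
     callback builtin_strncpy_read_str returns const0_rtx for offsets past
     the string precisely to produce that padding.  */
  strncpy (buf, "ab", 8);
}
#endif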
3922 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3923 bytes from constant string DATA + OFFSET and return it as target
3924 constant. */
3926 static rtx
3927 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3928 enum machine_mode mode)
3930 const char *c = (const char *) data;
3931 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3933 memset (p, *c, GET_MODE_SIZE (mode));
3935 return c_readstr (p, mode);
3938 /* Callback routine for store_by_pieces. Return the RTL of a register
3939 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3940 char value given in the RTL register data. For example, if mode is
3941 4 bytes wide, return the RTL for 0x01010101*data. */
3943 static rtx
3944 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3945 enum machine_mode mode)
3947 rtx target, coeff;
3948 size_t size;
3949 char *p;
3951 size = GET_MODE_SIZE (mode);
3952 if (size == 1)
3953 return (rtx) data;
3955 p = XALLOCAVEC (char, size);
3956 memset (p, 1, size);
3957 coeff = c_readstr (p, mode);
3959 target = convert_to_mode (mode, (rtx) data, 1);
3960 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3961 return force_reg (mode, target);
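/* Illustrative sketch, not part of GCC: the multiplication trick used
   above, shown for a 4-byte mode.  The helper name is hypothetical.  */
#if 0
#include <stdint.h>

static uint32_t
example_replicate_byte (unsigned char c)
{
  /* Every byte of the multiplier is 0x01, so the product repeats C in
     each byte position, e.g. 0xAB -> 0xABABABAB.  */
  return (uint32_t) c * 0x01010101u;
}
#endif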
3964 /* Expand expression EXP, which is a call to the memset builtin. Return
3965 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3966 try to get the result in TARGET, if convenient (and in mode MODE if that's
3967 convenient). */
3969 static rtx
3970 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3972 if (!validate_arglist (exp,
3973 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3974 return NULL_RTX;
3975 else
3977 tree dest = CALL_EXPR_ARG (exp, 0);
3978 tree val = CALL_EXPR_ARG (exp, 1);
3979 tree len = CALL_EXPR_ARG (exp, 2);
3980 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3984 /* Helper function to do the actual work for expand_builtin_memset. The
3985 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3986 so that this can also be called without constructing an actual CALL_EXPR.
3987 The other arguments and return value are the same as for
3988 expand_builtin_memset. */
3990 static rtx
3991 expand_builtin_memset_args (tree dest, tree val, tree len,
3992 rtx target, enum machine_mode mode, tree orig_exp)
3994 tree fndecl, fn;
3995 enum built_in_function fcode;
3996 char c;
3997 unsigned int dest_align;
3998 rtx dest_mem, dest_addr, len_rtx;
3999 HOST_WIDE_INT expected_size = -1;
4000 unsigned int expected_align = 0;
4001 tree_ann_common_t ann;
4003 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
4005 /* If DEST is not a pointer type, don't do this operation in-line. */
4006 if (dest_align == 0)
4007 return NULL_RTX;
4009 ann = tree_common_ann (orig_exp);
4010 if (ann)
4011 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
4013 if (expected_align < dest_align)
4014 expected_align = dest_align;
4016 /* If the LEN parameter is zero, return DEST. */
4017 if (integer_zerop (len))
4019 /* Evaluate and ignore VAL in case it has side-effects. */
4020 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4021 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4024 /* Stabilize the arguments in case we fail. */
4025 dest = builtin_save_expr (dest);
4026 val = builtin_save_expr (val);
4027 len = builtin_save_expr (len);
4029 len_rtx = expand_normal (len);
4030 dest_mem = get_memory_rtx (dest, len);
4032 if (TREE_CODE (val) != INTEGER_CST)
4034 rtx val_rtx;
4036 val_rtx = expand_normal (val);
4037 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
4038 val_rtx, 0);
4040 /* Assume that we can memset by pieces if we can store
4041 the coefficients by pieces (in the required modes).
4042 We can't pass builtin_memset_gen_str as that emits RTL. */
4043 c = 1;
4044 if (host_integerp (len, 1)
4045 && can_store_by_pieces (tree_low_cst (len, 1),
4046 builtin_memset_read_str, &c, dest_align,
4047 true))
4049 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
4050 val_rtx);
4051 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4052 builtin_memset_gen_str, val_rtx, dest_align,
4053 true, 0);
4055 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4056 dest_align, expected_align,
4057 expected_size))
4058 goto do_libcall;
4060 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4061 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4062 return dest_mem;
4065 if (target_char_cast (val, &c))
4066 goto do_libcall;
4068 if (c)
4070 if (host_integerp (len, 1)
4071 && can_store_by_pieces (tree_low_cst (len, 1),
4072 builtin_memset_read_str, &c, dest_align,
4073 true))
4074 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4075 builtin_memset_read_str, &c, dest_align, true, 0);
4076 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4077 dest_align, expected_align,
4078 expected_size))
4079 goto do_libcall;
4081 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4082 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4083 return dest_mem;
4086 set_mem_align (dest_mem, dest_align);
4087 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4088 CALL_EXPR_TAILCALL (orig_exp)
4089 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4090 expected_align, expected_size);
4092 if (dest_addr == 0)
4094 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4095 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4098 return dest_addr;
4100 do_libcall:
4101 fndecl = get_callee_fndecl (orig_exp);
4102 fcode = DECL_FUNCTION_CODE (fndecl);
4103 if (fcode == BUILT_IN_MEMSET)
4104 fn = build_call_expr (fndecl, 3, dest, val, len);
4105 else if (fcode == BUILT_IN_BZERO)
4106 fn = build_call_expr (fndecl, 2, dest, len);
4107 else
4108 gcc_unreachable ();
4109 if (TREE_CODE (fn) == CALL_EXPR)
4110 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4111 return expand_call (fn, target, target == const0_rtx);
4114 /* Expand expression EXP, which is a call to the bzero builtin. Return
4115 NULL_RTX if we failed; the caller should emit a normal call. */
4117 static rtx
4118 expand_builtin_bzero (tree exp)
4120 tree dest, size;
4121 location_t loc = EXPR_LOCATION (exp);
4123 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4124 return NULL_RTX;
4126 dest = CALL_EXPR_ARG (exp, 0);
4127 size = CALL_EXPR_ARG (exp, 1);
4129 /* New argument list transforming bzero(ptr x, int y) to
4130 memset(ptr x, int 0, size_t y). This is done this way
4131 so that if it isn't expanded inline, we fall back to
4132 calling bzero instead of memset. */
4134 return expand_builtin_memset_args (dest, integer_zero_node,
4135 fold_convert_loc (loc, sizetype, size),
4136 const0_rtx, VOIDmode, exp);
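/* Illustrative sketch, not part of GCC: the source-level effect of the
   argument rewrite above.  The helper name is hypothetical.  */
#if 0
#include <string.h>
#include <strings.h>

static void
example_bzero_as_memset (void *x, unsigned int y)
{
  bzero (x, y);                /* what the program wrote                */
  memset (x, 0, (size_t) y);   /* the equivalent form actually expanded */
}
#endif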
4139 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed;
4140 the caller should emit a normal call. Otherwise try to get the result
4141 in TARGET, if convenient (and in mode MODE if that's convenient). */
4143 static rtx
4144 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4146 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4147 INTEGER_TYPE, VOID_TYPE))
4149 tree type = TREE_TYPE (exp);
4150 tree result = fold_builtin_memchr (EXPR_LOCATION (exp),
4151 CALL_EXPR_ARG (exp, 0),
4152 CALL_EXPR_ARG (exp, 1),
4153 CALL_EXPR_ARG (exp, 2), type);
4154 if (result)
4155 return expand_expr (result, target, mode, EXPAND_NORMAL);
4157 return NULL_RTX;
4160 /* Expand expression EXP, which is a call to the memcmp built-in function.
4161 Return NULL_RTX if we failed; the
4162 caller should emit a normal call. Otherwise try to get the result in
4163 TARGET, if convenient (and in mode MODE, if that's convenient). */
4165 static rtx
4166 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4168 location_t loc = EXPR_LOCATION (exp);
4170 if (!validate_arglist (exp,
4171 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4172 return NULL_RTX;
4173 else
4175 tree result = fold_builtin_memcmp (loc,
4176 CALL_EXPR_ARG (exp, 0),
4177 CALL_EXPR_ARG (exp, 1),
4178 CALL_EXPR_ARG (exp, 2));
4179 if (result)
4180 return expand_expr (result, target, mode, EXPAND_NORMAL);
4183 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4185 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4186 rtx result;
4187 rtx insn;
4188 tree arg1 = CALL_EXPR_ARG (exp, 0);
4189 tree arg2 = CALL_EXPR_ARG (exp, 1);
4190 tree len = CALL_EXPR_ARG (exp, 2);
4192 int arg1_align
4193 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4194 int arg2_align
4195 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4196 enum machine_mode insn_mode;
4198 #ifdef HAVE_cmpmemsi
4199 if (HAVE_cmpmemsi)
4200 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4201 else
4202 #endif
4203 #ifdef HAVE_cmpstrnsi
4204 if (HAVE_cmpstrnsi)
4205 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4206 else
4207 #endif
4208 return NULL_RTX;
4210 /* If we don't have POINTER_TYPE, call the function. */
4211 if (arg1_align == 0 || arg2_align == 0)
4212 return NULL_RTX;
4214 /* Make a place to write the result of the instruction. */
4215 result = target;
4216 if (! (result != 0
4217 && REG_P (result) && GET_MODE (result) == insn_mode
4218 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4219 result = gen_reg_rtx (insn_mode);
4221 arg1_rtx = get_memory_rtx (arg1, len);
4222 arg2_rtx = get_memory_rtx (arg2, len);
4223 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4225 /* Set MEM_SIZE as appropriate. */
4226 if (CONST_INT_P (arg3_rtx))
4228 set_mem_size (arg1_rtx, arg3_rtx);
4229 set_mem_size (arg2_rtx, arg3_rtx);
4232 #ifdef HAVE_cmpmemsi
4233 if (HAVE_cmpmemsi)
4234 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4235 GEN_INT (MIN (arg1_align, arg2_align)));
4236 else
4237 #endif
4238 #ifdef HAVE_cmpstrnsi
4239 if (HAVE_cmpstrnsi)
4240 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4241 GEN_INT (MIN (arg1_align, arg2_align)));
4242 else
4243 #endif
4244 gcc_unreachable ();
4246 if (insn)
4247 emit_insn (insn);
4248 else
4249 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4250 TYPE_MODE (integer_type_node), 3,
4251 XEXP (arg1_rtx, 0), Pmode,
4252 XEXP (arg2_rtx, 0), Pmode,
4253 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4254 TYPE_UNSIGNED (sizetype)),
4255 TYPE_MODE (sizetype));
4257 /* Return the value in the proper mode for this function. */
4258 mode = TYPE_MODE (TREE_TYPE (exp));
4259 if (GET_MODE (result) == mode)
4260 return result;
4261 else if (target != 0)
4263 convert_move (target, result, 0);
4264 return target;
4266 else
4267 return convert_to_mode (mode, result, 0);
4269 #endif
4271 return NULL_RTX;
4274 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4275 if we failed; the caller should emit a normal call. Otherwise try to get
4276 the result in TARGET, if convenient. */
4278 static rtx
4279 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4281 location_t loc = EXPR_LOCATION (exp);
4283 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4284 return NULL_RTX;
4285 else
4287 tree result = fold_builtin_strcmp (loc,
4288 CALL_EXPR_ARG (exp, 0),
4289 CALL_EXPR_ARG (exp, 1));
4290 if (result)
4291 return expand_expr (result, target, mode, EXPAND_NORMAL);
4294 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4295 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4296 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4298 rtx arg1_rtx, arg2_rtx;
4299 rtx result, insn = NULL_RTX;
4300 tree fndecl, fn;
4301 tree arg1 = CALL_EXPR_ARG (exp, 0);
4302 tree arg2 = CALL_EXPR_ARG (exp, 1);
4304 int arg1_align
4305 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4306 int arg2_align
4307 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4309 /* If we don't have POINTER_TYPE, call the function. */
4310 if (arg1_align == 0 || arg2_align == 0)
4311 return NULL_RTX;
4313 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4314 arg1 = builtin_save_expr (arg1);
4315 arg2 = builtin_save_expr (arg2);
4317 arg1_rtx = get_memory_rtx (arg1, NULL);
4318 arg2_rtx = get_memory_rtx (arg2, NULL);
4320 #ifdef HAVE_cmpstrsi
4321 /* Try to call cmpstrsi. */
4322 if (HAVE_cmpstrsi)
4324 enum machine_mode insn_mode
4325 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4327 /* Make a place to write the result of the instruction. */
4328 result = target;
4329 if (! (result != 0
4330 && REG_P (result) && GET_MODE (result) == insn_mode
4331 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4332 result = gen_reg_rtx (insn_mode);
4334 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4335 GEN_INT (MIN (arg1_align, arg2_align)));
4337 #endif
4338 #ifdef HAVE_cmpstrnsi
4339 /* Try to determine at least one length and call cmpstrnsi. */
4340 if (!insn && HAVE_cmpstrnsi)
4342 tree len;
4343 rtx arg3_rtx;
4345 enum machine_mode insn_mode
4346 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4347 tree len1 = c_strlen (arg1, 1);
4348 tree len2 = c_strlen (arg2, 1);
4350 if (len1)
4351 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4352 if (len2)
4353 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4355 /* If we don't have a constant length for the first, use the length
4356 of the second, if we know it. We don't require a constant for
4357 this case; some cost analysis could be done if both are available
4358 but neither is constant. For now, assume they're equally cheap,
4359 unless one has side effects. If both strings have constant lengths,
4360 use the smaller. */
4362 if (!len1)
4363 len = len2;
4364 else if (!len2)
4365 len = len1;
4366 else if (TREE_SIDE_EFFECTS (len1))
4367 len = len2;
4368 else if (TREE_SIDE_EFFECTS (len2))
4369 len = len1;
4370 else if (TREE_CODE (len1) != INTEGER_CST)
4371 len = len2;
4372 else if (TREE_CODE (len2) != INTEGER_CST)
4373 len = len1;
4374 else if (tree_int_cst_lt (len1, len2))
4375 len = len1;
4376 else
4377 len = len2;
4379 /* If both arguments have side effects, we cannot optimize. */
4380 if (!len || TREE_SIDE_EFFECTS (len))
4381 goto do_libcall;
4383 arg3_rtx = expand_normal (len);
4385 /* Make a place to write the result of the instruction. */
4386 result = target;
4387 if (! (result != 0
4388 && REG_P (result) && GET_MODE (result) == insn_mode
4389 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4390 result = gen_reg_rtx (insn_mode);
4392 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4393 GEN_INT (MIN (arg1_align, arg2_align)));
4395 #endif
4397 if (insn)
4399 emit_insn (insn);
4401 /* Return the value in the proper mode for this function. */
4402 mode = TYPE_MODE (TREE_TYPE (exp));
4403 if (GET_MODE (result) == mode)
4404 return result;
4405 if (target == 0)
4406 return convert_to_mode (mode, result, 0);
4407 convert_move (target, result, 0);
4408 return target;
4411 /* Expand the library call ourselves using a stabilized argument
4412 list to avoid re-evaluating the function's arguments twice. */
4413 #ifdef HAVE_cmpstrnsi
4414 do_libcall:
4415 #endif
4416 fndecl = get_callee_fndecl (exp);
4417 fn = build_call_expr (fndecl, 2, arg1, arg2);
4418 if (TREE_CODE (fn) == CALL_EXPR)
4419 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4420 return expand_call (fn, target, target == const0_rtx);
4422 #endif
4423 return NULL_RTX;
4426 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4427 NULL_RTX if we failed; the caller should emit a normal call. Otherwise try to get
4428 the result in TARGET, if convenient. */
4430 static rtx
4431 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4433 location_t loc = EXPR_LOCATION (exp);
4435 if (!validate_arglist (exp,
4436 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4437 return NULL_RTX;
4438 else
4440 tree result = fold_builtin_strncmp (loc,
4441 CALL_EXPR_ARG (exp, 0),
4442 CALL_EXPR_ARG (exp, 1),
4443 CALL_EXPR_ARG (exp, 2));
4444 if (result)
4445 return expand_expr (result, target, mode, EXPAND_NORMAL);
4448 /* If c_strlen can determine an expression for one of the string
4449 lengths, and it doesn't have side effects, then emit cmpstrnsi
4450 using length MIN(strlen(string)+1, arg3). */
4451 #ifdef HAVE_cmpstrnsi
4452 if (HAVE_cmpstrnsi)
4454 tree len, len1, len2;
4455 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4456 rtx result, insn;
4457 tree fndecl, fn;
4458 tree arg1 = CALL_EXPR_ARG (exp, 0);
4459 tree arg2 = CALL_EXPR_ARG (exp, 1);
4460 tree arg3 = CALL_EXPR_ARG (exp, 2);
4462 int arg1_align
4463 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4464 int arg2_align
4465 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4466 enum machine_mode insn_mode
4467 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4469 len1 = c_strlen (arg1, 1);
4470 len2 = c_strlen (arg2, 1);
4472 if (len1)
4473 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4474 if (len2)
4475 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4477 /* If we don't have a constant length for the first, use the length
4478 of the second, if we know it. We don't require a constant for
4479 this case; some cost analysis could be done if both are available
4480 but neither is constant. For now, assume they're equally cheap,
4481 unless one has side effects. If both strings have constant lengths,
4482 use the smaller. */
4484 if (!len1)
4485 len = len2;
4486 else if (!len2)
4487 len = len1;
4488 else if (TREE_SIDE_EFFECTS (len1))
4489 len = len2;
4490 else if (TREE_SIDE_EFFECTS (len2))
4491 len = len1;
4492 else if (TREE_CODE (len1) != INTEGER_CST)
4493 len = len2;
4494 else if (TREE_CODE (len2) != INTEGER_CST)
4495 len = len1;
4496 else if (tree_int_cst_lt (len1, len2))
4497 len = len1;
4498 else
4499 len = len2;
4501 /* If both arguments have side effects, we cannot optimize. */
4502 if (!len || TREE_SIDE_EFFECTS (len))
4503 return NULL_RTX;
4505 /* The actual new length parameter is MIN(len,arg3). */
4506 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4507 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4509 /* If we don't have POINTER_TYPE, call the function. */
4510 if (arg1_align == 0 || arg2_align == 0)
4511 return NULL_RTX;
4513 /* Make a place to write the result of the instruction. */
4514 result = target;
4515 if (! (result != 0
4516 && REG_P (result) && GET_MODE (result) == insn_mode
4517 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4518 result = gen_reg_rtx (insn_mode);
4520 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4521 arg1 = builtin_save_expr (arg1);
4522 arg2 = builtin_save_expr (arg2);
4523 len = builtin_save_expr (len);
4525 arg1_rtx = get_memory_rtx (arg1, len);
4526 arg2_rtx = get_memory_rtx (arg2, len);
4527 arg3_rtx = expand_normal (len);
4528 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4529 GEN_INT (MIN (arg1_align, arg2_align)));
4530 if (insn)
4532 emit_insn (insn);
4534 /* Return the value in the proper mode for this function. */
4535 mode = TYPE_MODE (TREE_TYPE (exp));
4536 if (GET_MODE (result) == mode)
4537 return result;
4538 if (target == 0)
4539 return convert_to_mode (mode, result, 0);
4540 convert_move (target, result, 0);
4541 return target;
4544 /* Expand the library call ourselves using a stabilized argument
4545 list to avoid re-evaluating the function's arguments twice. */
4546 fndecl = get_callee_fndecl (exp);
4547 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4548 if (TREE_CODE (fn) == CALL_EXPR)
4549 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4550 return expand_call (fn, target, target == const0_rtx);
4552 #endif
4553 return NULL_RTX;
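/* Illustrative sketch, not part of GCC: why clamping the length as above
   is safe.  The helper name is hypothetical.  */
#if 0
#include <string.h>

static int
example_strncmp_clamped (const char *s, size_t n)
{
  /* strncmp never looks past the literal's terminating NUL, so comparing
     at most strlen ("ab") + 1 == 3 bytes gives the same result as
     comparing N bytes.  */
  size_t len = 3;
  return strncmp (s, "ab", n < len ? n : len);
}
#endif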
4556 /* Expand expression EXP, which is a call to the strcat builtin.
4557 Return NULL_RTX if we failed; the caller should emit a normal call.
4558 Otherwise try to get the result in TARGET, if convenient. */
4560 static rtx
4561 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4563 location_t loc = EXPR_LOCATION (exp);
4565 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4566 return NULL_RTX;
4567 else
4569 tree dst = CALL_EXPR_ARG (exp, 0);
4570 tree src = CALL_EXPR_ARG (exp, 1);
4571 const char *p = c_getstr (src);
4573 /* If the string length is zero, return the dst parameter. */
4574 if (p && *p == '\0')
4575 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4577 if (optimize_insn_for_speed_p ())
4579 /* See if we can store by pieces into (dst + strlen(dst)). */
4580 tree newsrc, newdst,
4581 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4582 rtx insns;
4584 /* Stabilize the argument list. */
4585 newsrc = builtin_save_expr (src);
4586 dst = builtin_save_expr (dst);
4588 start_sequence ();
4590 /* Create strlen (dst). */
4591 newdst = build_call_expr (strlen_fn, 1, dst);
4592 /* Create (dst p+ strlen (dst)). */
4594 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
4595 TREE_TYPE (dst), dst, newdst);
4596 newdst = builtin_save_expr (newdst);
4598 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4600 end_sequence (); /* Stop sequence. */
4601 return NULL_RTX;
4604 /* Output the entire sequence. */
4605 insns = get_insns ();
4606 end_sequence ();
4607 emit_insn (insns);
4609 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4612 return NULL_RTX;
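/* Illustrative sketch, not part of GCC: the inline strategy used above.
   The helper name is hypothetical.  */
#if 0
#include <string.h>

static char *
example_strcat_as_strcpy (char *dst)
{
  /* Appending a constant string is done by copying it to dst + strlen (dst),
     so the copy itself can again be expanded by pieces.  */
  strcpy (dst + strlen (dst), "suffix");
  return dst;
}
#endif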
4616 /* Expand expression EXP, which is a call to the strncat builtin.
4617 Return NULL_RTX if we failed; the caller should emit a normal call.
4618 Otherwise try to get the result in TARGET, if convenient. */
4620 static rtx
4621 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4623 if (validate_arglist (exp,
4624 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4626 tree result = fold_builtin_strncat (EXPR_LOCATION (exp),
4627 CALL_EXPR_ARG (exp, 0),
4628 CALL_EXPR_ARG (exp, 1),
4629 CALL_EXPR_ARG (exp, 2));
4630 if (result)
4631 return expand_expr (result, target, mode, EXPAND_NORMAL);
4633 return NULL_RTX;
4636 /* Expand expression EXP, which is a call to the strspn builtin.
4637 Return NULL_RTX if we failed; the caller should emit a normal call.
4638 Otherwise try to get the result in TARGET, if convenient. */
4640 static rtx
4641 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4643 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4645 tree result = fold_builtin_strspn (EXPR_LOCATION (exp),
4646 CALL_EXPR_ARG (exp, 0),
4647 CALL_EXPR_ARG (exp, 1));
4648 if (result)
4649 return expand_expr (result, target, mode, EXPAND_NORMAL);
4651 return NULL_RTX;
4654 /* Expand expression EXP, which is a call to the strcspn builtin.
4655 Return NULL_RTX if we failed; the caller should emit a normal call.
4656 Otherwise try to get the result in TARGET, if convenient. */
4658 static rtx
4659 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4661 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4663 tree result = fold_builtin_strcspn (EXPR_LOCATION (exp),
4664 CALL_EXPR_ARG (exp, 0),
4665 CALL_EXPR_ARG (exp, 1));
4666 if (result)
4667 return expand_expr (result, target, mode, EXPAND_NORMAL);
4669 return NULL_RTX;
4672 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4673 if that's convenient. */
4675 rtx
4676 expand_builtin_saveregs (void)
4678 rtx val, seq;
4680 /* Don't do __builtin_saveregs more than once in a function.
4681 Save the result of the first call and reuse it. */
4682 if (saveregs_value != 0)
4683 return saveregs_value;
4685 /* When this function is called, it means that registers must be
4686 saved on entry to this function. So we migrate the call to the
4687 first insn of this function. */
4689 start_sequence ();
4691 /* Do whatever the machine needs done in this case. */
4692 val = targetm.calls.expand_builtin_saveregs ();
4694 seq = get_insns ();
4695 end_sequence ();
4697 saveregs_value = val;
4699 /* Put the insns after the NOTE that starts the function. If this
4700 is inside a start_sequence, make the outer-level insn chain current, so
4701 the code is placed at the start of the function. */
4702 push_topmost_sequence ();
4703 emit_insn_after (seq, entry_of_function ());
4704 pop_topmost_sequence ();
4706 return val;
4709 /* __builtin_args_info (N) returns word N of the arg space info
4710 for the current function. The number and meanings of words
4711 is controlled by the definition of CUMULATIVE_ARGS. */
4713 static rtx
4714 expand_builtin_args_info (tree exp)
4716 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4717 int *word_ptr = (int *) &crtl->args.info;
4719 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4721 if (call_expr_nargs (exp) != 0)
4723 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4724 error ("argument of %<__builtin_args_info%> must be constant");
4725 else
4727 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4729 if (wordnum < 0 || wordnum >= nwords)
4730 error ("argument of %<__builtin_args_info%> out of range");
4731 else
4732 return GEN_INT (word_ptr[wordnum]);
4735 else
4736 error ("missing argument in %<__builtin_args_info%>");
4738 return const0_rtx;
4741 /* Expand a call to __builtin_next_arg. */
4743 static rtx
4744 expand_builtin_next_arg (void)
4746 /* Checking arguments is already done in fold_builtin_next_arg
4747 that must be called before this function. */
4748 return expand_binop (ptr_mode, add_optab,
4749 crtl->args.internal_arg_pointer,
4750 crtl->args.arg_offset_rtx,
4751 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4754 /* Make it easier for the backends by protecting the valist argument
4755 from multiple evaluations. */
4757 static tree
4758 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4760 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4762 gcc_assert (vatype != NULL_TREE);
4764 if (TREE_CODE (vatype) == ARRAY_TYPE)
4766 if (TREE_SIDE_EFFECTS (valist))
4767 valist = save_expr (valist);
4769 /* For this case, the backends will be expecting a pointer to
4770 vatype, but it's possible we've actually been given an array
4771 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4772 So fix it. */
4773 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4775 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4776 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4779 else
4781 tree pt;
4783 if (! needs_lvalue)
4785 if (! TREE_SIDE_EFFECTS (valist))
4786 return valist;
4788 pt = build_pointer_type (vatype);
4789 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4790 TREE_SIDE_EFFECTS (valist) = 1;
4793 if (TREE_SIDE_EFFECTS (valist))
4794 valist = save_expr (valist);
4795 valist = build_fold_indirect_ref_loc (loc, valist);
4798 return valist;
4801 /* The "standard" definition of va_list is void*. */
4803 tree
4804 std_build_builtin_va_list (void)
4806 return ptr_type_node;
4809 /* The "standard" abi va_list is va_list_type_node. */
4811 tree
4812 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4814 return va_list_type_node;
4817 /* The "standard" type of va_list is va_list_type_node. */
4819 tree
4820 std_canonical_va_list_type (tree type)
4822 tree wtype, htype;
4824 if (INDIRECT_REF_P (type))
4825 type = TREE_TYPE (type);
4826 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4827 type = TREE_TYPE (type);
4828 wtype = va_list_type_node;
4829 htype = type;
4830 /* Treat structure va_list types. */
4831 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4832 htype = TREE_TYPE (htype);
4833 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4835 /* If va_list is an array type, the argument may have decayed
4836 to a pointer type, e.g. by being passed to another function.
4837 In that case, unwrap both types so that we can compare the
4838 underlying records. */
4839 if (TREE_CODE (htype) == ARRAY_TYPE
4840 || POINTER_TYPE_P (htype))
4842 wtype = TREE_TYPE (wtype);
4843 htype = TREE_TYPE (htype);
4846 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4847 return va_list_type_node;
4849 return NULL_TREE;
4852 /* The "standard" implementation of va_start: just assign `nextarg' to
4853 the variable. */
4855 void
4856 std_expand_builtin_va_start (tree valist, rtx nextarg)
4858 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4859 convert_move (va_r, nextarg, 0);
4862 /* Expand EXP, a call to __builtin_va_start. */
4864 static rtx
4865 expand_builtin_va_start (tree exp)
4867 rtx nextarg;
4868 tree valist;
4869 location_t loc = EXPR_LOCATION (exp);
4871 if (call_expr_nargs (exp) < 2)
4873 error_at (loc, "too few arguments to function %<va_start%>");
4874 return const0_rtx;
4877 if (fold_builtin_next_arg (exp, true))
4878 return const0_rtx;
4880 nextarg = expand_builtin_next_arg ();
4881 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4883 if (targetm.expand_builtin_va_start)
4884 targetm.expand_builtin_va_start (valist, nextarg);
4885 else
4886 std_expand_builtin_va_start (valist, nextarg);
4888 return const0_rtx;
4891 /* The "standard" implementation of va_arg: read the value from the
4892 current (padded) address and increment by the (padded) size. */
4894 tree
4895 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4896 gimple_seq *post_p)
4898 tree addr, t, type_size, rounded_size, valist_tmp;
4899 unsigned HOST_WIDE_INT align, boundary;
4900 bool indirect;
4902 #ifdef ARGS_GROW_DOWNWARD
4903 /* All of the alignment and movement below is for args-grow-up machines.
4904 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4905 implement their own specialized gimplify_va_arg_expr routines. */
4906 gcc_unreachable ();
4907 #endif
4909 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4910 if (indirect)
4911 type = build_pointer_type (type);
4913 align = PARM_BOUNDARY / BITS_PER_UNIT;
4914 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4916 /* When the caller aligns a parameter on the stack, any alignment
4917 beyond MAX_SUPPORTED_STACK_ALIGNMENT is capped at
4918 MAX_SUPPORTED_STACK_ALIGNMENT. Match the caller's behavior here
4919 in the callee. */
4920 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4921 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4923 boundary /= BITS_PER_UNIT;
4925 /* Hoist the valist value into a temporary for the moment. */
4926 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4928 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4929 requires greater alignment, we must perform dynamic alignment. */
4930 if (boundary > align
4931 && !integer_zerop (TYPE_SIZE (type)))
4933 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4934 fold_build2 (POINTER_PLUS_EXPR,
4935 TREE_TYPE (valist),
4936 valist_tmp, size_int (boundary - 1)));
4937 gimplify_and_add (t, pre_p);
4939 t = fold_convert (sizetype, valist_tmp);
4940 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4941 fold_convert (TREE_TYPE (valist),
4942 fold_build2 (BIT_AND_EXPR, sizetype, t,
4943 size_int (-boundary))));
4944 gimplify_and_add (t, pre_p);
4946 else
4947 boundary = align;
4949 /* If the actual alignment is less than the alignment of the type,
4950 adjust the type accordingly so that we don't assume strict alignment
4951 when dereferencing the pointer. */
4952 boundary *= BITS_PER_UNIT;
4953 if (boundary < TYPE_ALIGN (type))
4955 type = build_variant_type_copy (type);
4956 TYPE_ALIGN (type) = boundary;
4959 /* Compute the rounded size of the type. */
4960 type_size = size_in_bytes (type);
4961 rounded_size = round_up (type_size, align);
4963 /* Reduce rounded_size so it's sharable with the postqueue. */
4964 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4966 /* Get AP. */
4967 addr = valist_tmp;
4968 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4970 /* Small args are padded downward. */
4971 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4972 rounded_size, size_int (align));
4973 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4974 size_binop (MINUS_EXPR, rounded_size, type_size));
4975 addr = fold_build2 (POINTER_PLUS_EXPR,
4976 TREE_TYPE (addr), addr, t);
4979 /* Compute new value for AP. */
4980 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4981 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4982 gimplify_and_add (t, pre_p);
4984 addr = fold_convert (build_pointer_type (type), addr);
4986 if (indirect)
4987 addr = build_va_arg_indirect_ref (addr);
4989 return build_va_arg_indirect_ref (addr);
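/* Illustrative sketch, not part of GCC: the pointer arithmetic performed
   by the standard va_arg gimplification above, for an args-grow-upward
   target whose va_list is a plain pointer.  EXAMPLE_SLOT stands in for
   PARM_BOUNDARY / BITS_PER_UNIT; the helper name is hypothetical.  */
#if 0
#include <stddef.h>
#include <stdint.h>

#define EXAMPLE_SLOT 8

static void *
example_std_va_arg (char **ap, size_t size, size_t boundary)
{
  char *p = *ap;
  size_t rounded;

  /* Dynamic re-alignment when the argument needs more than the default
     parameter alignment.  */
  if (boundary > EXAMPLE_SLOT)
    p = (char *) (((uintptr_t) p + boundary - 1) & ~(uintptr_t) (boundary - 1));

  /* Advance AP by the argument size rounded up to a whole slot.  */
  rounded = (size + EXAMPLE_SLOT - 1) & ~(size_t) (EXAMPLE_SLOT - 1);
  *ap = p + rounded;

  /* PAD_VARARGS_DOWN targets would return p + (rounded - size) for
     arguments smaller than a slot; shown here as the plain address.  */
  return p;
}
#endif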
4992 /* Build an indirect-ref expression over the given TREE, which represents a
4993 piece of a va_arg() expansion. */
4994 tree
4995 build_va_arg_indirect_ref (tree addr)
4997 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4999 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
5000 mf_mark (addr);
5002 return addr;
5005 /* Return a dummy expression of type TYPE in order to keep going after an
5006 error. */
5008 static tree
5009 dummy_object (tree type)
5011 tree t = build_int_cst (build_pointer_type (type), 0);
5012 return build1 (INDIRECT_REF, type, t);
5015 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
5016 builtin function, but a very special sort of operator. */
5018 enum gimplify_status
5019 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5021 tree promoted_type, have_va_type;
5022 tree valist = TREE_OPERAND (*expr_p, 0);
5023 tree type = TREE_TYPE (*expr_p);
5024 tree t;
5025 location_t loc = EXPR_LOCATION (*expr_p);
5027 /* Verify that valist is of the proper type. */
5028 have_va_type = TREE_TYPE (valist);
5029 if (have_va_type == error_mark_node)
5030 return GS_ERROR;
5031 have_va_type = targetm.canonical_va_list_type (have_va_type);
5033 if (have_va_type == NULL_TREE)
5035 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
5036 return GS_ERROR;
5039 /* Generate a diagnostic for requesting data of a type that cannot
5040 be passed through `...' due to type promotion at the call site. */
5041 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
5042 != type)
5044 static bool gave_help;
5045 bool warned;
5047 /* Unfortunately, this is merely undefined, rather than a constraint
5048 violation, so we cannot make this an error. If this call is never
5049 executed, the program is still strictly conforming. */
5050 warned = warning_at (loc, 0,
5051 "%qT is promoted to %qT when passed through %<...%>",
5052 type, promoted_type);
5053 if (!gave_help && warned)
5055 gave_help = true;
5056 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
5057 promoted_type, type);
5060 /* We can, however, treat "undefined" any way we please.
5061 Call abort to encourage the user to fix the program. */
5062 if (warned)
5063 inform (loc, "if this code is reached, the program will abort");
5064 /* Before the abort, allow the evaluation of the va_list
5065 expression to exit or longjmp. */
5066 gimplify_and_add (valist, pre_p);
5067 t = build_call_expr_loc (loc,
5068 implicit_built_in_decls[BUILT_IN_TRAP], 0);
5069 gimplify_and_add (t, pre_p);
5071 /* This is dead code, but go ahead and finish so that the
5072 mode of the result comes out right. */
5073 *expr_p = dummy_object (type);
5074 return GS_ALL_DONE;
5076 else
5078 /* Make it easier for the backends by protecting the valist argument
5079 from multiple evaluations. */
5080 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
5082 /* For this case, the backends will be expecting a pointer to
5083 TREE_TYPE (abi), but it's possible we've
5084 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
5085 So fix it. */
5086 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5088 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
5089 valist = fold_convert_loc (loc, p1,
5090 build_fold_addr_expr_loc (loc, valist));
5093 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
5095 else
5096 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
5098 if (!targetm.gimplify_va_arg_expr)
5099 /* FIXME: Once most targets are converted we should merely
5100 assert this is non-null. */
5101 return GS_ALL_DONE;
5103 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
5104 return GS_OK;
5108 /* Expand EXP, a call to __builtin_va_end. */
5110 static rtx
5111 expand_builtin_va_end (tree exp)
5113 tree valist = CALL_EXPR_ARG (exp, 0);
5115 /* Evaluate for side effects, if needed. I hate macros that don't
5116 do that. */
5117 if (TREE_SIDE_EFFECTS (valist))
5118 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5120 return const0_rtx;
5123 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5124 builtin rather than just as an assignment in stdarg.h because of the
5125 nastiness of array-type va_list types. */
5127 static rtx
5128 expand_builtin_va_copy (tree exp)
5130 tree dst, src, t;
5131 location_t loc = EXPR_LOCATION (exp);
5133 dst = CALL_EXPR_ARG (exp, 0);
5134 src = CALL_EXPR_ARG (exp, 1);
5136 dst = stabilize_va_list_loc (loc, dst, 1);
5137 src = stabilize_va_list_loc (loc, src, 0);
5139 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5141 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5143 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5144 TREE_SIDE_EFFECTS (t) = 1;
5145 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5147 else
5149 rtx dstb, srcb, size;
5151 /* Evaluate to pointers. */
5152 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5153 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5154 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5155 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5157 dstb = convert_memory_address (Pmode, dstb);
5158 srcb = convert_memory_address (Pmode, srcb);
5160 /* "Dereference" to BLKmode memories. */
5161 dstb = gen_rtx_MEM (BLKmode, dstb);
5162 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5163 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5164 srcb = gen_rtx_MEM (BLKmode, srcb);
5165 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5166 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5168 /* Copy. */
5169 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5172 return const0_rtx;
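/* Illustrative sketch, not part of GCC: why va_copy must be a builtin
   rather than a plain assignment in <stdarg.h>.  The function name is
   hypothetical.  */
#if 0
#include <stdarg.h>

static int
example_va_copy (int n, ...)
{
  va_list ap, ap2;
  int first;

  va_start (ap, n);
  /* On targets whose va_list is an array type, "ap2 = ap" would not even
     be valid C; the builtin emits a block copy of the underlying record
     instead, as done above.  */
  va_copy (ap2, ap);
  first = va_arg (ap2, int);
  va_end (ap2);
  va_end (ap);
  return first;
}
#endif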
5175 /* Expand a call to one of the builtin functions __builtin_frame_address or
5176 __builtin_return_address. */
5178 static rtx
5179 expand_builtin_frame_address (tree fndecl, tree exp)
5181 /* The argument must be a nonnegative integer constant.
5182 It counts the number of frames to scan up the stack.
5183 The value is the return address saved in that frame. */
5184 if (call_expr_nargs (exp) == 0)
5185 /* Warning about missing arg was already issued. */
5186 return const0_rtx;
5187 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5189 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5190 error ("invalid argument to %<__builtin_frame_address%>");
5191 else
5192 error ("invalid argument to %<__builtin_return_address%>");
5193 return const0_rtx;
5195 else
5197 rtx tem
5198 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5199 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5201 /* Some ports cannot access arbitrary stack frames. */
5202 if (tem == NULL)
5204 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5205 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5206 else
5207 warning (0, "unsupported argument to %<__builtin_return_address%>");
5208 return const0_rtx;
5211 /* For __builtin_frame_address, return what we've got. */
5212 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5213 return tem;
5215 if (!REG_P (tem)
5216 && ! CONSTANT_P (tem))
5217 tem = copy_to_mode_reg (Pmode, tem);
5218 return tem;
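/* Illustrative sketch, not part of GCC: the argument to these builtins
   must be a nonnegative integer constant giving how many frames to walk
   up; level 0 (the current frame) is the only level many ports support
   reliably.  The helper name is hypothetical.  */
#if 0
static void *
example_caller_address (void)
{
  return __builtin_return_address (0);
}
#endif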
5222 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5223 we failed and the caller should emit a normal call, otherwise try to get
5224 the result in TARGET, if convenient. */
5226 static rtx
5227 expand_builtin_alloca (tree exp, rtx target)
5229 rtx op0;
5230 rtx result;
5232 /* Emit normal call if marked not-inlineable. */
5233 if (CALL_CANNOT_INLINE_P (exp))
5234 return NULL_RTX;
5236 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5237 return NULL_RTX;
5239 /* Compute the argument. */
5240 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5242 /* Allocate the desired space. */
5243 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5244 result = convert_memory_address (ptr_mode, result);
5246 return result;
5249 /* Expand EXP, a call to a bswap builtin. If convenient, the result should
5250 be placed in TARGET; SUBTARGET may be used to compute the operand. */
5252 static rtx
5253 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5255 enum machine_mode mode;
5256 tree arg;
5257 rtx op0;
5259 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5260 return NULL_RTX;
5262 arg = CALL_EXPR_ARG (exp, 0);
5263 mode = TYPE_MODE (TREE_TYPE (arg));
5264 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5266 target = expand_unop (mode, bswap_optab, op0, target, 1);
5268 gcc_assert (target);
5270 return convert_to_mode (mode, target, 0);
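/* Illustrative semantics of what the bswap optab computes (the usual 32-bit
   case): __builtin_bswap32 (0x12345678) yields 0x78563412, i.e. the byte
   order of the operand is reversed within its own mode.  */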
5273 /* Expand a call to a unary builtin in EXP.
5274 Return NULL_RTX if a normal call should be emitted rather than expanding the
5275 function in-line. If convenient, the result should be placed in TARGET.
5276 SUBTARGET may be used as the target for computing one of EXP's operands. */
5278 static rtx
5279 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5280 rtx subtarget, optab op_optab)
5282 rtx op0;
5284 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5285 return NULL_RTX;
5287 /* Compute the argument. */
5288 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5289 VOIDmode, EXPAND_NORMAL);
5290 /* Compute op, into TARGET if possible.
5291 Set TARGET to wherever the result comes back. */
5292 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5293 op_optab, op0, target, 1);
5294 gcc_assert (target);
5296 return convert_to_mode (target_mode, target, 0);
5299 /* If the string passed to fputs is a constant and is one character
5300 long, we attempt to transform this call into __builtin_fputc(). */
5302 static rtx
5303 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5305 /* Verify the arguments in the original call. */
5306 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5308 tree result = fold_builtin_fputs (EXPR_LOCATION (exp),
5309 CALL_EXPR_ARG (exp, 0),
5310 CALL_EXPR_ARG (exp, 1),
5311 (target == const0_rtx),
5312 unlocked, NULL_TREE);
5313 if (result)
5314 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5316 return NULL_RTX;
5319 /* Expand a call to __builtin_expect.  We just return our argument,
5320 since the builtin_expect semantics should already have been handled by
5321 the tree branch prediction pass.  */
5323 static rtx
5324 expand_builtin_expect (tree exp, rtx target)
5326 tree arg, c;
5328 if (call_expr_nargs (exp) < 2)
5329 return const0_rtx;
5330 arg = CALL_EXPR_ARG (exp, 0);
5331 c = CALL_EXPR_ARG (exp, 1);
5333 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5334 /* When guessing was done, the hints should already have been stripped away. */
5335 gcc_assert (!flag_guess_branch_prob
5336 || optimize == 0 || errorcount || sorrycount);
5337 return target;
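/* Illustrative source-level use, e.g. the common likely/unlikely macros:

     #define unlikely(x) __builtin_expect ((x) != 0, 0)

     if (unlikely (err != 0))
       handle_error ();                -- illustrative handler

   The hint only influences the tree-level branch predictor; by the time we
   get here the call simply evaluates to its first argument.  */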
5340 void
5341 expand_builtin_trap (void)
5343 #ifdef HAVE_trap
5344 if (HAVE_trap)
5345 emit_insn (gen_trap ());
5346 else
5347 #endif
5348 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5349 emit_barrier ();
5352 /* Expand a call to __builtin_unreachable. We do nothing except emit
5353 a barrier saying that control flow will not pass here.
5355 It is the responsibility of the program being compiled to ensure
5356 that control flow never reaches __builtin_unreachable. */
5357 static void
5358 expand_builtin_unreachable (void)
5360 emit_barrier ();
5363 /* Expand EXP, a call to fabs, fabsf or fabsl.
5364 Return NULL_RTX if a normal call should be emitted rather than expanding
5365 the function inline. If convenient, the result should be placed
5366 in TARGET. SUBTARGET may be used as the target for computing
5367 the operand. */
5369 static rtx
5370 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5372 enum machine_mode mode;
5373 tree arg;
5374 rtx op0;
5376 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5377 return NULL_RTX;
5379 arg = CALL_EXPR_ARG (exp, 0);
5380 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5381 mode = TYPE_MODE (TREE_TYPE (arg));
5382 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5383 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5386 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5387 Return NULL if a normal call should be emitted rather than expanding the
5388 function inline. If convenient, the result should be placed in TARGET.
5389 SUBTARGET may be used as the target for computing the operand. */
5391 static rtx
5392 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5394 rtx op0, op1;
5395 tree arg;
5397 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5398 return NULL_RTX;
5400 arg = CALL_EXPR_ARG (exp, 0);
5401 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5403 arg = CALL_EXPR_ARG (exp, 1);
5404 op1 = expand_normal (arg);
5406 return expand_copysign (op0, op1, target);
5409 /* Create a new constant string literal and return a char* pointer to it.
5410 The STRING_CST value is the LEN characters at STR. */
5411 tree
5412 build_string_literal (int len, const char *str)
5414 tree t, elem, index, type;
5416 t = build_string (len, str);
5417 elem = build_type_variant (char_type_node, 1, 0);
5418 index = build_index_type (size_int (len - 1));
5419 type = build_array_type (elem, index);
5420 TREE_TYPE (t) = type;
5421 TREE_CONSTANT (t) = 1;
5422 TREE_READONLY (t) = 1;
5423 TREE_STATIC (t) = 1;
5425 type = build_pointer_type (elem);
5426 t = build1 (ADDR_EXPR, type,
5427 build4 (ARRAY_REF, elem,
5428 t, integer_zero_node, NULL_TREE, NULL_TREE));
5429 return t;
5432 /* Expand EXP, a call to printf or printf_unlocked.
5433 Return NULL_RTX if a normal call should be emitted rather than transforming
5434 the function inline. If convenient, the result should be placed in
5435 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5436 call. */
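/* The transformations attempted below, sketched at the source level
   (illustrative; they only fire when the result of printf is unused):

     printf ("%s\n", s)   becomes  puts (s)
     printf ("%c", c)     becomes  putchar (c)
     printf ("")          becomes  nothing at all
     printf ("x")         becomes  putchar ('x')
     printf ("hello\n")   becomes  puts ("hello")

   Any other format containing '%' is left as a normal printf call, and a
   %-free literal without a trailing newline is also left alone, since
   stdout is not available here for fputs.  */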
5437 static rtx
5438 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5439 bool unlocked)
5441 /* If we're using an unlocked function, assume the other unlocked
5442 functions exist explicitly. */
5443 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5444 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5445 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5446 : implicit_built_in_decls[BUILT_IN_PUTS];
5447 const char *fmt_str;
5448 tree fn = 0;
5449 tree fmt, arg;
5450 int nargs = call_expr_nargs (exp);
5452 /* If the return value is used, don't do the transformation. */
5453 if (target != const0_rtx)
5454 return NULL_RTX;
5456 /* Verify the required arguments in the original call. */
5457 if (nargs == 0)
5458 return NULL_RTX;
5459 fmt = CALL_EXPR_ARG (exp, 0);
5460 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5461 return NULL_RTX;
5463 /* Check whether the format is a literal string constant. */
5464 fmt_str = c_getstr (fmt);
5465 if (fmt_str == NULL)
5466 return NULL_RTX;
5468 if (!init_target_chars ())
5469 return NULL_RTX;
5471 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5472 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5474 if ((nargs != 2)
5475 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5476 return NULL_RTX;
5477 if (fn_puts)
5478 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5480 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5481 else if (strcmp (fmt_str, target_percent_c) == 0)
5483 if ((nargs != 2)
5484 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5485 return NULL_RTX;
5486 if (fn_putchar)
5487 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5489 else
5491 /* We can't handle anything else with % args or %% ... yet. */
5492 if (strchr (fmt_str, target_percent))
5493 return NULL_RTX;
5495 if (nargs > 1)
5496 return NULL_RTX;
5498 /* If the format specifier was "", printf does nothing. */
5499 if (fmt_str[0] == '\0')
5500 return const0_rtx;
5501 /* If the format specifier has length of 1, call putchar. */
5502 if (fmt_str[1] == '\0')
5504 /* Given printf("c") (where c is any single character),
5505 convert "c"[0] to an int and pass that to the replacement
5506 function. */
5507 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5508 if (fn_putchar)
5509 fn = build_call_expr (fn_putchar, 1, arg);
5511 else
5513 /* If the format specifier was "string\n", call puts("string"). */
5514 size_t len = strlen (fmt_str);
5515 if ((unsigned char)fmt_str[len - 1] == target_newline)
5517 /* Create a NUL-terminated string that's one char shorter
5518 than the original, stripping off the trailing '\n'. */
5519 char *newstr = XALLOCAVEC (char, len);
5520 memcpy (newstr, fmt_str, len - 1);
5521 newstr[len - 1] = 0;
5522 arg = build_string_literal (len, newstr);
5523 if (fn_puts)
5524 fn = build_call_expr (fn_puts, 1, arg);
5526 else
5527 /* We'd like to arrange to call fputs(string,stdout) here,
5528 but we need stdout and don't have a way to get it yet. */
5529 return NULL_RTX;
5533 if (!fn)
5534 return NULL_RTX;
5535 if (TREE_CODE (fn) == CALL_EXPR)
5536 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5537 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5540 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5541 Return NULL_RTX if a normal call should be emitted rather than transforming
5542 the function inline. If convenient, the result should be placed in
5543 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5544 call. */
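/* The transformations attempted below, sketched at the source level
   (illustrative; they only fire when the result of fprintf is unused):

     fprintf (fp, "%s", s)   becomes  fputs (s, fp)
     fprintf (fp, "%c", c)   becomes  fputc (c, fp)
     fprintf (fp, "")        becomes  an evaluation of FP only
     fprintf (fp, "text")    becomes  fputs ("text", fp)

   Formats with any other '%' directive are expanded as normal calls.  */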
5545 static rtx
5546 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5547 bool unlocked)
5549 /* If we're using an unlocked function, assume the other unlocked
5550 functions exist explicitly. */
5551 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5552 : implicit_built_in_decls[BUILT_IN_FPUTC];
5553 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5554 : implicit_built_in_decls[BUILT_IN_FPUTS];
5555 const char *fmt_str;
5556 tree fn = 0;
5557 tree fmt, fp, arg;
5558 int nargs = call_expr_nargs (exp);
5560 /* If the return value is used, don't do the transformation. */
5561 if (target != const0_rtx)
5562 return NULL_RTX;
5564 /* Verify the required arguments in the original call. */
5565 if (nargs < 2)
5566 return NULL_RTX;
5567 fp = CALL_EXPR_ARG (exp, 0);
5568 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5569 return NULL_RTX;
5570 fmt = CALL_EXPR_ARG (exp, 1);
5571 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5572 return NULL_RTX;
5574 /* Check whether the format is a literal string constant. */
5575 fmt_str = c_getstr (fmt);
5576 if (fmt_str == NULL)
5577 return NULL_RTX;
5579 if (!init_target_chars ())
5580 return NULL_RTX;
5582 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5583 if (strcmp (fmt_str, target_percent_s) == 0)
5585 if ((nargs != 3)
5586 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5587 return NULL_RTX;
5588 arg = CALL_EXPR_ARG (exp, 2);
5589 if (fn_fputs)
5590 fn = build_call_expr (fn_fputs, 2, arg, fp);
5592 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5593 else if (strcmp (fmt_str, target_percent_c) == 0)
5595 if ((nargs != 3)
5596 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5597 return NULL_RTX;
5598 arg = CALL_EXPR_ARG (exp, 2);
5599 if (fn_fputc)
5600 fn = build_call_expr (fn_fputc, 2, arg, fp);
5602 else
5604 /* We can't handle anything else with % args or %% ... yet. */
5605 if (strchr (fmt_str, target_percent))
5606 return NULL_RTX;
5608 if (nargs > 2)
5609 return NULL_RTX;
5611 /* If the format specifier was "", fprintf does nothing. */
5612 if (fmt_str[0] == '\0')
5614 /* Evaluate and ignore FILE* argument for side-effects. */
5615 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5616 return const0_rtx;
5619 /* When "string" doesn't contain %, replace all cases of
5620 fprintf(stream,string) with fputs(string,stream). The fputs
5621 builtin will take care of special cases like length == 1. */
5622 if (fn_fputs)
5623 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5626 if (!fn)
5627 return NULL_RTX;
5628 if (TREE_CODE (fn) == CALL_EXPR)
5629 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5630 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5633 /* Expand a call EXP to sprintf. Return NULL_RTX if
5634 a normal call should be emitted rather than expanding the function
5635 inline. If convenient, the result should be placed in TARGET with
5636 mode MODE. */
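/* The transformations attempted below, sketched at the source level
   (illustrative):

     sprintf (d, "abc")     becomes  strcpy (d, "abc"), with value 3 if the
                                     result is used
     sprintf (d, "%s", s)   becomes  strcpy (d, s), with the value available
                                     only when c_strlen can compute it

   Formats with any other '%' directive are expanded as normal calls.  */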
5638 static rtx
5639 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5641 tree dest, fmt;
5642 const char *fmt_str;
5643 int nargs = call_expr_nargs (exp);
5645 /* Verify the required arguments in the original call. */
5646 if (nargs < 2)
5647 return NULL_RTX;
5648 dest = CALL_EXPR_ARG (exp, 0);
5649 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5650 return NULL_RTX;
5651 fmt = CALL_EXPR_ARG (exp, 1);
5652 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5653 return NULL_RTX;
5655 /* Check whether the format is a literal string constant. */
5656 fmt_str = c_getstr (fmt);
5657 if (fmt_str == NULL)
5658 return NULL_RTX;
5660 if (!init_target_chars ())
5661 return NULL_RTX;
5663 /* If the format doesn't contain % args or %%, use strcpy. */
5664 if (strchr (fmt_str, target_percent) == 0)
5666 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5667 tree exp;
5669 if ((nargs > 2) || ! fn)
5670 return NULL_RTX;
5671 expand_expr (build_call_expr (fn, 2, dest, fmt),
5672 const0_rtx, VOIDmode, EXPAND_NORMAL);
5673 if (target == const0_rtx)
5674 return const0_rtx;
5675 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5676 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5678 /* If the format is "%s", use strcpy if the result isn't used. */
5679 else if (strcmp (fmt_str, target_percent_s) == 0)
5681 tree fn, arg, len;
5682 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5684 if (! fn)
5685 return NULL_RTX;
5686 if (nargs != 3)
5687 return NULL_RTX;
5688 arg = CALL_EXPR_ARG (exp, 2);
5689 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5690 return NULL_RTX;
5692 if (target != const0_rtx)
5694 len = c_strlen (arg, 1);
5695 if (! len || TREE_CODE (len) != INTEGER_CST)
5696 return NULL_RTX;
5698 else
5699 len = NULL_TREE;
5701 expand_expr (build_call_expr (fn, 2, dest, arg),
5702 const0_rtx, VOIDmode, EXPAND_NORMAL);
5704 if (target == const0_rtx)
5705 return const0_rtx;
5706 return expand_expr (len, target, mode, EXPAND_NORMAL);
5709 return NULL_RTX;
5712 /* Expand a call to either the entry or exit function profiler. */
5714 static rtx
5715 expand_builtin_profile_func (bool exitp)
5717 rtx this_rtx, which;
5719 this_rtx = DECL_RTL (current_function_decl);
5720 gcc_assert (MEM_P (this_rtx));
5721 this_rtx = XEXP (this_rtx, 0);
5723 if (exitp)
5724 which = profile_function_exit_libfunc;
5725 else
5726 which = profile_function_entry_libfunc;
5728 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5729 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5730 0),
5731 Pmode);
5733 return const0_rtx;
5736 /* Expand a call to __builtin___clear_cache. */
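/* Illustrative use: after storing freshly generated instructions into a
   buffer, for example for a nested-function trampoline or a JIT,

     __builtin___clear_cache (buf, buf + size);

   makes the range safe to execute.  Both arguments must be pointers, which
   is checked in the expander below.  */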
5738 static rtx
5739 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5741 #ifndef HAVE_clear_cache
5742 #ifdef CLEAR_INSN_CACHE
5743 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5744 does something. Just do the default expansion to a call to
5745 __clear_cache(). */
5746 return NULL_RTX;
5747 #else
5748 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5749 does nothing. There is no need to call it. Do nothing. */
5750 return const0_rtx;
5751 #endif /* CLEAR_INSN_CACHE */
5752 #else
5753 /* We have a "clear_cache" insn, and it will handle everything. */
5754 tree begin, end;
5755 rtx begin_rtx, end_rtx;
5756 enum insn_code icode;
5758 /* We must not expand to a library call. If we did, any
5759 fallback library function in libgcc that might contain a call to
5760 __builtin___clear_cache() would recurse infinitely. */
5761 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5763 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5764 return const0_rtx;
5767 if (HAVE_clear_cache)
5769 icode = CODE_FOR_clear_cache;
5771 begin = CALL_EXPR_ARG (exp, 0);
5772 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5773 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5774 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5775 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5777 end = CALL_EXPR_ARG (exp, 1);
5778 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5779 end_rtx = convert_memory_address (Pmode, end_rtx);
5780 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5781 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5783 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5785 return const0_rtx;
5786 #endif /* HAVE_clear_cache */
5789 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5791 static rtx
5792 round_trampoline_addr (rtx tramp)
5794 rtx temp, addend, mask;
5796 /* If we don't need too much alignment, we'll have been guaranteed
5797 proper alignment by get_trampoline_type. */
5798 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5799 return tramp;
5801 /* Round address up to desired boundary. */
5802 temp = gen_reg_rtx (Pmode);
5803 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5804 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5806 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5807 temp, 0, OPTAB_LIB_WIDEN);
5808 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5809 temp, 0, OPTAB_LIB_WIDEN);
5811 return tramp;
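/* Worked example (illustrative): assuming TRAMPOLINE_ALIGNMENT is 64 bits,
   ADDEND is 64/8 - 1 == 7 and MASK is -8, so the two binops compute

     tramp = (tramp + 7) & -8;

   rounding the address up to the next 8-byte boundary.  */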
5814 static rtx
5815 expand_builtin_init_trampoline (tree exp)
5817 tree t_tramp, t_func, t_chain;
5818 rtx r_tramp, r_func, r_chain;
5819 #ifdef TRAMPOLINE_TEMPLATE
5820 rtx blktramp;
5821 #endif
5823 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5824 POINTER_TYPE, VOID_TYPE))
5825 return NULL_RTX;
5827 t_tramp = CALL_EXPR_ARG (exp, 0);
5828 t_func = CALL_EXPR_ARG (exp, 1);
5829 t_chain = CALL_EXPR_ARG (exp, 2);
5831 r_tramp = expand_normal (t_tramp);
5832 r_func = expand_normal (t_func);
5833 r_chain = expand_normal (t_chain);
5835 /* Generate insns to initialize the trampoline. */
5836 r_tramp = round_trampoline_addr (r_tramp);
5837 #ifdef TRAMPOLINE_TEMPLATE
5838 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5839 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5840 emit_block_move (blktramp, assemble_trampoline_template (),
5841 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5842 #endif
5843 trampolines_created = 1;
5844 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5846 return const0_rtx;
5849 static rtx
5850 expand_builtin_adjust_trampoline (tree exp)
5852 rtx tramp;
5854 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5855 return NULL_RTX;
5857 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5858 tramp = round_trampoline_addr (tramp);
5859 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5860 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5861 #endif
5863 return tramp;
5866 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5867 function. The function first checks whether the back end provides
5868 an insn to implement signbit for the respective mode. If not, it
5869 checks whether the floating point format of the value is such that
5870 the sign bit can be extracted. If that is not the case, the
5871 function returns NULL_RTX to indicate that a normal call should be
5872 emitted rather than expanding the function in-line. EXP is the
5873 expression that is a call to the builtin function; if convenient,
5874 the result should be placed in TARGET. */
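/* Worked example (illustrative, assuming IEEE single precision and a
   32-bit int result): FMODE is SFmode with the sign in bit 31, RMODE is
   SImode, so the single-AND branch below reduces signbit (x) to roughly

     (the bits of x viewed as an int) & 0x80000000

   which is nonzero exactly when the sign bit of x is set.  When the sign
   bit lies outside the result mode, the shift-and-mask branch is used
   instead.  */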
5875 static rtx
5876 expand_builtin_signbit (tree exp, rtx target)
5878 const struct real_format *fmt;
5879 enum machine_mode fmode, imode, rmode;
5880 HOST_WIDE_INT hi, lo;
5881 tree arg;
5882 int word, bitpos;
5883 enum insn_code icode;
5884 rtx temp;
5885 location_t loc = EXPR_LOCATION (exp);
5887 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5888 return NULL_RTX;
5890 arg = CALL_EXPR_ARG (exp, 0);
5891 fmode = TYPE_MODE (TREE_TYPE (arg));
5892 rmode = TYPE_MODE (TREE_TYPE (exp));
5893 fmt = REAL_MODE_FORMAT (fmode);
5895 arg = builtin_save_expr (arg);
5897 /* Expand the argument yielding an RTX expression. */
5898 temp = expand_normal (arg);
5900 /* Check if the back end provides an insn that handles signbit for the
5901 argument's mode. */
5902 icode = signbit_optab->handlers [(int) fmode].insn_code;
5903 if (icode != CODE_FOR_nothing)
5905 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5906 emit_unop_insn (icode, target, temp, UNKNOWN);
5907 return target;
5910 /* For floating point formats without a sign bit, implement signbit
5911 as "ARG < 0.0". */
5912 bitpos = fmt->signbit_ro;
5913 if (bitpos < 0)
5915 /* But we can't do this if the format supports signed zero. */
5916 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5917 return NULL_RTX;
5919 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5920 build_real (TREE_TYPE (arg), dconst0));
5921 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5924 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5926 imode = int_mode_for_mode (fmode);
5927 if (imode == BLKmode)
5928 return NULL_RTX;
5929 temp = gen_lowpart (imode, temp);
5931 else
5933 imode = word_mode;
5934 /* Handle targets with different FP word orders. */
5935 if (FLOAT_WORDS_BIG_ENDIAN)
5936 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5937 else
5938 word = bitpos / BITS_PER_WORD;
5939 temp = operand_subword_force (temp, word, fmode);
5940 bitpos = bitpos % BITS_PER_WORD;
5943 /* Force the intermediate word_mode (or narrower) result into a
5944 register. This avoids attempting to create paradoxical SUBREGs
5945 of floating point modes below. */
5946 temp = force_reg (imode, temp);
5948 /* If the bitpos is within the "result mode" lowpart, the operation
5949 can be implemented with a single bitwise AND. Otherwise, we need
5950 a right shift and an AND. */
5952 if (bitpos < GET_MODE_BITSIZE (rmode))
5954 if (bitpos < HOST_BITS_PER_WIDE_INT)
5956 hi = 0;
5957 lo = (HOST_WIDE_INT) 1 << bitpos;
5959 else
5961 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5962 lo = 0;
5965 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5966 temp = gen_lowpart (rmode, temp);
5967 temp = expand_binop (rmode, and_optab, temp,
5968 immed_double_const (lo, hi, rmode),
5969 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5971 else
5973 /* Perform a logical right shift to place the signbit in the least
5974 significant bit, then truncate the result to the desired mode
5975 and mask just this bit. */
5976 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5977 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5978 temp = gen_lowpart (rmode, temp);
5979 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5980 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5983 return temp;
5986 /* Expand fork or exec calls. TARGET is the desired target of the
5987 call.  EXP is the call.  FN is the
5988 FUNCTION_DECL of the actual function.  IGNORE is nonzero if the
5989 value is to be ignored. */
5991 static rtx
5992 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5994 tree id, decl;
5995 tree call;
5997 /* If we are not profiling, just call the function. */
5998 if (!profile_arc_flag)
5999 return NULL_RTX;
6001 /* Otherwise call the wrapper. This should be equivalent for the rest of
6002 the compiler, so the code does not diverge, and the wrapper may run the
6003 code necessary for keeping the profiling sane. */
6005 switch (DECL_FUNCTION_CODE (fn))
6007 case BUILT_IN_FORK:
6008 id = get_identifier ("__gcov_fork");
6009 break;
6011 case BUILT_IN_EXECL:
6012 id = get_identifier ("__gcov_execl");
6013 break;
6015 case BUILT_IN_EXECV:
6016 id = get_identifier ("__gcov_execv");
6017 break;
6019 case BUILT_IN_EXECLP:
6020 id = get_identifier ("__gcov_execlp");
6021 break;
6023 case BUILT_IN_EXECLE:
6024 id = get_identifier ("__gcov_execle");
6025 break;
6027 case BUILT_IN_EXECVP:
6028 id = get_identifier ("__gcov_execvp");
6029 break;
6031 case BUILT_IN_EXECVE:
6032 id = get_identifier ("__gcov_execve");
6033 break;
6035 default:
6036 gcc_unreachable ();
6039 decl = build_decl (DECL_SOURCE_LOCATION (fn),
6040 FUNCTION_DECL, id, TREE_TYPE (fn));
6041 DECL_EXTERNAL (decl) = 1;
6042 TREE_PUBLIC (decl) = 1;
6043 DECL_ARTIFICIAL (decl) = 1;
6044 TREE_NOTHROW (decl) = 1;
6045 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6046 DECL_VISIBILITY_SPECIFIED (decl) = 1;
6047 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
6048 return expand_call (call, target, ignore);
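/* Effect at the source level (illustrative): when profiling with
   -fprofile-arcs, a call such as

     fork ();

   is re-pointed at the libgcov wrapper __gcov_fork (), and likewise for
   the exec family, so the wrappers can keep the profile data consistent
   across the fork or exec.  */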
6053 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6054 the pointer in these functions is void*, the tree optimizers may remove
6055 casts. The mode computed in expand_builtin isn't reliable either, due
6056 to __sync_bool_compare_and_swap.
6058 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6059 group of builtins. This gives us log2 of the mode size. */
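/* Example (illustrative): for __sync_fetch_and_add_4 the caller passes
   fcode - BUILT_IN_FETCH_AND_ADD_1, which is 2, so with the usual
   BITS_PER_UNIT of 8 this asks for mode_for_size (8 << 2, MODE_INT, 0),
   i.e. the 32-bit integer mode.  */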
6061 static inline enum machine_mode
6062 get_builtin_sync_mode (int fcode_diff)
6064 /* The size is not negotiable, so ask not to get BLKmode in return
6065 if the target indicates that a smaller size would be better. */
6066 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
6069 /* Expand the memory expression LOC and return the appropriate memory operand
6070 for the builtin_sync operations. */
6072 static rtx
6073 get_builtin_sync_mem (tree loc, enum machine_mode mode)
6075 rtx addr, mem;
6077 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
6079 /* Note that we explicitly do not want any alias information for this
6080 memory, so that we kill all other live memories. Otherwise we don't
6081 satisfy the full barrier semantics of the intrinsic. */
6082 mem = validize_mem (gen_rtx_MEM (mode, addr));
6084 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
6085 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6086 MEM_VOLATILE_P (mem) = 1;
6088 return mem;
6091 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6092 EXP is the CALL_EXPR. CODE is the rtx code
6093 that corresponds to the arithmetic or logical operation from the name;
6094 an exception here is that NOT actually means NAND. TARGET is an optional
6095 place for us to store the results; AFTER is true if this is the
6096 xxx_and_fetch form (the value after the operation is returned).
IGNORE is true if we don't actually care about
6097 the result of the operation at all. */
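/* Example (illustrative): at the source level,

     old_x = __sync_fetch_and_add (&x, n);   -- AFTER is false, old value
     new_x = __sync_add_and_fetch (&x, n);   -- AFTER is true, new value

   both use CODE == PLUS; the __sync_*nand* variants pass CODE == NOT,
   which by the convention above means NAND.  */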
6099 static rtx
6100 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
6101 enum rtx_code code, bool after,
6102 rtx target, bool ignore)
6104 rtx val, mem;
6105 enum machine_mode old_mode;
6106 location_t loc = EXPR_LOCATION (exp);
6108 if (code == NOT && warn_sync_nand)
6110 tree fndecl = get_callee_fndecl (exp);
6111 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6113 static bool warned_f_a_n, warned_n_a_f;
6115 switch (fcode)
6117 case BUILT_IN_FETCH_AND_NAND_1:
6118 case BUILT_IN_FETCH_AND_NAND_2:
6119 case BUILT_IN_FETCH_AND_NAND_4:
6120 case BUILT_IN_FETCH_AND_NAND_8:
6121 case BUILT_IN_FETCH_AND_NAND_16:
6123 if (warned_f_a_n)
6124 break;
6126 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
6127 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6128 warned_f_a_n = true;
6129 break;
6131 case BUILT_IN_NAND_AND_FETCH_1:
6132 case BUILT_IN_NAND_AND_FETCH_2:
6133 case BUILT_IN_NAND_AND_FETCH_4:
6134 case BUILT_IN_NAND_AND_FETCH_8:
6135 case BUILT_IN_NAND_AND_FETCH_16:
6137 if (warned_n_a_f)
6138 break;
6140 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
6141 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6142 warned_n_a_f = true;
6143 break;
6145 default:
6146 gcc_unreachable ();
6150 /* Expand the operands. */
6151 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6153 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6154 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6155 of CONST_INTs, where we know the old_mode only from the call argument. */
6156 old_mode = GET_MODE (val);
6157 if (old_mode == VOIDmode)
6158 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6159 val = convert_modes (mode, old_mode, val, 1);
6161 if (ignore)
6162 return expand_sync_operation (mem, val, code);
6163 else
6164 return expand_sync_fetch_operation (mem, val, code, after, target);
6167 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6168 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6169 true if this is the boolean form. TARGET is a place for us to store the
6170 results; this is NOT optional if IS_BOOL is true. */
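/* Example (illustrative): at the source level,

     ok  = __sync_bool_compare_and_swap (&x, oldv, newv);  -- IS_BOOL form
     was = __sync_val_compare_and_swap (&x, oldv, newv);   -- value form

   Both store NEWV into x only if x compared equal to OLDV; the first
   returns whether the store happened, the second returns the prior value.  */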
6172 static rtx
6173 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
6174 bool is_bool, rtx target)
6176 rtx old_val, new_val, mem;
6177 enum machine_mode old_mode;
6179 /* Expand the operands. */
6180 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6183 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6184 mode, EXPAND_NORMAL);
6185 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6186 of CONST_INTs, where we know the old_mode only from the call argument. */
6187 old_mode = GET_MODE (old_val);
6188 if (old_mode == VOIDmode)
6189 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6190 old_val = convert_modes (mode, old_mode, old_val, 1);
6192 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6193 mode, EXPAND_NORMAL);
6194 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6195 of CONST_INTs, where we know the old_mode only from the call argument. */
6196 old_mode = GET_MODE (new_val);
6197 if (old_mode == VOIDmode)
6198 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6199 new_val = convert_modes (mode, old_mode, new_val, 1);
6201 if (is_bool)
6202 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6203 else
6204 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6207 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6208 general form is actually an atomic exchange, and some targets only
6209 support a reduced form with the second argument being a constant 1.
6210 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6211 the results. */
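/* Example (illustrative): a minimal spin-lock acquire,

     while (__sync_lock_test_and_set (&lock, 1))
       ;   -- a previous value of 1 means the lock was already held

   On targets that only provide the reduced form, the value stored is
   restricted to the constant 1, as noted above.  */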
6213 static rtx
6214 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6215 rtx target)
6217 rtx val, mem;
6218 enum machine_mode old_mode;
6220 /* Expand the operands. */
6221 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6222 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6223 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6224 of CONST_INTs, where we know the old_mode only from the call argument. */
6225 old_mode = GET_MODE (val);
6226 if (old_mode == VOIDmode)
6227 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6228 val = convert_modes (mode, old_mode, val, 1);
6230 return expand_sync_lock_test_and_set (mem, val, target);
6233 /* Expand the __sync_synchronize intrinsic. */
6235 static void
6236 expand_builtin_synchronize (void)
6238 gimple x;
6240 #ifdef HAVE_memory_barrier
6241 if (HAVE_memory_barrier)
6243 emit_insn (gen_memory_barrier ());
6244 return;
6246 #endif
6248 if (synchronize_libfunc != NULL_RTX)
6250 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6251 return;
6254 /* If no explicit memory barrier instruction is available, create an
6255 empty asm stmt with a memory clobber. */
6256 x = gimple_build_asm ("", 0, 0, 1,
6257 tree_cons (NULL, build_string (6, "memory"), NULL));
6258 gimple_asm_set_volatile (x, true);
6259 expand_asm_stmt (x);
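/* The asm fallback above corresponds to the classic source-level compiler
   barrier (illustrative):

     __asm__ __volatile__ ("" : : : "memory");

   It emits no instructions but keeps the compiler from moving memory
   accesses across the __sync_synchronize call.  */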
6262 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6264 static void
6265 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6267 enum insn_code icode;
6268 rtx mem, insn;
6269 rtx val = const0_rtx;
6271 /* Expand the operands. */
6272 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6274 /* If there is an explicit operation in the md file, use it. */
6275 icode = sync_lock_release[mode];
6276 if (icode != CODE_FOR_nothing)
6278 if (!insn_data[icode].operand[1].predicate (val, mode))
6279 val = force_reg (mode, val);
6281 insn = GEN_FCN (icode) (mem, val);
6282 if (insn)
6284 emit_insn (insn);
6285 return;
6289 /* Otherwise we can implement this operation by emitting a barrier
6290 followed by a store of zero. */
6291 expand_builtin_synchronize ();
6292 emit_move_insn (mem, val);
6295 /* Expand an expression EXP that calls a built-in function,
6296 with result going to TARGET if that's convenient
6297 (and in mode MODE if that's convenient).
6298 SUBTARGET may be used as the target for computing one of EXP's operands.
6299 IGNORE is nonzero if the value is to be ignored. */
6301 rtx
6302 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6303 int ignore)
6305 tree fndecl = get_callee_fndecl (exp);
6306 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6307 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6309 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6310 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6312 /* When not optimizing, generate calls to library functions for a certain
6313 set of builtins. */
6314 if (!optimize
6315 && !called_as_built_in (fndecl)
6316 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6317 && fcode != BUILT_IN_ALLOCA
6318 && fcode != BUILT_IN_FREE)
6319 return expand_call (exp, target, ignore);
6321 /* The built-in function expanders test for target == const0_rtx
6322 to determine whether the function's result will be ignored. */
6323 if (ignore)
6324 target = const0_rtx;
6326 /* If the result of a pure or const built-in function is ignored, and
6327 none of its arguments are volatile, we can avoid expanding the
6328 built-in call and just evaluate the arguments for side-effects. */
6329 if (target == const0_rtx
6330 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6332 bool volatilep = false;
6333 tree arg;
6334 call_expr_arg_iterator iter;
6336 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6337 if (TREE_THIS_VOLATILE (arg))
6339 volatilep = true;
6340 break;
6343 if (! volatilep)
6345 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6346 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6347 return const0_rtx;
6351 switch (fcode)
6353 CASE_FLT_FN (BUILT_IN_FABS):
6354 target = expand_builtin_fabs (exp, target, subtarget);
6355 if (target)
6356 return target;
6357 break;
6359 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6360 target = expand_builtin_copysign (exp, target, subtarget);
6361 if (target)
6362 return target;
6363 break;
6365 /* Just do a normal library call if we were unable to fold
6366 the values. */
6367 CASE_FLT_FN (BUILT_IN_CABS):
6368 break;
6370 CASE_FLT_FN (BUILT_IN_EXP):
6371 CASE_FLT_FN (BUILT_IN_EXP10):
6372 CASE_FLT_FN (BUILT_IN_POW10):
6373 CASE_FLT_FN (BUILT_IN_EXP2):
6374 CASE_FLT_FN (BUILT_IN_EXPM1):
6375 CASE_FLT_FN (BUILT_IN_LOGB):
6376 CASE_FLT_FN (BUILT_IN_LOG):
6377 CASE_FLT_FN (BUILT_IN_LOG10):
6378 CASE_FLT_FN (BUILT_IN_LOG2):
6379 CASE_FLT_FN (BUILT_IN_LOG1P):
6380 CASE_FLT_FN (BUILT_IN_TAN):
6381 CASE_FLT_FN (BUILT_IN_ASIN):
6382 CASE_FLT_FN (BUILT_IN_ACOS):
6383 CASE_FLT_FN (BUILT_IN_ATAN):
6384 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6385 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6386 because of possible accuracy problems. */
6387 if (! flag_unsafe_math_optimizations)
6388 break;
6389 CASE_FLT_FN (BUILT_IN_SQRT):
6390 CASE_FLT_FN (BUILT_IN_FLOOR):
6391 CASE_FLT_FN (BUILT_IN_CEIL):
6392 CASE_FLT_FN (BUILT_IN_TRUNC):
6393 CASE_FLT_FN (BUILT_IN_ROUND):
6394 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6395 CASE_FLT_FN (BUILT_IN_RINT):
6396 target = expand_builtin_mathfn (exp, target, subtarget);
6397 if (target)
6398 return target;
6399 break;
6401 CASE_FLT_FN (BUILT_IN_ILOGB):
6402 if (! flag_unsafe_math_optimizations)
6403 break;
6404 CASE_FLT_FN (BUILT_IN_ISINF):
6405 CASE_FLT_FN (BUILT_IN_FINITE):
6406 case BUILT_IN_ISFINITE:
6407 case BUILT_IN_ISNORMAL:
6408 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6409 if (target)
6410 return target;
6411 break;
6413 CASE_FLT_FN (BUILT_IN_LCEIL):
6414 CASE_FLT_FN (BUILT_IN_LLCEIL):
6415 CASE_FLT_FN (BUILT_IN_LFLOOR):
6416 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6417 target = expand_builtin_int_roundingfn (exp, target);
6418 if (target)
6419 return target;
6420 break;
6422 CASE_FLT_FN (BUILT_IN_LRINT):
6423 CASE_FLT_FN (BUILT_IN_LLRINT):
6424 CASE_FLT_FN (BUILT_IN_LROUND):
6425 CASE_FLT_FN (BUILT_IN_LLROUND):
6426 target = expand_builtin_int_roundingfn_2 (exp, target);
6427 if (target)
6428 return target;
6429 break;
6431 CASE_FLT_FN (BUILT_IN_POW):
6432 target = expand_builtin_pow (exp, target, subtarget);
6433 if (target)
6434 return target;
6435 break;
6437 CASE_FLT_FN (BUILT_IN_POWI):
6438 target = expand_builtin_powi (exp, target, subtarget);
6439 if (target)
6440 return target;
6441 break;
6443 CASE_FLT_FN (BUILT_IN_ATAN2):
6444 CASE_FLT_FN (BUILT_IN_LDEXP):
6445 CASE_FLT_FN (BUILT_IN_SCALB):
6446 CASE_FLT_FN (BUILT_IN_SCALBN):
6447 CASE_FLT_FN (BUILT_IN_SCALBLN):
6448 if (! flag_unsafe_math_optimizations)
6449 break;
6451 CASE_FLT_FN (BUILT_IN_FMOD):
6452 CASE_FLT_FN (BUILT_IN_REMAINDER):
6453 CASE_FLT_FN (BUILT_IN_DREM):
6454 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6455 if (target)
6456 return target;
6457 break;
6459 CASE_FLT_FN (BUILT_IN_CEXPI):
6460 target = expand_builtin_cexpi (exp, target, subtarget);
6461 gcc_assert (target);
6462 return target;
6464 CASE_FLT_FN (BUILT_IN_SIN):
6465 CASE_FLT_FN (BUILT_IN_COS):
6466 if (! flag_unsafe_math_optimizations)
6467 break;
6468 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6469 if (target)
6470 return target;
6471 break;
6473 CASE_FLT_FN (BUILT_IN_SINCOS):
6474 if (! flag_unsafe_math_optimizations)
6475 break;
6476 target = expand_builtin_sincos (exp);
6477 if (target)
6478 return target;
6479 break;
6481 case BUILT_IN_APPLY_ARGS:
6482 return expand_builtin_apply_args ();
6484 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6485 FUNCTION with a copy of the parameters described by
6486 ARGUMENTS, and ARGSIZE. It returns a block of memory
6487 allocated on the stack into which is stored all the registers
6488 that might possibly be used for returning the result of a
6489 function. ARGUMENTS is the value returned by
6490 __builtin_apply_args. ARGSIZE is the number of bytes of
6491 arguments that must be copied. ??? How should this value be
6492 computed? We'll also need a safe worst case value for varargs
6493 functions. */
6494 case BUILT_IN_APPLY:
6495 if (!validate_arglist (exp, POINTER_TYPE,
6496 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6497 && !validate_arglist (exp, REFERENCE_TYPE,
6498 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6499 return const0_rtx;
6500 else
6502 rtx ops[3];
6504 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6505 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6506 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6508 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6511 /* __builtin_return (RESULT) causes the function to return the
6512 value described by RESULT. RESULT is address of the block of
6513 memory returned by __builtin_apply. */
6514 case BUILT_IN_RETURN:
6515 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6516 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6517 return const0_rtx;
6519 case BUILT_IN_SAVEREGS:
6520 return expand_builtin_saveregs ();
6522 case BUILT_IN_ARGS_INFO:
6523 return expand_builtin_args_info (exp);
6525 case BUILT_IN_VA_ARG_PACK:
6526 /* All valid uses of __builtin_va_arg_pack () are removed during
6527 inlining. */
6528 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6529 return const0_rtx;
6531 case BUILT_IN_VA_ARG_PACK_LEN:
6532 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6533 inlining. */
6534 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6535 return const0_rtx;
6537 /* Return the address of the first anonymous stack arg. */
6538 case BUILT_IN_NEXT_ARG:
6539 if (fold_builtin_next_arg (exp, false))
6540 return const0_rtx;
6541 return expand_builtin_next_arg ();
6543 case BUILT_IN_CLEAR_CACHE:
6544 target = expand_builtin___clear_cache (exp);
6545 if (target)
6546 return target;
6547 break;
6549 case BUILT_IN_CLASSIFY_TYPE:
6550 return expand_builtin_classify_type (exp);
6552 case BUILT_IN_CONSTANT_P:
6553 return const0_rtx;
6555 case BUILT_IN_FRAME_ADDRESS:
6556 case BUILT_IN_RETURN_ADDRESS:
6557 return expand_builtin_frame_address (fndecl, exp);
6559 /* Returns the address of the area where the structure is returned.
6560 0 otherwise. */
6561 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6562 if (call_expr_nargs (exp) != 0
6563 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6564 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6565 return const0_rtx;
6566 else
6567 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6569 case BUILT_IN_ALLOCA:
6570 target = expand_builtin_alloca (exp, target);
6571 if (target)
6572 return target;
6573 break;
6575 case BUILT_IN_STACK_SAVE:
6576 return expand_stack_save ();
6578 case BUILT_IN_STACK_RESTORE:
6579 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6580 return const0_rtx;
6582 case BUILT_IN_BSWAP32:
6583 case BUILT_IN_BSWAP64:
6584 target = expand_builtin_bswap (exp, target, subtarget);
6586 if (target)
6587 return target;
6588 break;
6590 CASE_INT_FN (BUILT_IN_FFS):
6591 case BUILT_IN_FFSIMAX:
6592 target = expand_builtin_unop (target_mode, exp, target,
6593 subtarget, ffs_optab);
6594 if (target)
6595 return target;
6596 break;
6598 CASE_INT_FN (BUILT_IN_CLZ):
6599 case BUILT_IN_CLZIMAX:
6600 target = expand_builtin_unop (target_mode, exp, target,
6601 subtarget, clz_optab);
6602 if (target)
6603 return target;
6604 break;
6606 CASE_INT_FN (BUILT_IN_CTZ):
6607 case BUILT_IN_CTZIMAX:
6608 target = expand_builtin_unop (target_mode, exp, target,
6609 subtarget, ctz_optab);
6610 if (target)
6611 return target;
6612 break;
6614 CASE_INT_FN (BUILT_IN_POPCOUNT):
6615 case BUILT_IN_POPCOUNTIMAX:
6616 target = expand_builtin_unop (target_mode, exp, target,
6617 subtarget, popcount_optab);
6618 if (target)
6619 return target;
6620 break;
6622 CASE_INT_FN (BUILT_IN_PARITY):
6623 case BUILT_IN_PARITYIMAX:
6624 target = expand_builtin_unop (target_mode, exp, target,
6625 subtarget, parity_optab);
6626 if (target)
6627 return target;
6628 break;
6630 case BUILT_IN_STRLEN:
6631 target = expand_builtin_strlen (exp, target, target_mode);
6632 if (target)
6633 return target;
6634 break;
6636 case BUILT_IN_STRCPY:
6637 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6638 if (target)
6639 return target;
6640 break;
6642 case BUILT_IN_STRNCPY:
6643 target = expand_builtin_strncpy (exp, target, mode);
6644 if (target)
6645 return target;
6646 break;
6648 case BUILT_IN_STPCPY:
6649 target = expand_builtin_stpcpy (exp, target, mode);
6650 if (target)
6651 return target;
6652 break;
6654 case BUILT_IN_STRCAT:
6655 target = expand_builtin_strcat (fndecl, exp, target, mode);
6656 if (target)
6657 return target;
6658 break;
6660 case BUILT_IN_STRNCAT:
6661 target = expand_builtin_strncat (exp, target, mode);
6662 if (target)
6663 return target;
6664 break;
6666 case BUILT_IN_STRSPN:
6667 target = expand_builtin_strspn (exp, target, mode);
6668 if (target)
6669 return target;
6670 break;
6672 case BUILT_IN_STRCSPN:
6673 target = expand_builtin_strcspn (exp, target, mode);
6674 if (target)
6675 return target;
6676 break;
6678 case BUILT_IN_STRSTR:
6679 target = expand_builtin_strstr (exp, target, mode);
6680 if (target)
6681 return target;
6682 break;
6684 case BUILT_IN_STRPBRK:
6685 target = expand_builtin_strpbrk (exp, target, mode);
6686 if (target)
6687 return target;
6688 break;
6690 case BUILT_IN_INDEX:
6691 case BUILT_IN_STRCHR:
6692 target = expand_builtin_strchr (exp, target, mode);
6693 if (target)
6694 return target;
6695 break;
6697 case BUILT_IN_RINDEX:
6698 case BUILT_IN_STRRCHR:
6699 target = expand_builtin_strrchr (exp, target, mode);
6700 if (target)
6701 return target;
6702 break;
6704 case BUILT_IN_MEMCPY:
6705 target = expand_builtin_memcpy (exp, target, mode);
6706 if (target)
6707 return target;
6708 break;
6710 case BUILT_IN_MEMPCPY:
6711 target = expand_builtin_mempcpy (exp, target, mode);
6712 if (target)
6713 return target;
6714 break;
6716 case BUILT_IN_MEMMOVE:
6717 target = expand_builtin_memmove (exp, target, mode, ignore);
6718 if (target)
6719 return target;
6720 break;
6722 case BUILT_IN_BCOPY:
6723 target = expand_builtin_bcopy (exp, ignore);
6724 if (target)
6725 return target;
6726 break;
6728 case BUILT_IN_MEMSET:
6729 target = expand_builtin_memset (exp, target, mode);
6730 if (target)
6731 return target;
6732 break;
6734 case BUILT_IN_BZERO:
6735 target = expand_builtin_bzero (exp);
6736 if (target)
6737 return target;
6738 break;
6740 case BUILT_IN_STRCMP:
6741 target = expand_builtin_strcmp (exp, target, mode);
6742 if (target)
6743 return target;
6744 break;
6746 case BUILT_IN_STRNCMP:
6747 target = expand_builtin_strncmp (exp, target, mode);
6748 if (target)
6749 return target;
6750 break;
6752 case BUILT_IN_MEMCHR:
6753 target = expand_builtin_memchr (exp, target, mode);
6754 if (target)
6755 return target;
6756 break;
6758 case BUILT_IN_BCMP:
6759 case BUILT_IN_MEMCMP:
6760 target = expand_builtin_memcmp (exp, target, mode);
6761 if (target)
6762 return target;
6763 break;
6765 case BUILT_IN_SETJMP:
6766 /* This should have been lowered to the builtins below. */
6767 gcc_unreachable ();
6769 case BUILT_IN_SETJMP_SETUP:
6770 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6771 and the receiver label. */
6772 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6774 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6775 VOIDmode, EXPAND_NORMAL);
6776 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6777 rtx label_r = label_rtx (label);
6779 /* This is copied from the handling of non-local gotos. */
6780 expand_builtin_setjmp_setup (buf_addr, label_r);
6781 nonlocal_goto_handler_labels
6782 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6783 nonlocal_goto_handler_labels);
6784 /* ??? Do not let expand_label treat us as such since we would
6785 not want to be both on the list of non-local labels and on
6786 the list of forced labels. */
6787 FORCED_LABEL (label) = 0;
6788 return const0_rtx;
6790 break;
6792 case BUILT_IN_SETJMP_DISPATCHER:
6793 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6794 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6796 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6797 rtx label_r = label_rtx (label);
6799 /* Remove the dispatcher label from the list of non-local labels
6800 since the receiver labels have been added to it above. */
6801 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6802 return const0_rtx;
6804 break;
6806 case BUILT_IN_SETJMP_RECEIVER:
6807 /* __builtin_setjmp_receiver is passed the receiver label. */
6808 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6810 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6811 rtx label_r = label_rtx (label);
6813 expand_builtin_setjmp_receiver (label_r);
6814 return const0_rtx;
6816 break;
6818 /* __builtin_longjmp is passed a pointer to an array of five words.
6819 It's similar to the C library longjmp function but works with
6820 __builtin_setjmp above. */
6821 case BUILT_IN_LONGJMP:
6822 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6824 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6825 VOIDmode, EXPAND_NORMAL);
6826 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6828 if (value != const1_rtx)
6830 error ("%<__builtin_longjmp%> second argument must be 1");
6831 return const0_rtx;
6834 expand_builtin_longjmp (buf_addr, value);
6835 return const0_rtx;
6837 break;
6839 case BUILT_IN_NONLOCAL_GOTO:
6840 target = expand_builtin_nonlocal_goto (exp);
6841 if (target)
6842 return target;
6843 break;
6845 /* This updates the setjmp buffer that is its argument with the value
6846 of the current stack pointer. */
6847 case BUILT_IN_UPDATE_SETJMP_BUF:
6848 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6850 rtx buf_addr
6851 = expand_normal (CALL_EXPR_ARG (exp, 0));
6853 expand_builtin_update_setjmp_buf (buf_addr);
6854 return const0_rtx;
6856 break;
6858 case BUILT_IN_TRAP:
6859 expand_builtin_trap ();
6860 return const0_rtx;
6862 case BUILT_IN_UNREACHABLE:
6863 expand_builtin_unreachable ();
6864 return const0_rtx;
6866 case BUILT_IN_PRINTF:
6867 target = expand_builtin_printf (exp, target, mode, false);
6868 if (target)
6869 return target;
6870 break;
6872 case BUILT_IN_PRINTF_UNLOCKED:
6873 target = expand_builtin_printf (exp, target, mode, true);
6874 if (target)
6875 return target;
6876 break;
6878 case BUILT_IN_FPUTS:
6879 target = expand_builtin_fputs (exp, target, false);
6880 if (target)
6881 return target;
6882 break;
6883 case BUILT_IN_FPUTS_UNLOCKED:
6884 target = expand_builtin_fputs (exp, target, true);
6885 if (target)
6886 return target;
6887 break;
6889 case BUILT_IN_FPRINTF:
6890 target = expand_builtin_fprintf (exp, target, mode, false);
6891 if (target)
6892 return target;
6893 break;
6895 case BUILT_IN_FPRINTF_UNLOCKED:
6896 target = expand_builtin_fprintf (exp, target, mode, true);
6897 if (target)
6898 return target;
6899 break;
6901 case BUILT_IN_SPRINTF:
6902 target = expand_builtin_sprintf (exp, target, mode);
6903 if (target)
6904 return target;
6905 break;
6907 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6908 case BUILT_IN_SIGNBITD32:
6909 case BUILT_IN_SIGNBITD64:
6910 case BUILT_IN_SIGNBITD128:
6911 target = expand_builtin_signbit (exp, target);
6912 if (target)
6913 return target;
6914 break;
6916 /* Various hooks for the DWARF 2 __throw routine. */
6917 case BUILT_IN_UNWIND_INIT:
6918 expand_builtin_unwind_init ();
6919 return const0_rtx;
6920 case BUILT_IN_DWARF_CFA:
6921 return virtual_cfa_rtx;
6922 #ifdef DWARF2_UNWIND_INFO
6923 case BUILT_IN_DWARF_SP_COLUMN:
6924 return expand_builtin_dwarf_sp_column ();
6925 case BUILT_IN_INIT_DWARF_REG_SIZES:
6926 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6927 return const0_rtx;
6928 #endif
6929 case BUILT_IN_FROB_RETURN_ADDR:
6930 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6931 case BUILT_IN_EXTRACT_RETURN_ADDR:
6932 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6933 case BUILT_IN_EH_RETURN:
6934 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6935 CALL_EXPR_ARG (exp, 1));
6936 return const0_rtx;
6937 #ifdef EH_RETURN_DATA_REGNO
6938 case BUILT_IN_EH_RETURN_DATA_REGNO:
6939 return expand_builtin_eh_return_data_regno (exp);
6940 #endif
6941 case BUILT_IN_EXTEND_POINTER:
6942 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6944 case BUILT_IN_VA_START:
6945 return expand_builtin_va_start (exp);
6946 case BUILT_IN_VA_END:
6947 return expand_builtin_va_end (exp);
6948 case BUILT_IN_VA_COPY:
6949 return expand_builtin_va_copy (exp);
6950 case BUILT_IN_EXPECT:
6951 return expand_builtin_expect (exp, target);
6952 case BUILT_IN_PREFETCH:
6953 expand_builtin_prefetch (exp);
6954 return const0_rtx;
6956 case BUILT_IN_PROFILE_FUNC_ENTER:
6957 return expand_builtin_profile_func (false);
6958 case BUILT_IN_PROFILE_FUNC_EXIT:
6959 return expand_builtin_profile_func (true);
6961 case BUILT_IN_INIT_TRAMPOLINE:
6962 return expand_builtin_init_trampoline (exp);
6963 case BUILT_IN_ADJUST_TRAMPOLINE:
6964 return expand_builtin_adjust_trampoline (exp);
6966 case BUILT_IN_FORK:
6967 case BUILT_IN_EXECL:
6968 case BUILT_IN_EXECV:
6969 case BUILT_IN_EXECLP:
6970 case BUILT_IN_EXECLE:
6971 case BUILT_IN_EXECVP:
6972 case BUILT_IN_EXECVE:
6973 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6974 if (target)
6975 return target;
6976 break;
6978 case BUILT_IN_FETCH_AND_ADD_1:
6979 case BUILT_IN_FETCH_AND_ADD_2:
6980 case BUILT_IN_FETCH_AND_ADD_4:
6981 case BUILT_IN_FETCH_AND_ADD_8:
6982 case BUILT_IN_FETCH_AND_ADD_16:
6983 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6984 target = expand_builtin_sync_operation (mode, exp, PLUS,
6985 false, target, ignore);
6986 if (target)
6987 return target;
6988 break;
6990 case BUILT_IN_FETCH_AND_SUB_1:
6991 case BUILT_IN_FETCH_AND_SUB_2:
6992 case BUILT_IN_FETCH_AND_SUB_4:
6993 case BUILT_IN_FETCH_AND_SUB_8:
6994 case BUILT_IN_FETCH_AND_SUB_16:
6995 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6996 target = expand_builtin_sync_operation (mode, exp, MINUS,
6997 false, target, ignore);
6998 if (target)
6999 return target;
7000 break;
7002 case BUILT_IN_FETCH_AND_OR_1:
7003 case BUILT_IN_FETCH_AND_OR_2:
7004 case BUILT_IN_FETCH_AND_OR_4:
7005 case BUILT_IN_FETCH_AND_OR_8:
7006 case BUILT_IN_FETCH_AND_OR_16:
7007 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
7008 target = expand_builtin_sync_operation (mode, exp, IOR,
7009 false, target, ignore);
7010 if (target)
7011 return target;
7012 break;
7014 case BUILT_IN_FETCH_AND_AND_1:
7015 case BUILT_IN_FETCH_AND_AND_2:
7016 case BUILT_IN_FETCH_AND_AND_4:
7017 case BUILT_IN_FETCH_AND_AND_8:
7018 case BUILT_IN_FETCH_AND_AND_16:
7019 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
7020 target = expand_builtin_sync_operation (mode, exp, AND,
7021 false, target, ignore);
7022 if (target)
7023 return target;
7024 break;
7026 case BUILT_IN_FETCH_AND_XOR_1:
7027 case BUILT_IN_FETCH_AND_XOR_2:
7028 case BUILT_IN_FETCH_AND_XOR_4:
7029 case BUILT_IN_FETCH_AND_XOR_8:
7030 case BUILT_IN_FETCH_AND_XOR_16:
7031 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
7032 target = expand_builtin_sync_operation (mode, exp, XOR,
7033 false, target, ignore);
7034 if (target)
7035 return target;
7036 break;
7038 case BUILT_IN_FETCH_AND_NAND_1:
7039 case BUILT_IN_FETCH_AND_NAND_2:
7040 case BUILT_IN_FETCH_AND_NAND_4:
7041 case BUILT_IN_FETCH_AND_NAND_8:
7042 case BUILT_IN_FETCH_AND_NAND_16:
7043 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
7044 target = expand_builtin_sync_operation (mode, exp, NOT,
7045 false, target, ignore);
7046 if (target)
7047 return target;
7048 break;
7050 case BUILT_IN_ADD_AND_FETCH_1:
7051 case BUILT_IN_ADD_AND_FETCH_2:
7052 case BUILT_IN_ADD_AND_FETCH_4:
7053 case BUILT_IN_ADD_AND_FETCH_8:
7054 case BUILT_IN_ADD_AND_FETCH_16:
7055 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
7056 target = expand_builtin_sync_operation (mode, exp, PLUS,
7057 true, target, ignore);
7058 if (target)
7059 return target;
7060 break;
7062 case BUILT_IN_SUB_AND_FETCH_1:
7063 case BUILT_IN_SUB_AND_FETCH_2:
7064 case BUILT_IN_SUB_AND_FETCH_4:
7065 case BUILT_IN_SUB_AND_FETCH_8:
7066 case BUILT_IN_SUB_AND_FETCH_16:
7067 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
7068 target = expand_builtin_sync_operation (mode, exp, MINUS,
7069 true, target, ignore);
7070 if (target)
7071 return target;
7072 break;
7074 case BUILT_IN_OR_AND_FETCH_1:
7075 case BUILT_IN_OR_AND_FETCH_2:
7076 case BUILT_IN_OR_AND_FETCH_4:
7077 case BUILT_IN_OR_AND_FETCH_8:
7078 case BUILT_IN_OR_AND_FETCH_16:
7079 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
7080 target = expand_builtin_sync_operation (mode, exp, IOR,
7081 true, target, ignore);
7082 if (target)
7083 return target;
7084 break;
7086 case BUILT_IN_AND_AND_FETCH_1:
7087 case BUILT_IN_AND_AND_FETCH_2:
7088 case BUILT_IN_AND_AND_FETCH_4:
7089 case BUILT_IN_AND_AND_FETCH_8:
7090 case BUILT_IN_AND_AND_FETCH_16:
7091 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
7092 target = expand_builtin_sync_operation (mode, exp, AND,
7093 true, target, ignore);
7094 if (target)
7095 return target;
7096 break;
7098 case BUILT_IN_XOR_AND_FETCH_1:
7099 case BUILT_IN_XOR_AND_FETCH_2:
7100 case BUILT_IN_XOR_AND_FETCH_4:
7101 case BUILT_IN_XOR_AND_FETCH_8:
7102 case BUILT_IN_XOR_AND_FETCH_16:
7103 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
7104 target = expand_builtin_sync_operation (mode, exp, XOR,
7105 true, target, ignore);
7106 if (target)
7107 return target;
7108 break;
7110 case BUILT_IN_NAND_AND_FETCH_1:
7111 case BUILT_IN_NAND_AND_FETCH_2:
7112 case BUILT_IN_NAND_AND_FETCH_4:
7113 case BUILT_IN_NAND_AND_FETCH_8:
7114 case BUILT_IN_NAND_AND_FETCH_16:
7115 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
7116 target = expand_builtin_sync_operation (mode, exp, NOT,
7117 true, target, ignore);
7118 if (target)
7119 return target;
7120 break;
7122 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
7123 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
7124 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
7125 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
7126 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
7127 if (mode == VOIDmode)
7128 mode = TYPE_MODE (boolean_type_node);
7129 if (!target || !register_operand (target, mode))
7130 target = gen_reg_rtx (mode);
7132 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
7133 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7134 if (target)
7135 return target;
7136 break;
7138 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
7139 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
7140 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
7141 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
7142 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
7143 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
7144 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7145 if (target)
7146 return target;
7147 break;
7149 case BUILT_IN_LOCK_TEST_AND_SET_1:
7150 case BUILT_IN_LOCK_TEST_AND_SET_2:
7151 case BUILT_IN_LOCK_TEST_AND_SET_4:
7152 case BUILT_IN_LOCK_TEST_AND_SET_8:
7153 case BUILT_IN_LOCK_TEST_AND_SET_16:
7154 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
7155 target = expand_builtin_lock_test_and_set (mode, exp, target);
7156 if (target)
7157 return target;
7158 break;
7160 case BUILT_IN_LOCK_RELEASE_1:
7161 case BUILT_IN_LOCK_RELEASE_2:
7162 case BUILT_IN_LOCK_RELEASE_4:
7163 case BUILT_IN_LOCK_RELEASE_8:
7164 case BUILT_IN_LOCK_RELEASE_16:
7165 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
7166 expand_builtin_lock_release (mode, exp);
7167 return const0_rtx;
7169 case BUILT_IN_SYNCHRONIZE:
7170 expand_builtin_synchronize ();
7171 return const0_rtx;
7173 case BUILT_IN_OBJECT_SIZE:
7174 return expand_builtin_object_size (exp);
7176 case BUILT_IN_MEMCPY_CHK:
7177 case BUILT_IN_MEMPCPY_CHK:
7178 case BUILT_IN_MEMMOVE_CHK:
7179 case BUILT_IN_MEMSET_CHK:
7180 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7181 if (target)
7182 return target;
7183 break;
7185 case BUILT_IN_STRCPY_CHK:
7186 case BUILT_IN_STPCPY_CHK:
7187 case BUILT_IN_STRNCPY_CHK:
7188 case BUILT_IN_STRCAT_CHK:
7189 case BUILT_IN_STRNCAT_CHK:
7190 case BUILT_IN_SNPRINTF_CHK:
7191 case BUILT_IN_VSNPRINTF_CHK:
7192 maybe_emit_chk_warning (exp, fcode);
7193 break;
7195 case BUILT_IN_SPRINTF_CHK:
7196 case BUILT_IN_VSPRINTF_CHK:
7197 maybe_emit_sprintf_chk_warning (exp, fcode);
7198 break;
7200 case BUILT_IN_FREE:
7201 maybe_emit_free_warning (exp);
7202 break;
7204 default: /* just do library call, if unknown builtin */
7205 break;
7208 /* The switch statement above can drop through to cause the function
7209 to be called normally. */
7210 return expand_call (exp, target, ignore);
7213 /* Determine whether a tree node represents a call to a built-in
7214 function. If the tree T is a call to a built-in function with
7215 the right number of arguments of the appropriate types, return
7216 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7217 Otherwise the return value is END_BUILTINS. */
7219 enum built_in_function
7220 builtin_mathfn_code (const_tree t)
7222 const_tree fndecl, arg, parmlist;
7223 const_tree argtype, parmtype;
7224 const_call_expr_arg_iterator iter;
7226 if (TREE_CODE (t) != CALL_EXPR
7227 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7228 return END_BUILTINS;
7230 fndecl = get_callee_fndecl (t);
7231 if (fndecl == NULL_TREE
7232 || TREE_CODE (fndecl) != FUNCTION_DECL
7233 || ! DECL_BUILT_IN (fndecl)
7234 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7235 return END_BUILTINS;
7237 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7238 init_const_call_expr_arg_iterator (t, &iter);
7239 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7241 /* If a function doesn't take a variable number of arguments,
7242 the last element in the list will have type `void'. */
7243 parmtype = TREE_VALUE (parmlist);
7244 if (VOID_TYPE_P (parmtype))
7246 if (more_const_call_expr_args_p (&iter))
7247 return END_BUILTINS;
7248 return DECL_FUNCTION_CODE (fndecl);
7251 if (! more_const_call_expr_args_p (&iter))
7252 return END_BUILTINS;
7254 arg = next_const_call_expr_arg (&iter);
7255 argtype = TREE_TYPE (arg);
7257 if (SCALAR_FLOAT_TYPE_P (parmtype))
7259 if (! SCALAR_FLOAT_TYPE_P (argtype))
7260 return END_BUILTINS;
7262 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7264 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7265 return END_BUILTINS;
7267 else if (POINTER_TYPE_P (parmtype))
7269 if (! POINTER_TYPE_P (argtype))
7270 return END_BUILTINS;
7272 else if (INTEGRAL_TYPE_P (parmtype))
7274 if (! INTEGRAL_TYPE_P (argtype))
7275 return END_BUILTINS;
7277 else
7278 return END_BUILTINS;
7281 /* Variable-length argument list. */
7282 return DECL_FUNCTION_CODE (fndecl);
7285 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7286 evaluate to a constant. */
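/* For example, __builtin_constant_p (3 * 4) folds to 1 here because the
   argument is already an INTEGER_CST.  For a plain non-constant argument
   NULL_TREE is returned so later passes may still fold it, except when
   compiling an initializer or outside a function body (cfun == 0), in
   which case 0 is returned immediately; see the checks below.  */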
7288 static tree
7289 fold_builtin_constant_p (tree arg)
7291 /* We return 1 for a numeric type that's known to be a constant
7292 value at compile-time or for an aggregate type that's a
7293 literal constant. */
7294 STRIP_NOPS (arg);
7296 /* If we know this is a constant, return the constant one. */
7297 if (CONSTANT_CLASS_P (arg)
7298 || (TREE_CODE (arg) == CONSTRUCTOR
7299 && TREE_CONSTANT (arg)))
7300 return integer_one_node;
7301 if (TREE_CODE (arg) == ADDR_EXPR)
7303 tree op = TREE_OPERAND (arg, 0);
7304 if (TREE_CODE (op) == STRING_CST
7305 || (TREE_CODE (op) == ARRAY_REF
7306 && integer_zerop (TREE_OPERAND (op, 1))
7307 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7308 return integer_one_node;
7311 /* If this expression has side effects, show we don't know it to be a
7312 constant. Likewise if it's a pointer or aggregate type, since in
7313 those cases we only want literals, as those are only optimized
7314 when generating RTL, not later.
7315 And finally, if we are compiling an initializer, not code, we
7316 need to return a definite result now; there's not going to be any
7317 more optimization done. */
7318 if (TREE_SIDE_EFFECTS (arg)
7319 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7320 || POINTER_TYPE_P (TREE_TYPE (arg))
7321 || cfun == 0
7322 || folding_initializer)
7323 return integer_zero_node;
7325 return NULL_TREE;
7328 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7329 return it as a truthvalue. */
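/* The result has the shape  __builtin_expect ((long) PRED, (long) EXPECTED) != 0,
   matching the NE_EXPR built below against zero of the call's return type.  */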
7331 static tree
7332 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
7334 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7336 fn = built_in_decls[BUILT_IN_EXPECT];
7337 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7338 ret_type = TREE_TYPE (TREE_TYPE (fn));
7339 pred_type = TREE_VALUE (arg_types);
7340 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7342 pred = fold_convert_loc (loc, pred_type, pred);
7343 expected = fold_convert_loc (loc, expected_type, expected);
7344 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
7346 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7347 build_int_cst (ret_type, 0));
7350 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7351 NULL_TREE if no simplification is possible. */
7353 static tree
7354 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
7356 tree inner, fndecl;
7357 enum tree_code code;
7359 /* If this is a builtin_expect within a builtin_expect, keep the
7360 inner one. See through a comparison against a constant. It
7361 might have been added to create a truthvalue. */
7362 inner = arg0;
7363 if (COMPARISON_CLASS_P (inner)
7364 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7365 inner = TREE_OPERAND (inner, 0);
7367 if (TREE_CODE (inner) == CALL_EXPR
7368 && (fndecl = get_callee_fndecl (inner))
7369 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7370 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7371 return arg0;
7373 /* Distribute the expected value over short-circuiting operators.
7374 See through the cast from truthvalue_type_node to long. */
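/* For instance, __builtin_expect (a && b, 1) is rewritten so that each
   operand carries the expectation, roughly
   (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0),
   using the predicates built below.  */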
7375 inner = arg0;
7376 while (TREE_CODE (inner) == NOP_EXPR
7377 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7378 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7379 inner = TREE_OPERAND (inner, 0);
7381 code = TREE_CODE (inner);
7382 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7384 tree op0 = TREE_OPERAND (inner, 0);
7385 tree op1 = TREE_OPERAND (inner, 1);
7387 op0 = build_builtin_expect_predicate (loc, op0, arg1);
7388 op1 = build_builtin_expect_predicate (loc, op1, arg1);
7389 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7391 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7394 /* If the argument isn't invariant then there's nothing else we can do. */
7395 if (!TREE_CONSTANT (arg0))
7396 return NULL_TREE;
7398 /* If we expect that a comparison against the argument will fold to
7399 a constant, return the constant. In practice, this means a true
7400 constant or the address of a non-weak symbol. */
7401 inner = arg0;
7402 STRIP_NOPS (inner);
7403 if (TREE_CODE (inner) == ADDR_EXPR)
7407 inner = TREE_OPERAND (inner, 0);
7409 while (TREE_CODE (inner) == COMPONENT_REF
7410 || TREE_CODE (inner) == ARRAY_REF);
7411 if ((TREE_CODE (inner) == VAR_DECL
7412 || TREE_CODE (inner) == FUNCTION_DECL)
7413 && DECL_WEAK (inner))
7414 return NULL_TREE;
7417 /* Otherwise, ARG0 already has the proper type for the return value. */
7418 return arg0;
7421 /* Fold a call to __builtin_classify_type with argument ARG. */
7423 static tree
7424 fold_builtin_classify_type (tree arg)
7426 if (arg == 0)
7427 return build_int_cst (NULL_TREE, no_type_class);
7429 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7432 /* Fold a call to __builtin_strlen with argument ARG. */
7434 static tree
7435 fold_builtin_strlen (location_t loc, tree arg)
7437 if (!validate_arg (arg, POINTER_TYPE))
7438 return NULL_TREE;
7439 else
7441 tree len = c_strlen (arg, 0);
7443 if (len)
7445 /* Convert from the internal "sizetype" type to "size_t". */
7446 if (size_type_node)
7447 len = fold_convert_loc (loc, size_type_node, len);
7448 return len;
7451 return NULL_TREE;
7455 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7457 static tree
7458 fold_builtin_inf (location_t loc, tree type, int warn)
7460 REAL_VALUE_TYPE real;
7462 /* __builtin_inff is intended to be usable to define INFINITY on all
7463 targets. If an infinity is not available, INFINITY expands "to a
7464 positive constant of type float that overflows at translation
7465 time", footnote "In this case, using INFINITY will violate the
7466 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7467 Thus we pedwarn to ensure this constraint violation is
7468 diagnosed. */
7469 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7470 pedwarn (loc, 0, "target format does not support infinity");
7472 real_inf (&real);
7473 return build_real (type, real);
7476 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7478 static tree
7479 fold_builtin_nan (tree arg, tree type, int quiet)
7481 REAL_VALUE_TYPE real;
7482 const char *str;
7484 if (!validate_arg (arg, POINTER_TYPE))
7485 return NULL_TREE;
7486 str = c_getstr (arg);
7487 if (!str)
7488 return NULL_TREE;
7490 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7491 return NULL_TREE;
7493 return build_real (type, real);
7496 /* Return true if the floating point expression T has an integer value.
7497 We also allow +Inf, -Inf and NaN to be considered integer values. */
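/* E.g. (double) i for an integer I, floor (x), and
   fmin (trunc (a), ceil (b)) are all recognized as integer valued.  */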
7499 static bool
7500 integer_valued_real_p (tree t)
7502 switch (TREE_CODE (t))
7504 case FLOAT_EXPR:
7505 return true;
7507 case ABS_EXPR:
7508 case SAVE_EXPR:
7509 return integer_valued_real_p (TREE_OPERAND (t, 0));
7511 case COMPOUND_EXPR:
7512 case MODIFY_EXPR:
7513 case BIND_EXPR:
7514 return integer_valued_real_p (TREE_OPERAND (t, 1));
7516 case PLUS_EXPR:
7517 case MINUS_EXPR:
7518 case MULT_EXPR:
7519 case MIN_EXPR:
7520 case MAX_EXPR:
7521 return integer_valued_real_p (TREE_OPERAND (t, 0))
7522 && integer_valued_real_p (TREE_OPERAND (t, 1));
7524 case COND_EXPR:
7525 return integer_valued_real_p (TREE_OPERAND (t, 1))
7526 && integer_valued_real_p (TREE_OPERAND (t, 2));
7528 case REAL_CST:
7529 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7531 case NOP_EXPR:
7533 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7534 if (TREE_CODE (type) == INTEGER_TYPE)
7535 return true;
7536 if (TREE_CODE (type) == REAL_TYPE)
7537 return integer_valued_real_p (TREE_OPERAND (t, 0));
7538 break;
7541 case CALL_EXPR:
7542 switch (builtin_mathfn_code (t))
7544 CASE_FLT_FN (BUILT_IN_CEIL):
7545 CASE_FLT_FN (BUILT_IN_FLOOR):
7546 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7547 CASE_FLT_FN (BUILT_IN_RINT):
7548 CASE_FLT_FN (BUILT_IN_ROUND):
7549 CASE_FLT_FN (BUILT_IN_TRUNC):
7550 return true;
7552 CASE_FLT_FN (BUILT_IN_FMIN):
7553 CASE_FLT_FN (BUILT_IN_FMAX):
7554 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7555 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7557 default:
7558 break;
7560 break;
7562 default:
7563 break;
7565 return false;
7568 /* FNDECL is assumed to be a builtin where truncation can be propagated
7569 across (for instance floor((double)f) == (double)floorf (f)).
7570 Do the transformation for a call with argument ARG. */
7572 static tree
7573 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7575 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7577 if (!validate_arg (arg, REAL_TYPE))
7578 return NULL_TREE;
7580 /* Integer rounding functions are idempotent. */
7581 if (fcode == builtin_mathfn_code (arg))
7582 return arg;
7584 /* If argument is already integer valued, and we don't need to worry
7585 about setting errno, there's no need to perform rounding. */
7586 if (! flag_errno_math && integer_valued_real_p (arg))
7587 return arg;
7589 if (optimize)
7591 tree arg0 = strip_float_extensions (arg);
7592 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7593 tree newtype = TREE_TYPE (arg0);
7594 tree decl;
7596 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7597 && (decl = mathfn_built_in (newtype, fcode)))
7598 return fold_convert_loc (loc, ftype,
7599 build_call_expr_loc (loc, decl, 1,
7600 fold_convert_loc (loc,
7601 newtype,
7602 arg0)));
7604 return NULL_TREE;
7607 /* FNDECL is assumed to be a builtin that can narrow the FP type of
7608 the argument, for instance lround((double)f) -> lroundf (f).
7609 Do the transformation for a call with argument ARG. */
7611 static tree
7612 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7614 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7616 if (!validate_arg (arg, REAL_TYPE))
7617 return NULL_TREE;
7619 /* If argument is already integer valued, and we don't need to worry
7620 about setting errno, there's no need to perform rounding. */
7621 if (! flag_errno_math && integer_valued_real_p (arg))
7622 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7623 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7625 if (optimize)
7627 tree ftype = TREE_TYPE (arg);
7628 tree arg0 = strip_float_extensions (arg);
7629 tree newtype = TREE_TYPE (arg0);
7630 tree decl;
7632 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7633 && (decl = mathfn_built_in (newtype, fcode)))
7634 return build_call_expr_loc (loc, decl, 1,
7635 fold_convert_loc (loc, newtype, arg0));
7638 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7639 sizeof (long long) == sizeof (long). */
7640 if (TYPE_PRECISION (long_long_integer_type_node)
7641 == TYPE_PRECISION (long_integer_type_node))
7643 tree newfn = NULL_TREE;
7644 switch (fcode)
7646 CASE_FLT_FN (BUILT_IN_LLCEIL):
7647 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7648 break;
7650 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7651 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7652 break;
7654 CASE_FLT_FN (BUILT_IN_LLROUND):
7655 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7656 break;
7658 CASE_FLT_FN (BUILT_IN_LLRINT):
7659 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7660 break;
7662 default:
7663 break;
7666 if (newfn)
7668 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7669 return fold_convert_loc (loc,
7670 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7674 return NULL_TREE;
7677 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7678 return type. Return NULL_TREE if no simplification can be made. */
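/* With -funsafe-math-optimizations, and when optimizing for speed, the
   generic fallback below expands cabs (x + y*I) into sqrt (x*x + y*y).  */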
7680 static tree
7681 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7683 tree res;
7685 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7686 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7687 return NULL_TREE;
7689 /* Calculate the result when the argument is a constant. */
7690 if (TREE_CODE (arg) == COMPLEX_CST
7691 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7692 type, mpfr_hypot)))
7693 return res;
7695 if (TREE_CODE (arg) == COMPLEX_EXPR)
7697 tree real = TREE_OPERAND (arg, 0);
7698 tree imag = TREE_OPERAND (arg, 1);
7700 /* If either part is zero, cabs is fabs of the other. */
7701 if (real_zerop (real))
7702 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7703 if (real_zerop (imag))
7704 return fold_build1_loc (loc, ABS_EXPR, type, real);
7706 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7707 if (flag_unsafe_math_optimizations
7708 && operand_equal_p (real, imag, OEP_PURE_SAME))
7710 const REAL_VALUE_TYPE sqrt2_trunc
7711 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7712 STRIP_NOPS (real);
7713 return fold_build2_loc (loc, MULT_EXPR, type,
7714 fold_build1_loc (loc, ABS_EXPR, type, real),
7715 build_real (type, sqrt2_trunc));
7719 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7720 if (TREE_CODE (arg) == NEGATE_EXPR
7721 || TREE_CODE (arg) == CONJ_EXPR)
7722 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7724 /* Don't do this when optimizing for size. */
7725 if (flag_unsafe_math_optimizations
7726 && optimize && optimize_function_for_speed_p (cfun))
7728 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7730 if (sqrtfn != NULL_TREE)
7732 tree rpart, ipart, result;
7734 arg = builtin_save_expr (arg);
7736 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7737 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7739 rpart = builtin_save_expr (rpart);
7740 ipart = builtin_save_expr (ipart);
7742 result = fold_build2_loc (loc, PLUS_EXPR, type,
7743 fold_build2_loc (loc, MULT_EXPR, type,
7744 rpart, rpart),
7745 fold_build2_loc (loc, MULT_EXPR, type,
7746 ipart, ipart));
7748 return build_call_expr_loc (loc, sqrtfn, 1, result);
7752 return NULL_TREE;
7755 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7756 Return NULL_TREE if no simplification can be made. */
7758 static tree
7759 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7762 enum built_in_function fcode;
7763 tree res;
7765 if (!validate_arg (arg, REAL_TYPE))
7766 return NULL_TREE;
7768 /* Calculate the result when the argument is a constant. */
7769 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7770 return res;
7772 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7773 fcode = builtin_mathfn_code (arg);
7774 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7776 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7777 arg = fold_build2_loc (loc, MULT_EXPR, type,
7778 CALL_EXPR_ARG (arg, 0),
7779 build_real (type, dconsthalf));
7780 return build_call_expr_loc (loc, expfn, 1, arg);
7783 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7784 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7786 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7788 if (powfn)
7790 tree arg0 = CALL_EXPR_ARG (arg, 0);
7791 tree tree_root;
7792 /* The inner root was either sqrt or cbrt. */
7793 /* This was a conditional expression but it triggered a bug
7794 in Sun C 5.5. */
7795 REAL_VALUE_TYPE dconstroot;
7796 if (BUILTIN_SQRT_P (fcode))
7797 dconstroot = dconsthalf;
7798 else
7799 dconstroot = dconst_third ();
7801 /* Adjust for the outer root. */
7802 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7803 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7804 tree_root = build_real (type, dconstroot);
7805 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7809 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7810 if (flag_unsafe_math_optimizations
7811 && (fcode == BUILT_IN_POW
7812 || fcode == BUILT_IN_POWF
7813 || fcode == BUILT_IN_POWL))
7815 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7816 tree arg0 = CALL_EXPR_ARG (arg, 0);
7817 tree arg1 = CALL_EXPR_ARG (arg, 1);
7818 tree narg1;
7819 if (!tree_expr_nonnegative_p (arg0))
7820 arg0 = build1 (ABS_EXPR, type, arg0);
7821 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7822 build_real (type, dconsthalf));
7823 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7826 return NULL_TREE;
7829 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7830 Return NULL_TREE if no simplification can be made. */
7832 static tree
7833 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7835 const enum built_in_function fcode = builtin_mathfn_code (arg);
7836 tree res;
7838 if (!validate_arg (arg, REAL_TYPE))
7839 return NULL_TREE;
7841 /* Calculate the result when the argument is a constant. */
7842 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7843 return res;
7845 if (flag_unsafe_math_optimizations)
7847 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7848 if (BUILTIN_EXPONENT_P (fcode))
7850 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7851 const REAL_VALUE_TYPE third_trunc =
7852 real_value_truncate (TYPE_MODE (type), dconst_third ());
7853 arg = fold_build2_loc (loc, MULT_EXPR, type,
7854 CALL_EXPR_ARG (arg, 0),
7855 build_real (type, third_trunc));
7856 return build_call_expr_loc (loc, expfn, 1, arg);
7859 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7860 if (BUILTIN_SQRT_P (fcode))
7862 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7864 if (powfn)
7866 tree arg0 = CALL_EXPR_ARG (arg, 0);
7867 tree tree_root;
7868 REAL_VALUE_TYPE dconstroot = dconst_third ();
7870 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7871 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7872 tree_root = build_real (type, dconstroot);
7873 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7877 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7878 if (BUILTIN_CBRT_P (fcode))
7880 tree arg0 = CALL_EXPR_ARG (arg, 0);
7881 if (tree_expr_nonnegative_p (arg0))
7883 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7885 if (powfn)
7887 tree tree_root;
7888 REAL_VALUE_TYPE dconstroot;
7890 real_arithmetic (&dconstroot, MULT_EXPR,
7891 dconst_third_ptr (), dconst_third_ptr ());
7892 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7893 tree_root = build_real (type, dconstroot);
7894 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7899 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7900 if (fcode == BUILT_IN_POW
7901 || fcode == BUILT_IN_POWF
7902 || fcode == BUILT_IN_POWL)
7904 tree arg00 = CALL_EXPR_ARG (arg, 0);
7905 tree arg01 = CALL_EXPR_ARG (arg, 1);
7906 if (tree_expr_nonnegative_p (arg00))
7908 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7909 const REAL_VALUE_TYPE dconstroot
7910 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7911 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7912 build_real (type, dconstroot));
7913 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7917 return NULL_TREE;
7920 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7921 TYPE is the type of the return value. Return NULL_TREE if no
7922 simplification can be made. */
7924 static tree
7925 fold_builtin_cos (location_t loc,
7926 tree arg, tree type, tree fndecl)
7928 tree res, narg;
7930 if (!validate_arg (arg, REAL_TYPE))
7931 return NULL_TREE;
7933 /* Calculate the result when the argument is a constant. */
7934 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7935 return res;
7937 /* Optimize cos(-x) into cos (x). */
7938 if ((narg = fold_strip_sign_ops (arg)))
7939 return build_call_expr_loc (loc, fndecl, 1, narg);
7941 return NULL_TREE;
7944 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7945 Return NULL_TREE if no simplification can be made. */
7947 static tree
7948 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7950 if (validate_arg (arg, REAL_TYPE))
7952 tree res, narg;
7954 /* Calculate the result when the argument is a constant. */
7955 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7956 return res;
7958 /* Optimize cosh(-x) into cosh (x). */
7959 if ((narg = fold_strip_sign_ops (arg)))
7960 return build_call_expr_loc (loc, fndecl, 1, narg);
7963 return NULL_TREE;
7966 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7967 argument ARG. TYPE is the type of the return value. Return
7968 NULL_TREE if no simplification can be made. */
7970 static tree
7971 fold_builtin_ccos (location_t loc,
7972 tree arg, tree type ATTRIBUTE_UNUSED, tree fndecl,
7973 bool hyper ATTRIBUTE_UNUSED)
7975 if (validate_arg (arg, COMPLEX_TYPE)
7976 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7978 tree tmp;
7980 #ifdef HAVE_mpc
7981 /* Calculate the result when the argument is a constant. */
7982 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7983 return tmp;
7984 #endif
7986 /* Optimize fn(-x) into fn(x). */
7987 if ((tmp = fold_strip_sign_ops (arg)))
7988 return build_call_expr_loc (loc, fndecl, 1, tmp);
7991 return NULL_TREE;
7994 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7995 Return NULL_TREE if no simplification can be made. */
7997 static tree
7998 fold_builtin_tan (tree arg, tree type)
8000 enum built_in_function fcode;
8001 tree res;
8003 if (!validate_arg (arg, REAL_TYPE))
8004 return NULL_TREE;
8006 /* Calculate the result when the argument is a constant. */
8007 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
8008 return res;
8010 /* Optimize tan(atan(x)) = x. */
8011 fcode = builtin_mathfn_code (arg);
8012 if (flag_unsafe_math_optimizations
8013 && (fcode == BUILT_IN_ATAN
8014 || fcode == BUILT_IN_ATANF
8015 || fcode == BUILT_IN_ATANL))
8016 return CALL_EXPR_ARG (arg, 0);
8018 return NULL_TREE;
8021 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8022 NULL_TREE if no simplification can be made. */
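/* When cexpi is available, sincos (x, &s, &c) is rewritten roughly as
   tmp = cexpi (x); s = __imag tmp; c = __real tmp;  (see below).  */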
8024 static tree
8025 fold_builtin_sincos (location_t loc,
8026 tree arg0, tree arg1, tree arg2)
8028 tree type;
8029 tree res, fn, call;
8031 if (!validate_arg (arg0, REAL_TYPE)
8032 || !validate_arg (arg1, POINTER_TYPE)
8033 || !validate_arg (arg2, POINTER_TYPE))
8034 return NULL_TREE;
8036 type = TREE_TYPE (arg0);
8038 /* Calculate the result when the argument is a constant. */
8039 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8040 return res;
8042 /* Canonicalize sincos to cexpi. */
8043 if (!TARGET_C99_FUNCTIONS)
8044 return NULL_TREE;
8045 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8046 if (!fn)
8047 return NULL_TREE;
8049 call = build_call_expr_loc (loc, fn, 1, arg0);
8050 call = builtin_save_expr (call);
8052 return build2 (COMPOUND_EXPR, void_type_node,
8053 build2 (MODIFY_EXPR, void_type_node,
8054 build_fold_indirect_ref_loc (loc, arg1),
8055 build1 (IMAGPART_EXPR, type, call)),
8056 build2 (MODIFY_EXPR, void_type_node,
8057 build_fold_indirect_ref_loc (loc, arg2),
8058 build1 (REALPART_EXPR, type, call)));
8061 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8062 NULL_TREE if no simplification can be made. */
8064 static tree
8065 fold_builtin_cexp (location_t loc, tree arg0, tree type)
8067 tree rtype;
8068 tree realp, imagp, ifn;
8069 #ifdef HAVE_mpc
8070 tree res;
8071 #endif
8073 if (!validate_arg (arg0, COMPLEX_TYPE)
8074 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
8075 return NULL_TREE;
8077 #ifdef HAVE_mpc
8078 /* Calculate the result when the argument is a constant. */
8079 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8080 return res;
8081 #endif
8083 rtype = TREE_TYPE (TREE_TYPE (arg0));
8085 /* If we can figure out the real part of arg0 and it is constant zero,
8086 fold to cexpi. */
8087 if (!TARGET_C99_FUNCTIONS)
8088 return NULL_TREE;
8089 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8090 if (!ifn)
8091 return NULL_TREE;
8093 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8094 && real_zerop (realp))
8096 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8097 return build_call_expr_loc (loc, ifn, 1, narg);
8100 /* If we can easily decompose the real and imaginary parts, split cexp
8101 into exp (r) * cexpi (i). */
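/* I.e. cexp (r + i*I) becomes exp (r) * cexpi (i); the real and imaginary
   parts of the result are exp (r) * cos (i) and exp (r) * sin (i).  */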
8102 if (flag_unsafe_math_optimizations
8103 && realp)
8105 tree rfn, rcall, icall;
8107 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8108 if (!rfn)
8109 return NULL_TREE;
8111 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
8112 if (!imagp)
8113 return NULL_TREE;
8115 icall = build_call_expr_loc (loc, ifn, 1, imagp);
8116 icall = builtin_save_expr (icall);
8117 rcall = build_call_expr_loc (loc, rfn, 1, realp);
8118 rcall = builtin_save_expr (rcall);
8119 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8120 fold_build2_loc (loc, MULT_EXPR, rtype,
8121 rcall,
8122 fold_build1_loc (loc, REALPART_EXPR,
8123 rtype, icall)),
8124 fold_build2_loc (loc, MULT_EXPR, rtype,
8125 rcall,
8126 fold_build1_loc (loc, IMAGPART_EXPR,
8127 rtype, icall)));
8130 return NULL_TREE;
8133 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8134 Return NULL_TREE if no simplification can be made. */
8136 static tree
8137 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8139 if (!validate_arg (arg, REAL_TYPE))
8140 return NULL_TREE;
8142 /* Optimize trunc of constant value. */
8143 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8145 REAL_VALUE_TYPE r, x;
8146 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8148 x = TREE_REAL_CST (arg);
8149 real_trunc (&r, TYPE_MODE (type), &x);
8150 return build_real (type, r);
8153 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8156 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8157 Return NULL_TREE if no simplification can be made. */
8159 static tree
8160 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8162 if (!validate_arg (arg, REAL_TYPE))
8163 return NULL_TREE;
8165 /* Optimize floor of constant value. */
8166 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8168 REAL_VALUE_TYPE x;
8170 x = TREE_REAL_CST (arg);
8171 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8173 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8174 REAL_VALUE_TYPE r;
8176 real_floor (&r, TYPE_MODE (type), &x);
8177 return build_real (type, r);
8181 /* Fold floor (x) where x is nonnegative to trunc (x). */
8182 if (tree_expr_nonnegative_p (arg))
8184 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8185 if (truncfn)
8186 return build_call_expr_loc (loc, truncfn, 1, arg);
8189 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8192 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8193 Return NULL_TREE if no simplification can be made. */
8195 static tree
8196 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8198 if (!validate_arg (arg, REAL_TYPE))
8199 return NULL_TREE;
8201 /* Optimize ceil of constant value. */
8202 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8204 REAL_VALUE_TYPE x;
8206 x = TREE_REAL_CST (arg);
8207 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8209 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8210 REAL_VALUE_TYPE r;
8212 real_ceil (&r, TYPE_MODE (type), &x);
8213 return build_real (type, r);
8217 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8220 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8221 Return NULL_TREE if no simplification can be made. */
8223 static tree
8224 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8226 if (!validate_arg (arg, REAL_TYPE))
8227 return NULL_TREE;
8229 /* Optimize round of constant value. */
8230 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8232 REAL_VALUE_TYPE x;
8234 x = TREE_REAL_CST (arg);
8235 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8237 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8238 REAL_VALUE_TYPE r;
8240 real_round (&r, TYPE_MODE (type), &x);
8241 return build_real (type, r);
8245 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8248 /* Fold function call to builtin lround, lroundf or lroundl (or the
8249 corresponding long long versions) and other rounding functions. ARG
8250 is the argument to the call. Return NULL_TREE if no simplification
8251 can be made. */
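/* For a constant argument, e.g. lround (2.5) folds to the integer constant 3,
   provided the value fits the return type (see fit_double_type below).  */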
8253 static tree
8254 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8256 if (!validate_arg (arg, REAL_TYPE))
8257 return NULL_TREE;
8259 /* Optimize lround of constant value. */
8260 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8262 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8264 if (real_isfinite (&x))
8266 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8267 tree ftype = TREE_TYPE (arg);
8268 unsigned HOST_WIDE_INT lo2;
8269 HOST_WIDE_INT hi, lo;
8270 REAL_VALUE_TYPE r;
8272 switch (DECL_FUNCTION_CODE (fndecl))
8274 CASE_FLT_FN (BUILT_IN_LFLOOR):
8275 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8276 real_floor (&r, TYPE_MODE (ftype), &x);
8277 break;
8279 CASE_FLT_FN (BUILT_IN_LCEIL):
8280 CASE_FLT_FN (BUILT_IN_LLCEIL):
8281 real_ceil (&r, TYPE_MODE (ftype), &x);
8282 break;
8284 CASE_FLT_FN (BUILT_IN_LROUND):
8285 CASE_FLT_FN (BUILT_IN_LLROUND):
8286 real_round (&r, TYPE_MODE (ftype), &x);
8287 break;
8289 default:
8290 gcc_unreachable ();
8293 REAL_VALUE_TO_INT (&lo, &hi, r);
8294 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8295 return build_int_cst_wide (itype, lo2, hi);
8299 switch (DECL_FUNCTION_CODE (fndecl))
8301 CASE_FLT_FN (BUILT_IN_LFLOOR):
8302 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8303 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8304 if (tree_expr_nonnegative_p (arg))
8305 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8306 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8307 break;
8308 default:;
8311 return fold_fixed_mathfn (loc, fndecl, arg);
8314 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8315 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8316 the argument to the call. Return NULL_TREE if no simplification can
8317 be made. */
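/* E.g. __builtin_popcount (0xff) folds to 8 and __builtin_ffs (0x10)
   folds to 5.  */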
8319 static tree
8320 fold_builtin_bitop (tree fndecl, tree arg)
8322 if (!validate_arg (arg, INTEGER_TYPE))
8323 return NULL_TREE;
8325 /* Optimize for constant argument. */
8326 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8328 HOST_WIDE_INT hi, width, result;
8329 unsigned HOST_WIDE_INT lo;
8330 tree type;
8332 type = TREE_TYPE (arg);
8333 width = TYPE_PRECISION (type);
8334 lo = TREE_INT_CST_LOW (arg);
8336 /* Clear all the bits that are beyond the type's precision. */
8337 if (width > HOST_BITS_PER_WIDE_INT)
8339 hi = TREE_INT_CST_HIGH (arg);
8340 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8341 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8343 else
8345 hi = 0;
8346 if (width < HOST_BITS_PER_WIDE_INT)
8347 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8350 switch (DECL_FUNCTION_CODE (fndecl))
8352 CASE_INT_FN (BUILT_IN_FFS):
8353 if (lo != 0)
8354 result = exact_log2 (lo & -lo) + 1;
8355 else if (hi != 0)
8356 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8357 else
8358 result = 0;
8359 break;
8361 CASE_INT_FN (BUILT_IN_CLZ):
8362 if (hi != 0)
8363 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8364 else if (lo != 0)
8365 result = width - floor_log2 (lo) - 1;
8366 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8367 result = width;
8368 break;
8370 CASE_INT_FN (BUILT_IN_CTZ):
8371 if (lo != 0)
8372 result = exact_log2 (lo & -lo);
8373 else if (hi != 0)
8374 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8375 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8376 result = width;
8377 break;
8379 CASE_INT_FN (BUILT_IN_POPCOUNT):
8380 result = 0;
8381 while (lo)
8382 result++, lo &= lo - 1;
8383 while (hi)
8384 result++, hi &= hi - 1;
8385 break;
8387 CASE_INT_FN (BUILT_IN_PARITY):
8388 result = 0;
8389 while (lo)
8390 result++, lo &= lo - 1;
8391 while (hi)
8392 result++, hi &= hi - 1;
8393 result &= 1;
8394 break;
8396 default:
8397 gcc_unreachable ();
8400 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8403 return NULL_TREE;
8406 /* Fold function call to builtin_bswap and the long and long long
8407 variants. Return NULL_TREE if no simplification can be made. */
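/* E.g. __builtin_bswap32 (0x12345678) folds to 0x78563412.  */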
8408 static tree
8409 fold_builtin_bswap (tree fndecl, tree arg)
8411 if (! validate_arg (arg, INTEGER_TYPE))
8412 return NULL_TREE;
8414 /* Optimize constant value. */
8415 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8417 HOST_WIDE_INT hi, width, r_hi = 0;
8418 unsigned HOST_WIDE_INT lo, r_lo = 0;
8419 tree type;
8421 type = TREE_TYPE (arg);
8422 width = TYPE_PRECISION (type);
8423 lo = TREE_INT_CST_LOW (arg);
8424 hi = TREE_INT_CST_HIGH (arg);
8426 switch (DECL_FUNCTION_CODE (fndecl))
8428 case BUILT_IN_BSWAP32:
8429 case BUILT_IN_BSWAP64:
8431 int s;
8433 for (s = 0; s < width; s += 8)
8435 int d = width - s - 8;
8436 unsigned HOST_WIDE_INT byte;
8438 if (s < HOST_BITS_PER_WIDE_INT)
8439 byte = (lo >> s) & 0xff;
8440 else
8441 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8443 if (d < HOST_BITS_PER_WIDE_INT)
8444 r_lo |= byte << d;
8445 else
8446 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8450 break;
8452 default:
8453 gcc_unreachable ();
8456 if (width < HOST_BITS_PER_WIDE_INT)
8457 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8458 else
8459 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8462 return NULL_TREE;
8465 /* A subroutine of fold_builtin to fold the various logarithmic
8466 functions. Return NULL_TREE if no simplification can be made.
8467 FUNC is the corresponding MPFR logarithm function. */
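/* With -funsafe-math-optimizations this also rewrites, for example,
   log (exp (x)) to x and log (pow (x, y)) to y * log (x), per the
   cases handled below.  */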
8469 static tree
8470 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8471 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8473 if (validate_arg (arg, REAL_TYPE))
8475 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8476 tree res;
8477 const enum built_in_function fcode = builtin_mathfn_code (arg);
8479 /* Calculate the result when the argument is a constant. */
8480 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8481 return res;
8483 /* Special case, optimize logN(expN(x)) = x. */
8484 if (flag_unsafe_math_optimizations
8485 && ((func == mpfr_log
8486 && (fcode == BUILT_IN_EXP
8487 || fcode == BUILT_IN_EXPF
8488 || fcode == BUILT_IN_EXPL))
8489 || (func == mpfr_log2
8490 && (fcode == BUILT_IN_EXP2
8491 || fcode == BUILT_IN_EXP2F
8492 || fcode == BUILT_IN_EXP2L))
8493 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8494 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8496 /* Optimize logN(func()) for various exponential functions. We
8497 want to determine the value "x" and the power "exponent" in
8498 order to transform logN(x**exponent) into exponent*logN(x). */
8499 if (flag_unsafe_math_optimizations)
8501 tree exponent = 0, x = 0;
8503 switch (fcode)
8505 CASE_FLT_FN (BUILT_IN_EXP):
8506 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8507 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8508 dconst_e ()));
8509 exponent = CALL_EXPR_ARG (arg, 0);
8510 break;
8511 CASE_FLT_FN (BUILT_IN_EXP2):
8512 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8513 x = build_real (type, dconst2);
8514 exponent = CALL_EXPR_ARG (arg, 0);
8515 break;
8516 CASE_FLT_FN (BUILT_IN_EXP10):
8517 CASE_FLT_FN (BUILT_IN_POW10):
8518 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8520 REAL_VALUE_TYPE dconst10;
8521 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8522 x = build_real (type, dconst10);
8524 exponent = CALL_EXPR_ARG (arg, 0);
8525 break;
8526 CASE_FLT_FN (BUILT_IN_SQRT):
8527 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8528 x = CALL_EXPR_ARG (arg, 0);
8529 exponent = build_real (type, dconsthalf);
8530 break;
8531 CASE_FLT_FN (BUILT_IN_CBRT):
8532 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8533 x = CALL_EXPR_ARG (arg, 0);
8534 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8535 dconst_third ()));
8536 break;
8537 CASE_FLT_FN (BUILT_IN_POW):
8538 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8539 x = CALL_EXPR_ARG (arg, 0);
8540 exponent = CALL_EXPR_ARG (arg, 1);
8541 break;
8542 default:
8543 break;
8546 /* Now perform the optimization. */
8547 if (x && exponent)
8549 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8550 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8555 return NULL_TREE;
8558 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8559 NULL_TREE if no simplification can be made. */
8561 static tree
8562 fold_builtin_hypot (location_t loc, tree fndecl,
8563 tree arg0, tree arg1, tree type)
8565 tree res, narg0, narg1;
8567 if (!validate_arg (arg0, REAL_TYPE)
8568 || !validate_arg (arg1, REAL_TYPE))
8569 return NULL_TREE;
8571 /* Calculate the result when the argument is a constant. */
8572 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8573 return res;
8575 /* If either argument to hypot has a negate or abs, strip that off.
8576 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8577 narg0 = fold_strip_sign_ops (arg0);
8578 narg1 = fold_strip_sign_ops (arg1);
8579 if (narg0 || narg1)
8581 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8582 narg1 ? narg1 : arg1);
8585 /* If either argument is zero, hypot is fabs of the other. */
8586 if (real_zerop (arg0))
8587 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8588 else if (real_zerop (arg1))
8589 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8591 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8592 if (flag_unsafe_math_optimizations
8593 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8595 const REAL_VALUE_TYPE sqrt2_trunc
8596 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8597 return fold_build2_loc (loc, MULT_EXPR, type,
8598 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8599 build_real (type, sqrt2_trunc));
8602 return NULL_TREE;
8606 /* Fold a builtin function call to pow, powf, or powl. Return
8607 NULL_TREE if no simplification can be made. */
8608 static tree
8609 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8611 tree res;
8613 if (!validate_arg (arg0, REAL_TYPE)
8614 || !validate_arg (arg1, REAL_TYPE))
8615 return NULL_TREE;
8617 /* Calculate the result when the argument is a constant. */
8618 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8619 return res;
8621 /* Optimize pow(1.0,y) = 1.0. */
8622 if (real_onep (arg0))
8623 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8625 if (TREE_CODE (arg1) == REAL_CST
8626 && !TREE_OVERFLOW (arg1))
8628 REAL_VALUE_TYPE cint;
8629 REAL_VALUE_TYPE c;
8630 HOST_WIDE_INT n;
8632 c = TREE_REAL_CST (arg1);
8634 /* Optimize pow(x,0.0) = 1.0. */
8635 if (REAL_VALUES_EQUAL (c, dconst0))
8636 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8637 arg0);
8639 /* Optimize pow(x,1.0) = x. */
8640 if (REAL_VALUES_EQUAL (c, dconst1))
8641 return arg0;
8643 /* Optimize pow(x,-1.0) = 1.0/x. */
8644 if (REAL_VALUES_EQUAL (c, dconstm1))
8645 return fold_build2_loc (loc, RDIV_EXPR, type,
8646 build_real (type, dconst1), arg0);
8648 /* Optimize pow(x,0.5) = sqrt(x). */
8649 if (flag_unsafe_math_optimizations
8650 && REAL_VALUES_EQUAL (c, dconsthalf))
8652 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8654 if (sqrtfn != NULL_TREE)
8655 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8658 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8659 if (flag_unsafe_math_optimizations)
8661 const REAL_VALUE_TYPE dconstroot
8662 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8664 if (REAL_VALUES_EQUAL (c, dconstroot))
8666 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8667 if (cbrtfn != NULL_TREE)
8668 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8672 /* Check for an integer exponent. */
8673 n = real_to_integer (&c);
8674 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8675 if (real_identical (&c, &cint))
8677 /* Attempt to evaluate pow at compile-time, unless this should
8678 raise an exception. */
8679 if (TREE_CODE (arg0) == REAL_CST
8680 && !TREE_OVERFLOW (arg0)
8681 && (n > 0
8682 || (!flag_trapping_math && !flag_errno_math)
8683 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8685 REAL_VALUE_TYPE x;
8686 bool inexact;
8688 x = TREE_REAL_CST (arg0);
8689 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8690 if (flag_unsafe_math_optimizations || !inexact)
8691 return build_real (type, x);
8694 /* Strip sign ops from even integer powers. */
8695 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8697 tree narg0 = fold_strip_sign_ops (arg0);
8698 if (narg0)
8699 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8704 if (flag_unsafe_math_optimizations)
8706 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8708 /* Optimize pow(expN(x),y) = expN(x*y). */
8709 if (BUILTIN_EXPONENT_P (fcode))
8711 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8712 tree arg = CALL_EXPR_ARG (arg0, 0);
8713 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8714 return build_call_expr_loc (loc, expfn, 1, arg);
8717 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8718 if (BUILTIN_SQRT_P (fcode))
8720 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8721 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8722 build_real (type, dconsthalf));
8723 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8726 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8727 if (BUILTIN_CBRT_P (fcode))
8729 tree arg = CALL_EXPR_ARG (arg0, 0);
8730 if (tree_expr_nonnegative_p (arg))
8732 const REAL_VALUE_TYPE dconstroot
8733 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8734 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8735 build_real (type, dconstroot));
8736 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8740 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8741 if (fcode == BUILT_IN_POW
8742 || fcode == BUILT_IN_POWF
8743 || fcode == BUILT_IN_POWL)
8745 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8746 if (tree_expr_nonnegative_p (arg00))
8748 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8749 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8750 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8755 return NULL_TREE;
8758 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8759 Return NULL_TREE if no simplification can be made. */
8760 static tree
8761 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8762 tree arg0, tree arg1, tree type)
8764 if (!validate_arg (arg0, REAL_TYPE)
8765 || !validate_arg (arg1, INTEGER_TYPE))
8766 return NULL_TREE;
8768 /* Optimize pow(1.0,y) = 1.0. */
8769 if (real_onep (arg0))
8770 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8772 if (host_integerp (arg1, 0))
8774 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8776 /* Evaluate powi at compile-time. */
8777 if (TREE_CODE (arg0) == REAL_CST
8778 && !TREE_OVERFLOW (arg0))
8780 REAL_VALUE_TYPE x;
8781 x = TREE_REAL_CST (arg0);
8782 real_powi (&x, TYPE_MODE (type), &x, c);
8783 return build_real (type, x);
8786 /* Optimize pow(x,0) = 1.0. */
8787 if (c == 0)
8788 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8789 arg0);
8791 /* Optimize pow(x,1) = x. */
8792 if (c == 1)
8793 return arg0;
8795 /* Optimize pow(x,-1) = 1.0/x. */
8796 if (c == -1)
8797 return fold_build2_loc (loc, RDIV_EXPR, type,
8798 build_real (type, dconst1), arg0);
8801 return NULL_TREE;
8804 /* A subroutine of fold_builtin to fold the various exponent
8805 functions. Return NULL_TREE if no simplification can be made.
8806 FUNC is the corresponding MPFR exponent function. */
8808 static tree
8809 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8810 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8812 if (validate_arg (arg, REAL_TYPE))
8814 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8815 tree res;
8817 /* Calculate the result when the argument is a constant. */
8818 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8819 return res;
8821 /* Optimize expN(logN(x)) = x. */
8822 if (flag_unsafe_math_optimizations)
8824 const enum built_in_function fcode = builtin_mathfn_code (arg);
8826 if ((func == mpfr_exp
8827 && (fcode == BUILT_IN_LOG
8828 || fcode == BUILT_IN_LOGF
8829 || fcode == BUILT_IN_LOGL))
8830 || (func == mpfr_exp2
8831 && (fcode == BUILT_IN_LOG2
8832 || fcode == BUILT_IN_LOG2F
8833 || fcode == BUILT_IN_LOG2L))
8834 || (func == mpfr_exp10
8835 && (fcode == BUILT_IN_LOG10
8836 || fcode == BUILT_IN_LOG10F
8837 || fcode == BUILT_IN_LOG10L)))
8838 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8842 return NULL_TREE;
8845 /* Return true if VAR is a VAR_DECL or a component thereof. */
8847 static bool
8848 var_decl_component_p (tree var)
8850 tree inner = var;
8851 while (handled_component_p (inner))
8852 inner = TREE_OPERAND (inner, 0);
8853 return SSA_VAR_P (inner);
8856 /* Fold function call to builtin memset. Return
8857 NULL_TREE if no simplification can be made. */
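/* When the destination is a simple variable of exactly the stored size,
   e.g. memset (&i, 0x5a, sizeof (int)) with a 32-bit int, this becomes the
   plain store i = 0x5a5a5a5a (the byte value replicated, see below).  */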
8859 static tree
8860 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8861 tree type, bool ignore)
8863 tree var, ret, etype;
8864 unsigned HOST_WIDE_INT length, cval;
8866 if (! validate_arg (dest, POINTER_TYPE)
8867 || ! validate_arg (c, INTEGER_TYPE)
8868 || ! validate_arg (len, INTEGER_TYPE))
8869 return NULL_TREE;
8871 if (! host_integerp (len, 1))
8872 return NULL_TREE;
8874 /* If the LEN parameter is zero, return DEST. */
8875 if (integer_zerop (len))
8876 return omit_one_operand_loc (loc, type, dest, c);
8878 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8879 return NULL_TREE;
8881 var = dest;
8882 STRIP_NOPS (var);
8883 if (TREE_CODE (var) != ADDR_EXPR)
8884 return NULL_TREE;
8886 var = TREE_OPERAND (var, 0);
8887 if (TREE_THIS_VOLATILE (var))
8888 return NULL_TREE;
8890 etype = TREE_TYPE (var);
8891 if (TREE_CODE (etype) == ARRAY_TYPE)
8892 etype = TREE_TYPE (etype);
8894 if (!INTEGRAL_TYPE_P (etype)
8895 && !POINTER_TYPE_P (etype))
8896 return NULL_TREE;
8898 if (! var_decl_component_p (var))
8899 return NULL_TREE;
8901 length = tree_low_cst (len, 1);
8902 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8903 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8904 < (int) length)
8905 return NULL_TREE;
8907 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8908 return NULL_TREE;
8910 if (integer_zerop (c))
8911 cval = 0;
8912 else
8914 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8915 return NULL_TREE;
8917 cval = tree_low_cst (c, 1);
8918 cval &= 0xff;
8919 cval |= cval << 8;
8920 cval |= cval << 16;
8921 cval |= (cval << 31) << 1;
8924 ret = build_int_cst_type (etype, cval);
8925 var = build_fold_indirect_ref_loc (loc,
8926 fold_convert_loc (loc,
8927 build_pointer_type (etype),
8928 dest));
8929 ret = build2 (MODIFY_EXPR, etype, var, ret);
8930 if (ignore)
8931 return ret;
8933 return omit_one_operand_loc (loc, type, dest, ret);
8936 /* Fold function call to builtin bzero. Return
8937 NULL_TREE if no simplification can be made. */
8939 static tree
8940 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8942 if (! validate_arg (dest, POINTER_TYPE)
8943 || ! validate_arg (size, INTEGER_TYPE))
8944 return NULL_TREE;
8946 if (!ignore)
8947 return NULL_TREE;
8949 /* New argument list transforming bzero(ptr x, int y) to
8950 memset(ptr x, int 0, size_t y). This is done this way
8951 so that if it isn't expanded inline, we fall back to
8952 calling bzero instead of memset. */
8954 return fold_builtin_memset (loc, dest, integer_zero_node,
8955 fold_convert_loc (loc, sizetype, size),
8956 void_type_node, ignore);
8959 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8960 NULL_TREE if no simplification can be made.
8961 If ENDP is 0, return DEST (like memcpy).
8962 If ENDP is 1, return DEST+LEN (like mempcpy).
8963 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8964 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8965 (memmove). */
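/* In the simplest case, e.g. memcpy (&a, &b, sizeof (a)) where A and B are
   non-overlapping variables of the same type, this can fold to the plain
   assignment a = b (plus the DEST{,+LEN,+LEN-1} result when it is used).  */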
8967 static tree
8968 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8969 tree len, tree type, bool ignore, int endp)
8971 tree destvar, srcvar, expr;
8973 if (! validate_arg (dest, POINTER_TYPE)
8974 || ! validate_arg (src, POINTER_TYPE)
8975 || ! validate_arg (len, INTEGER_TYPE))
8976 return NULL_TREE;
8978 /* If the LEN parameter is zero, return DEST. */
8979 if (integer_zerop (len))
8980 return omit_one_operand_loc (loc, type, dest, src);
8982 /* If SRC and DEST are the same (and not volatile), return
8983 DEST{,+LEN,+LEN-1}. */
8984 if (operand_equal_p (src, dest, 0))
8985 expr = len;
8986 else
8988 tree srctype, desttype;
8989 int src_align, dest_align;
8991 if (endp == 3)
8993 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8994 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8996 /* Both DEST and SRC must be pointer types.
8997 ??? This is what old code did. Is the testing for pointer types
8998 really mandatory?
9000 If either SRC is readonly or length is 1, we can use memcpy. */
9001 if (!dest_align || !src_align)
9002 return NULL_TREE;
9003 if (readonly_data_expr (src)
9004 || (host_integerp (len, 1)
9005 && (MIN (src_align, dest_align) / BITS_PER_UNIT
9006 >= tree_low_cst (len, 1))))
9008 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9009 if (!fn)
9010 return NULL_TREE;
9011 return build_call_expr_loc (loc, fn, 3, dest, src, len);
9014 /* If *src and *dest can't overlap, optimize into memcpy as well. */
9015 srcvar = build_fold_indirect_ref_loc (loc, src);
9016 destvar = build_fold_indirect_ref_loc (loc, dest);
9017 if (srcvar
9018 && !TREE_THIS_VOLATILE (srcvar)
9019 && destvar
9020 && !TREE_THIS_VOLATILE (destvar))
9022 tree src_base, dest_base, fn;
9023 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
9024 HOST_WIDE_INT size = -1;
9025 HOST_WIDE_INT maxsize = -1;
9027 src_base = srcvar;
9028 if (handled_component_p (src_base))
9029 src_base = get_ref_base_and_extent (src_base, &src_offset,
9030 &size, &maxsize);
9031 dest_base = destvar;
9032 if (handled_component_p (dest_base))
9033 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
9034 &size, &maxsize);
9035 if (host_integerp (len, 1))
9037 maxsize = tree_low_cst (len, 1);
9038 if (maxsize
9039 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
9040 maxsize = -1;
9041 else
9042 maxsize *= BITS_PER_UNIT;
9044 else
9045 maxsize = -1;
9046 if (SSA_VAR_P (src_base)
9047 && SSA_VAR_P (dest_base))
9049 if (operand_equal_p (src_base, dest_base, 0)
9050 && ranges_overlap_p (src_offset, maxsize,
9051 dest_offset, maxsize))
9052 return NULL_TREE;
9054 else if (TREE_CODE (src_base) == INDIRECT_REF
9055 && TREE_CODE (dest_base) == INDIRECT_REF)
9057 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
9058 TREE_OPERAND (dest_base, 0), 0)
9059 || ranges_overlap_p (src_offset, maxsize,
9060 dest_offset, maxsize))
9061 return NULL_TREE;
9063 else
9064 return NULL_TREE;
9066 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9067 if (!fn)
9068 return NULL_TREE;
9069 return build_call_expr_loc (loc, fn, 3, dest, src, len);
9071 return NULL_TREE;
9074 if (!host_integerp (len, 0))
9075 return NULL_TREE;
9076 /* FIXME:
9077 This logic loses for arguments like (type *)malloc (sizeof (type)),
9078 since the casts applied to malloc's (void *) return value are stripped.
9079 Perhaps we ought to inherit the type from the non-VOID argument here? */
9080 STRIP_NOPS (src);
9081 STRIP_NOPS (dest);
9082 /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here. */
9083 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
9085 tree tem = TREE_OPERAND (src, 0);
9086 STRIP_NOPS (tem);
9087 if (tem != TREE_OPERAND (src, 0))
9088 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
9090 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
9092 tree tem = TREE_OPERAND (dest, 0);
9093 STRIP_NOPS (tem);
9094 if (tem != TREE_OPERAND (dest, 0))
9095 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
9097 srctype = TREE_TYPE (TREE_TYPE (src));
9098 if (srctype
9099 && TREE_CODE (srctype) == ARRAY_TYPE
9100 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
9102 srctype = TREE_TYPE (srctype);
9103 STRIP_NOPS (src);
9104 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
9106 desttype = TREE_TYPE (TREE_TYPE (dest));
9107 if (desttype
9108 && TREE_CODE (desttype) == ARRAY_TYPE
9109 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
9111 desttype = TREE_TYPE (desttype);
9112 STRIP_NOPS (dest);
9113 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
9115 if (!srctype || !desttype
9116 || !TYPE_SIZE_UNIT (srctype)
9117 || !TYPE_SIZE_UNIT (desttype)
9118 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
9119 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
9120 || TYPE_VOLATILE (srctype)
9121 || TYPE_VOLATILE (desttype))
9122 return NULL_TREE;
9124 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
9125 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
9126 if (dest_align < (int) TYPE_ALIGN (desttype)
9127 || src_align < (int) TYPE_ALIGN (srctype))
9128 return NULL_TREE;
9130 if (!ignore)
9131 dest = builtin_save_expr (dest);
9133 srcvar = NULL_TREE;
9134 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
9136 srcvar = build_fold_indirect_ref_loc (loc, src);
9137 if (TREE_THIS_VOLATILE (srcvar))
9138 return NULL_TREE;
9139 else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
9140 srcvar = NULL_TREE;
9141 /* With memcpy, it is possible to bypass aliasing rules, so without
9142 this check, e.g. execute/20060930-2.c would be misoptimized,
9143 because it uses a conflicting alias set to hold the argument for
9144 the memcpy call. This check is probably unnecessary with
9145 -fno-strict-aliasing. Similarly for destvar. See also
9146 PR29286. */
9147 else if (!var_decl_component_p (srcvar))
9148 srcvar = NULL_TREE;
9151 destvar = NULL_TREE;
9152 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
9154 destvar = build_fold_indirect_ref_loc (loc, dest);
9155 if (TREE_THIS_VOLATILE (destvar))
9156 return NULL_TREE;
9157 else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
9158 destvar = NULL_TREE;
9159 else if (!var_decl_component_p (destvar))
9160 destvar = NULL_TREE;
9163 if (srcvar == NULL_TREE && destvar == NULL_TREE)
9164 return NULL_TREE;
9166 if (srcvar == NULL_TREE)
9168 tree srcptype;
9169 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
9170 return NULL_TREE;
9172 srctype = build_qualified_type (desttype, 0);
9173 if (src_align < (int) TYPE_ALIGN (srctype))
9175 if (AGGREGATE_TYPE_P (srctype)
9176 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
9177 return NULL_TREE;
9179 srctype = build_variant_type_copy (srctype);
9180 TYPE_ALIGN (srctype) = src_align;
9181 TYPE_USER_ALIGN (srctype) = 1;
9182 TYPE_PACKED (srctype) = 1;
9184 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
9185 src = fold_convert_loc (loc, srcptype, src);
9186 srcvar = build_fold_indirect_ref_loc (loc, src);
9188 else if (destvar == NULL_TREE)
9190 tree destptype;
9191 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
9192 return NULL_TREE;
9194 desttype = build_qualified_type (srctype, 0);
9195 if (dest_align < (int) TYPE_ALIGN (desttype))
9197 if (AGGREGATE_TYPE_P (desttype)
9198 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
9199 return NULL_TREE;
9201 desttype = build_variant_type_copy (desttype);
9202 TYPE_ALIGN (desttype) = dest_align;
9203 TYPE_USER_ALIGN (desttype) = 1;
9204 TYPE_PACKED (desttype) = 1;
9206 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
9207 dest = fold_convert_loc (loc, destptype, dest);
9208 destvar = build_fold_indirect_ref_loc (loc, dest);
9211 if (srctype == desttype
9212 || (gimple_in_ssa_p (cfun)
9213 && useless_type_conversion_p (desttype, srctype)))
9214 expr = srcvar;
9215 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
9216 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
9217 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
9218 || POINTER_TYPE_P (TREE_TYPE (destvar))))
9219 expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
9220 else
9221 expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
9222 TREE_TYPE (destvar), srcvar);
9223 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
9226 if (ignore)
9227 return expr;
9229 if (endp == 0 || endp == 3)
9230 return omit_one_operand_loc (loc, type, dest, expr);
9232 if (expr == len)
9233 expr = NULL_TREE;
9235 if (endp == 2)
9236 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
9237 ssize_int (1));
9239 len = fold_convert_loc (loc, sizetype, len);
9240 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
9241 dest = fold_convert_loc (loc, type, dest);
9242 if (expr)
9243 dest = omit_one_operand_loc (loc, type, dest, expr);
9244 return dest;
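/* Editor's illustrative sketch -- not part of the original builtins.c;
   "example_mempcpy_fold" and "struct s8" are hypothetical names.  It shows
   the source-level effect of the folding above: when LEN equals the size of
   the pointed-to type, the copy can become an aggregate assignment, and the
   ENDP handling then supplies the documented return value (DEST for memcpy,
   DEST + LEN for mempcpy).  */

struct s8 { double d; };

static struct s8 *
example_mempcpy_fold (struct s8 *dst, const struct s8 *src)
{
  /* May fold to roughly: *dst = *src, then yield (char *) dst + 8.  */
  return (struct s8 *) __builtin_mempcpy (dst, src, sizeof (struct s8));
}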
9247 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9248 If LEN is not NULL, it represents the length of the string to be
9249 copied. Return NULL_TREE if no simplification can be made. */
9251 tree
9252 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
9254 tree fn;
9256 if (!validate_arg (dest, POINTER_TYPE)
9257 || !validate_arg (src, POINTER_TYPE))
9258 return NULL_TREE;
9260 /* If SRC and DEST are the same (and not volatile), return DEST. */
9261 if (operand_equal_p (src, dest, 0))
9262 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
9264 if (optimize_function_for_size_p (cfun))
9265 return NULL_TREE;
9267 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9268 if (!fn)
9269 return NULL_TREE;
9271 if (!len)
9273 len = c_strlen (src, 1);
9274 if (! len || TREE_SIDE_EFFECTS (len))
9275 return NULL_TREE;
9278 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
9279 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9280 build_call_expr_loc (loc, fn, 3, dest, src, len));
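/* Editor's illustrative sketch -- not part of the original builtins.c; the
   function name is hypothetical.  It shows the transformation performed by
   fold_builtin_strcpy above: with a constant source string, the call is
   rewritten as a memcpy of strlen (src) + 1 bytes (unless optimizing for
   size).  */

static void
example_strcpy_fold (char *dst)
{
  /* Folds to __builtin_memcpy (dst, "abc", 4).  */
  __builtin_strcpy (dst, "abc");
}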
9283 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9284 If SLEN is not NULL, it represents the length of the source string.
9285 Return NULL_TREE if no simplification can be made. */
9287 tree
9288 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9289 tree src, tree len, tree slen)
9291 tree fn;
9293 if (!validate_arg (dest, POINTER_TYPE)
9294 || !validate_arg (src, POINTER_TYPE)
9295 || !validate_arg (len, INTEGER_TYPE))
9296 return NULL_TREE;
9298 /* If the LEN parameter is zero, return DEST. */
9299 if (integer_zerop (len))
9300 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9302 /* We can't compare slen with len as constants below if len is not a
9303 constant. */
9304 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9305 return NULL_TREE;
9307 if (!slen)
9308 slen = c_strlen (src, 1);
9310 /* Now, we must be passed a constant src ptr parameter. */
9311 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9312 return NULL_TREE;
9314 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9316 /* We do not support simplification of this case, though we do
9317 support it when expanding trees into RTL. */
9318 /* FIXME: generate a call to __builtin_memset. */
9319 if (tree_int_cst_lt (slen, len))
9320 return NULL_TREE;
9322 /* OK, transform into builtin memcpy. */
9323 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9324 if (!fn)
9325 return NULL_TREE;
9326 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9327 build_call_expr_loc (loc, fn, 3, dest, src, len));
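/* Editor's illustrative sketch -- not part of the original builtins.c; the
   function name is hypothetical.  fold_builtin_strncpy above only transforms
   the call when the constant LEN is not larger than strlen (src) + 1,
   because then no zero padding is required.  */

static void
example_strncpy_fold (char *dst)
{
  /* strlen ("abcd") + 1 == 5 >= 3, so this folds to
     __builtin_memcpy (dst, "abcd", 3).  */
  __builtin_strncpy (dst, "abcd", 3);
}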
9330 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9331 arguments to the call, and TYPE is its return type.
9332 Return NULL_TREE if no simplification can be made. */
9334 static tree
9335 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9337 if (!validate_arg (arg1, POINTER_TYPE)
9338 || !validate_arg (arg2, INTEGER_TYPE)
9339 || !validate_arg (len, INTEGER_TYPE))
9340 return NULL_TREE;
9341 else
9343 const char *p1;
9345 if (TREE_CODE (arg2) != INTEGER_CST
9346 || !host_integerp (len, 1))
9347 return NULL_TREE;
9349 p1 = c_getstr (arg1);
9350 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9352 char c;
9353 const char *r;
9354 tree tem;
9356 if (target_char_cast (arg2, &c))
9357 return NULL_TREE;
9359 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
9361 if (r == NULL)
9362 return build_int_cst (TREE_TYPE (arg1), 0);
9364 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
9365 size_int (r - p1));
9366 return fold_convert_loc (loc, type, tem);
9368 return NULL_TREE;
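/* Editor's illustrative sketch -- not part of the original builtins.c; the
   function name is hypothetical.  When the haystack is a string constant and
   LEN does not read past its terminating NUL, the search in
   fold_builtin_memchr is performed at compile time.  */

static const void *
example_memchr_fold (void)
{
  /* Folds to the constant address "abcdef" + 2; a miss would fold to a
     null pointer instead.  */
  return __builtin_memchr ("abcdef", 'c', 4);
}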
9372 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9373 Return NULL_TREE if no simplification can be made. */
9375 static tree
9376 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9378 const char *p1, *p2;
9380 if (!validate_arg (arg1, POINTER_TYPE)
9381 || !validate_arg (arg2, POINTER_TYPE)
9382 || !validate_arg (len, INTEGER_TYPE))
9383 return NULL_TREE;
9385 /* If the LEN parameter is zero, return zero. */
9386 if (integer_zerop (len))
9387 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9388 arg1, arg2);
9390 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9391 if (operand_equal_p (arg1, arg2, 0))
9392 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9394 p1 = c_getstr (arg1);
9395 p2 = c_getstr (arg2);
9397 /* If all arguments are constant, and the value of len is not greater
9398 than the lengths of arg1 and arg2, evaluate at compile-time. */
9399 if (host_integerp (len, 1) && p1 && p2
9400 && compare_tree_int (len, strlen (p1) + 1) <= 0
9401 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9403 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9405 if (r > 0)
9406 return integer_one_node;
9407 else if (r < 0)
9408 return integer_minus_one_node;
9409 else
9410 return integer_zero_node;
9413 /* If the len parameter is one, return an expression corresponding to
9414 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9415 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9417 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9418 tree cst_uchar_ptr_node
9419 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9421 tree ind1
9422 = fold_convert_loc (loc, integer_type_node,
9423 build1 (INDIRECT_REF, cst_uchar_node,
9424 fold_convert_loc (loc,
9425 cst_uchar_ptr_node,
9426 arg1)));
9427 tree ind2
9428 = fold_convert_loc (loc, integer_type_node,
9429 build1 (INDIRECT_REF, cst_uchar_node,
9430 fold_convert_loc (loc,
9431 cst_uchar_ptr_node,
9432 arg2)));
9433 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9436 return NULL_TREE;
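/* Editor's illustrative sketch -- not part of the original builtins.c; the
   function name is hypothetical.  Besides full compile-time evaluation of
   constant operands, fold_builtin_memcmp turns a length-one comparison into
   a single byte subtraction.  */

static int
example_memcmp_fold (const void *p, const void *q)
{
  /* Folds to *(const unsigned char *) p - *(const unsigned char *) q.  */
  return __builtin_memcmp (p, q, 1);
}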
9439 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9440 Return NULL_TREE if no simplification can be made. */
9442 static tree
9443 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9445 const char *p1, *p2;
9447 if (!validate_arg (arg1, POINTER_TYPE)
9448 || !validate_arg (arg2, POINTER_TYPE))
9449 return NULL_TREE;
9451 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9452 if (operand_equal_p (arg1, arg2, 0))
9453 return integer_zero_node;
9455 p1 = c_getstr (arg1);
9456 p2 = c_getstr (arg2);
9458 if (p1 && p2)
9460 const int i = strcmp (p1, p2);
9461 if (i < 0)
9462 return integer_minus_one_node;
9463 else if (i > 0)
9464 return integer_one_node;
9465 else
9466 return integer_zero_node;
9469 /* If the second arg is "", return *(const unsigned char*)arg1. */
9470 if (p2 && *p2 == '\0')
9472 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9473 tree cst_uchar_ptr_node
9474 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9476 return fold_convert_loc (loc, integer_type_node,
9477 build1 (INDIRECT_REF, cst_uchar_node,
9478 fold_convert_loc (loc,
9479 cst_uchar_ptr_node,
9480 arg1)));
9483 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9484 if (p1 && *p1 == '\0')
9486 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9487 tree cst_uchar_ptr_node
9488 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9490 tree temp
9491 = fold_convert_loc (loc, integer_type_node,
9492 build1 (INDIRECT_REF, cst_uchar_node,
9493 fold_convert_loc (loc,
9494 cst_uchar_ptr_node,
9495 arg2)));
9496 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9499 return NULL_TREE;
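/* Editor's illustrative sketch -- not part of the original builtins.c; the
   function name is hypothetical.  It shows the empty-string case handled by
   fold_builtin_strcmp above.  */

static int
example_strcmp_fold (const char *s)
{
  /* The second argument is "", so this folds to
     *(const unsigned char *) s.  */
  return __builtin_strcmp (s, "");
}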
9502 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9503 Return NULL_TREE if no simplification can be made. */
9505 static tree
9506 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9508 const char *p1, *p2;
9510 if (!validate_arg (arg1, POINTER_TYPE)
9511 || !validate_arg (arg2, POINTER_TYPE)
9512 || !validate_arg (len, INTEGER_TYPE))
9513 return NULL_TREE;
9515 /* If the LEN parameter is zero, return zero. */
9516 if (integer_zerop (len))
9517 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9518 arg1, arg2);
9520 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9521 if (operand_equal_p (arg1, arg2, 0))
9522 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9524 p1 = c_getstr (arg1);
9525 p2 = c_getstr (arg2);
9527 if (host_integerp (len, 1) && p1 && p2)
9529 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9530 if (i > 0)
9531 return integer_one_node;
9532 else if (i < 0)
9533 return integer_minus_one_node;
9534 else
9535 return integer_zero_node;
9538 /* If the second arg is "", and the length is greater than zero,
9539 return *(const unsigned char*)arg1. */
9540 if (p2 && *p2 == '\0'
9541 && TREE_CODE (len) == INTEGER_CST
9542 && tree_int_cst_sgn (len) == 1)
9544 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9545 tree cst_uchar_ptr_node
9546 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9548 return fold_convert_loc (loc, integer_type_node,
9549 build1 (INDIRECT_REF, cst_uchar_node,
9550 fold_convert_loc (loc,
9551 cst_uchar_ptr_node,
9552 arg1)));
9555 /* If the first arg is "", and the length is greater than zero,
9556 return -*(const unsigned char*)arg2. */
9557 if (p1 && *p1 == '\0'
9558 && TREE_CODE (len) == INTEGER_CST
9559 && tree_int_cst_sgn (len) == 1)
9561 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9562 tree cst_uchar_ptr_node
9563 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9565 tree temp = fold_convert_loc (loc, integer_type_node,
9566 build1 (INDIRECT_REF, cst_uchar_node,
9567 fold_convert_loc (loc,
9568 cst_uchar_ptr_node,
9569 arg2)));
9570 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9573 /* If the len parameter is one, return an expression corresponding to
9574 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9575 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9577 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9578 tree cst_uchar_ptr_node
9579 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9581 tree ind1 = fold_convert_loc (loc, integer_type_node,
9582 build1 (INDIRECT_REF, cst_uchar_node,
9583 fold_convert_loc (loc,
9584 cst_uchar_ptr_node,
9585 arg1)));
9586 tree ind2 = fold_convert_loc (loc, integer_type_node,
9587 build1 (INDIRECT_REF, cst_uchar_node,
9588 fold_convert_loc (loc,
9589 cst_uchar_ptr_node,
9590 arg2)));
9591 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9594 return NULL_TREE;
9597 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9598 ARG. Return NULL_TREE if no simplification can be made. */
9600 static tree
9601 fold_builtin_signbit (location_t loc, tree arg, tree type)
9603 tree temp;
9605 if (!validate_arg (arg, REAL_TYPE))
9606 return NULL_TREE;
9608 /* If ARG is a compile-time constant, determine the result. */
9609 if (TREE_CODE (arg) == REAL_CST
9610 && !TREE_OVERFLOW (arg))
9612 REAL_VALUE_TYPE c;
9614 c = TREE_REAL_CST (arg);
9615 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9616 return fold_convert_loc (loc, type, temp);
9619 /* If ARG is non-negative, the result is always zero. */
9620 if (tree_expr_nonnegative_p (arg))
9621 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9623 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9624 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9625 return fold_build2_loc (loc, LT_EXPR, type, arg,
9626 build_real (TREE_TYPE (arg), dconst0));
9628 return NULL_TREE;
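/* Editor's illustrative sketch -- not part of the original builtins.c; the
   function name is hypothetical.  A constant argument lets
   fold_builtin_signbit compute the answer directly; a provably non-negative
   argument folds to 0, and otherwise formats without signed zeros use the
   comparison arg < 0.0.  */

static int
example_signbit_fold (void)
{
  /* Folds to the constant 1 (and to 0 for a non-negative constant).  */
  return __builtin_signbit (-2.5);
}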
9631 /* Fold function call to builtin copysign, copysignf or copysignl with
9632 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9633 be made. */
9635 static tree
9636 fold_builtin_copysign (location_t loc, tree fndecl,
9637 tree arg1, tree arg2, tree type)
9639 tree tem;
9641 if (!validate_arg (arg1, REAL_TYPE)
9642 || !validate_arg (arg2, REAL_TYPE))
9643 return NULL_TREE;
9645 /* copysign(X,X) is X. */
9646 if (operand_equal_p (arg1, arg2, 0))
9647 return fold_convert_loc (loc, type, arg1);
9649 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9650 if (TREE_CODE (arg1) == REAL_CST
9651 && TREE_CODE (arg2) == REAL_CST
9652 && !TREE_OVERFLOW (arg1)
9653 && !TREE_OVERFLOW (arg2))
9655 REAL_VALUE_TYPE c1, c2;
9657 c1 = TREE_REAL_CST (arg1);
9658 c2 = TREE_REAL_CST (arg2);
9659 /* c1.sign := c2.sign. */
9660 real_copysign (&c1, &c2);
9661 return build_real (type, c1);
9664 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9665 Remember to evaluate Y for side-effects. */
9666 if (tree_expr_nonnegative_p (arg2))
9667 return omit_one_operand_loc (loc, type,
9668 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9669 arg2);
9671 /* Strip sign changing operations for the first argument. */
9672 tem = fold_strip_sign_ops (arg1);
9673 if (tem)
9674 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9676 return NULL_TREE;
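/* Editor's illustrative sketch -- not part of the original builtins.c; the
   function name is hypothetical.  When the sign source is known to be
   non-negative, fold_builtin_copysign reduces the call to fabs.  */

static double
example_copysign_fold (double x)
{
  /* Folds to __builtin_fabs (x).  */
  return __builtin_copysign (x, 2.0);
}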
9679 /* Fold a call to builtin isascii with argument ARG. */
9681 static tree
9682 fold_builtin_isascii (location_t loc, tree arg)
9684 if (!validate_arg (arg, INTEGER_TYPE))
9685 return NULL_TREE;
9686 else
9688 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9689 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9690 build_int_cst (NULL_TREE,
9691 ~ (unsigned HOST_WIDE_INT) 0x7f));
9692 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9693 arg, integer_zero_node);
9697 /* Fold a call to builtin toascii with argument ARG. */
9699 static tree
9700 fold_builtin_toascii (location_t loc, tree arg)
9702 if (!validate_arg (arg, INTEGER_TYPE))
9703 return NULL_TREE;
9705 /* Transform toascii(c) -> (c & 0x7f). */
9706 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9707 build_int_cst (NULL_TREE, 0x7f));
9710 /* Fold a call to builtin isdigit with argument ARG. */
9712 static tree
9713 fold_builtin_isdigit (location_t loc, tree arg)
9715 if (!validate_arg (arg, INTEGER_TYPE))
9716 return NULL_TREE;
9717 else
9719 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9720 /* According to the C standard, isdigit is unaffected by locale.
9721 However, it definitely is affected by the target character set. */
9722 unsigned HOST_WIDE_INT target_digit0
9723 = lang_hooks.to_target_charset ('0');
9725 if (target_digit0 == 0)
9726 return NULL_TREE;
9728 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9729 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9730 build_int_cst (unsigned_type_node, target_digit0));
9731 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9732 build_int_cst (unsigned_type_node, 9));
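/* Editor's illustrative sketch -- not part of the original builtins.c; the
   function name is hypothetical.  It shows the rewrite performed by
   fold_builtin_isdigit above, which is locale independent but does depend on
   the target character set's '0'.  */

static int
example_isdigit_fold (int c)
{
  /* Folds to (unsigned) c - '0' <= 9.  */
  return __builtin_isdigit (c);
}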
9736 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9738 static tree
9739 fold_builtin_fabs (location_t loc, tree arg, tree type)
9741 if (!validate_arg (arg, REAL_TYPE))
9742 return NULL_TREE;
9744 arg = fold_convert_loc (loc, type, arg);
9745 if (TREE_CODE (arg) == REAL_CST)
9746 return fold_abs_const (arg, type);
9747 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9750 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9752 static tree
9753 fold_builtin_abs (location_t loc, tree arg, tree type)
9755 if (!validate_arg (arg, INTEGER_TYPE))
9756 return NULL_TREE;
9758 arg = fold_convert_loc (loc, type, arg);
9759 if (TREE_CODE (arg) == INTEGER_CST)
9760 return fold_abs_const (arg, type);
9761 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9764 /* Fold a call to builtin fmin or fmax. */
9766 static tree
9767 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9768 tree type, bool max)
9770 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9772 /* Calculate the result when the argument is a constant. */
9773 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9775 if (res)
9776 return res;
9778 /* If either argument is NaN, return the other one. Avoid the
9779 transformation if we get (and honor) a signalling NaN. Using
9780 omit_one_operand() ensures we create a non-lvalue. */
9781 if (TREE_CODE (arg0) == REAL_CST
9782 && real_isnan (&TREE_REAL_CST (arg0))
9783 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9784 || ! TREE_REAL_CST (arg0).signalling))
9785 return omit_one_operand_loc (loc, type, arg1, arg0);
9786 if (TREE_CODE (arg1) == REAL_CST
9787 && real_isnan (&TREE_REAL_CST (arg1))
9788 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9789 || ! TREE_REAL_CST (arg1).signalling))
9790 return omit_one_operand_loc (loc, type, arg0, arg1);
9792 /* Transform fmin/fmax(x,x) -> x. */
9793 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9794 return omit_one_operand_loc (loc, type, arg0, arg1);
9796 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9797 functions to return the numeric arg if the other one is NaN.
9798 These tree codes don't honor that, so only transform if
9799 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9800 handled, so we don't have to worry about it either. */
9801 if (flag_finite_math_only)
9802 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9803 fold_convert_loc (loc, type, arg0),
9804 fold_convert_loc (loc, type, arg1));
9806 return NULL_TREE;
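/* Editor's illustrative sketch -- not part of the original builtins.c; the
   function name is hypothetical.  Per the C99 rule honored above, a quiet
   NaN constant in one operand makes fmin/fmax fold to the other operand;
   with -ffinite-math-only the call becomes a plain MIN_EXPR/MAX_EXPR.  */

static double
example_fmin_fold (double x)
{
  /* Folds to x.  */
  return __builtin_fmin (x, __builtin_nan (""));
}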
9809 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9811 static tree
9812 fold_builtin_carg (location_t loc, tree arg, tree type)
9814 if (validate_arg (arg, COMPLEX_TYPE)
9815 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9817 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9819 if (atan2_fn)
9821 tree new_arg = builtin_save_expr (arg);
9822 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9823 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9824 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9828 return NULL_TREE;
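/* Editor's illustrative sketch -- not part of the original builtins.c; the
   function name is hypothetical.  fold_builtin_carg rewrites the call in
   terms of atan2 on the imaginary and real parts.  */

static double
example_carg_fold (_Complex double z)
{
  /* Folds to __builtin_atan2 (__imag__ z, __real__ z).  */
  return __builtin_carg (z);
}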
9831 /* Fold a call to builtin logb/ilogb. */
9833 static tree
9834 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9836 if (! validate_arg (arg, REAL_TYPE))
9837 return NULL_TREE;
9839 STRIP_NOPS (arg);
9841 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9843 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9845 switch (value->cl)
9847 case rvc_nan:
9848 case rvc_inf:
9849 /* If arg is Inf or NaN and we're logb, return it. */
9850 if (TREE_CODE (rettype) == REAL_TYPE)
9851 return fold_convert_loc (loc, rettype, arg);
9852 /* Fall through... */
9853 case rvc_zero:
9854 /* Zero may set errno and/or raise an exception for logb; also,
9855 for ilogb we don't know FP_ILOGB0. */
9856 return NULL_TREE;
9857 case rvc_normal:
9858 /* For normal numbers, proceed iff radix == 2. In GCC,
9859 normalized significands are in the range [0.5, 1.0). We
9860 want the exponent as if they were [1.0, 2.0) so get the
9861 exponent and subtract 1. */
9862 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9863 return fold_convert_loc (loc, rettype,
9864 build_int_cst (NULL_TREE,
9865 REAL_EXP (value)-1));
9866 break;
9870 return NULL_TREE;
9873 /* Fold a call to builtin significand, if radix == 2. */
9875 static tree
9876 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9878 if (! validate_arg (arg, REAL_TYPE))
9879 return NULL_TREE;
9881 STRIP_NOPS (arg);
9883 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9885 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9887 switch (value->cl)
9889 case rvc_zero:
9890 case rvc_nan:
9891 case rvc_inf:
9892 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9893 return fold_convert_loc (loc, rettype, arg);
9894 case rvc_normal:
9895 /* For normal numbers, proceed iff radix == 2. */
9896 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9898 REAL_VALUE_TYPE result = *value;
9899 /* In GCC, normalized significands are in the range [0.5,
9900 1.0). We want them to be [1.0, 2.0) so set the
9901 exponent to 1. */
9902 SET_REAL_EXP (&result, 1);
9903 return build_real (rettype, result);
9905 break;
9909 return NULL_TREE;
9912 /* Fold a call to builtin frexp; we can assume the base is 2. */
9914 static tree
9915 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9917 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9918 return NULL_TREE;
9920 STRIP_NOPS (arg0);
9922 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9923 return NULL_TREE;
9925 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9927 /* Proceed if a valid pointer type was passed in. */
9928 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9930 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9931 tree frac, exp;
9933 switch (value->cl)
9935 case rvc_zero:
9936 /* For +-0, return (*exp = 0, +-0). */
9937 exp = integer_zero_node;
9938 frac = arg0;
9939 break;
9940 case rvc_nan:
9941 case rvc_inf:
9942 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9943 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9944 case rvc_normal:
9946 /* Since the frexp function always expects base 2, and in
9947 GCC normalized significands are already in the range
9948 [0.5, 1.0), we have exactly what frexp wants. */
9949 REAL_VALUE_TYPE frac_rvt = *value;
9950 SET_REAL_EXP (&frac_rvt, 0);
9951 frac = build_real (rettype, frac_rvt);
9952 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9954 break;
9955 default:
9956 gcc_unreachable ();
9959 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9960 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9961 TREE_SIDE_EFFECTS (arg1) = 1;
9962 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9965 return NULL_TREE;
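/* Editor's illustrative sketch -- not part of the original builtins.c; the
   function name is hypothetical.  For a constant argument,
   fold_builtin_frexp emits the store to *EXP and the fraction as a
   COMPOUND_EXPR.  */

static double
example_frexp_fold (int *e)
{
  /* 8.0 == 0.5 * 2^4, so this folds to (*e = 4, 0.5).  */
  return __builtin_frexp (8.0, e);
}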
9968 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9969 then we can assume the base is two. If it's false, then we have to
9970 check the mode of the TYPE parameter in certain cases. */
9972 static tree
9973 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9974 tree type, bool ldexp)
9976 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9978 STRIP_NOPS (arg0);
9979 STRIP_NOPS (arg1);
9981 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9982 if (real_zerop (arg0) || integer_zerop (arg1)
9983 || (TREE_CODE (arg0) == REAL_CST
9984 && !real_isfinite (&TREE_REAL_CST (arg0))))
9985 return omit_one_operand_loc (loc, type, arg0, arg1);
9987 /* If both arguments are constant, then try to evaluate it. */
9988 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9989 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9990 && host_integerp (arg1, 0))
9992 /* Bound the maximum adjustment to twice the range of the
9993 mode's valid exponents. Use abs to ensure the range is
9994 positive as a sanity check. */
9995 const long max_exp_adj = 2 *
9996 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9997 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9999 /* Get the user-requested adjustment. */
10000 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
10002 /* The requested adjustment must be inside this range. This
10003 is a preliminary cap to avoid things like overflow, we
10004 may still fail to compute the result for other reasons. */
10005 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
10007 REAL_VALUE_TYPE initial_result;
10009 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
10011 /* Ensure we didn't overflow. */
10012 if (! real_isinf (&initial_result))
10014 const REAL_VALUE_TYPE trunc_result
10015 = real_value_truncate (TYPE_MODE (type), initial_result);
10017 /* Only proceed if the target mode can hold the
10018 resulting value. */
10019 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
10020 return build_real (type, trunc_result);
10026 return NULL_TREE;
10029 /* Fold a call to builtin modf. */
10031 static tree
10032 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
10034 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
10035 return NULL_TREE;
10037 STRIP_NOPS (arg0);
10039 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
10040 return NULL_TREE;
10042 arg1 = build_fold_indirect_ref_loc (loc, arg1);
10044 /* Proceed if a valid pointer type was passed in. */
10045 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
10047 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
10048 REAL_VALUE_TYPE trunc, frac;
10050 switch (value->cl)
10052 case rvc_nan:
10053 case rvc_zero:
10054 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
10055 trunc = frac = *value;
10056 break;
10057 case rvc_inf:
10058 /* For +-Inf, return (*arg1 = arg0, +-0). */
10059 frac = dconst0;
10060 frac.sign = value->sign;
10061 trunc = *value;
10062 break;
10063 case rvc_normal:
10064 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
10065 real_trunc (&trunc, VOIDmode, value);
10066 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
10067 /* If the original number was negative and already
10068 integral, then the fractional part is -0.0. */
10069 if (value->sign && frac.cl == rvc_zero)
10070 frac.sign = value->sign;
10071 break;
10074 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
10075 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
10076 build_real (rettype, trunc));
10077 TREE_SIDE_EFFECTS (arg1) = 1;
10078 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
10079 build_real (rettype, frac));
10082 return NULL_TREE;
10085 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
10086 ARG is the argument for the call. */
10088 static tree
10089 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10091 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10092 REAL_VALUE_TYPE r;
10094 if (!validate_arg (arg, REAL_TYPE))
10095 return NULL_TREE;
10097 switch (builtin_index)
10099 case BUILT_IN_ISINF:
10100 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10101 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10103 if (TREE_CODE (arg) == REAL_CST)
10105 r = TREE_REAL_CST (arg);
10106 if (real_isinf (&r))
10107 return real_compare (GT_EXPR, &r, &dconst0)
10108 ? integer_one_node : integer_minus_one_node;
10109 else
10110 return integer_zero_node;
10113 return NULL_TREE;
10115 case BUILT_IN_ISINF_SIGN:
10117 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10118 /* In a boolean context, GCC will fold the inner COND_EXPR to
10119 1. So e.g. "if (isinf_sign(x))" would be folded to just
10120 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10121 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10122 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
10123 tree tmp = NULL_TREE;
10125 arg = builtin_save_expr (arg);
10127 if (signbit_fn && isinf_fn)
10129 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10130 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10132 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10133 signbit_call, integer_zero_node);
10134 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10135 isinf_call, integer_zero_node);
10137 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10138 integer_minus_one_node, integer_one_node);
10139 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10140 isinf_call, tmp,
10141 integer_zero_node);
10144 return tmp;
10147 case BUILT_IN_ISFINITE:
10148 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10149 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10150 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10152 if (TREE_CODE (arg) == REAL_CST)
10154 r = TREE_REAL_CST (arg);
10155 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10158 return NULL_TREE;
10160 case BUILT_IN_ISNAN:
10161 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10162 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10164 if (TREE_CODE (arg) == REAL_CST)
10166 r = TREE_REAL_CST (arg);
10167 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10170 arg = builtin_save_expr (arg);
10171 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10173 default:
10174 gcc_unreachable ();
10178 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10179 This builtin will generate code to return the appropriate floating
10180 point classification depending on the value of the floating point
10181 number passed in. The possible return values must be supplied as
10182 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10183 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
10184 one floating point argument which is "type generic". */
10186 static tree
10187 fold_builtin_fpclassify (location_t loc, tree exp)
10189 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10190 arg, type, res, tmp;
10191 enum machine_mode mode;
10192 REAL_VALUE_TYPE r;
10193 char buf[128];
10195 /* Verify the required arguments in the original call. */
10196 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10197 INTEGER_TYPE, INTEGER_TYPE,
10198 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10199 return NULL_TREE;
10201 fp_nan = CALL_EXPR_ARG (exp, 0);
10202 fp_infinite = CALL_EXPR_ARG (exp, 1);
10203 fp_normal = CALL_EXPR_ARG (exp, 2);
10204 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10205 fp_zero = CALL_EXPR_ARG (exp, 4);
10206 arg = CALL_EXPR_ARG (exp, 5);
10207 type = TREE_TYPE (arg);
10208 mode = TYPE_MODE (type);
10209 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10211 /* fpclassify(x) ->
10212 isnan(x) ? FP_NAN :
10213 (fabs(x) == Inf ? FP_INFINITE :
10214 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10215 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10217 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10218 build_real (type, dconst0));
10219 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10220 tmp, fp_zero, fp_subnormal);
10222 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10223 real_from_string (&r, buf);
10224 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10225 arg, build_real (type, r));
10226 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10228 if (HONOR_INFINITIES (mode))
10230 real_inf (&r);
10231 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10232 build_real (type, r));
10233 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10234 fp_infinite, res);
10237 if (HONOR_NANS (mode))
10239 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10240 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10243 return res;
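/* Editor's illustrative sketch -- not part of the original builtins.c; the
   function name and classification values are hypothetical.  The leading int
   arguments supply the values returned for FP_NAN, FP_INFINITE, FP_NORMAL,
   FP_SUBNORMAL and FP_ZERO, and the call expands to the nested comparisons
   sketched in the comment above (with 0x1p-1022 as the normal/subnormal
   boundary on a typical IEEE double target).  */

static int
example_fpclassify_fold (double x)
{
  return __builtin_fpclassify (0, 1, 2, 3, 4, x);
}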
10246 /* Fold a call to an unordered comparison function such as
10247 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10248 being called and ARG0 and ARG1 are the arguments for the call.
10249 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10250 the opposite of the desired result. UNORDERED_CODE is used
10251 for modes that can hold NaNs and ORDERED_CODE is used for
10252 the rest. */
10254 static tree
10255 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10256 enum tree_code unordered_code,
10257 enum tree_code ordered_code)
10259 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10260 enum tree_code code;
10261 tree type0, type1;
10262 enum tree_code code0, code1;
10263 tree cmp_type = NULL_TREE;
10265 type0 = TREE_TYPE (arg0);
10266 type1 = TREE_TYPE (arg1);
10268 code0 = TREE_CODE (type0);
10269 code1 = TREE_CODE (type1);
10271 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10272 /* Choose the wider of two real types. */
10273 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10274 ? type0 : type1;
10275 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10276 cmp_type = type0;
10277 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10278 cmp_type = type1;
10280 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10281 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10283 if (unordered_code == UNORDERED_EXPR)
10285 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10286 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10287 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10290 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10291 : ordered_code;
10292 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10293 fold_build2_loc (loc, code, type, arg0, arg1));
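/* Editor's illustrative sketch -- not part of the original builtins.c; the
   function name is hypothetical.  fold_builtin_unordered_cmp builds the
   negation of the opposite comparison, using the unordered-or form when NaNs
   are honored so that a NaN operand yields 0 without raising an
   invalid-operation exception.  */

static int
example_isgreater_fold (double x, double y)
{
  /* Folds to !(x <= y) with the "unordered or less-equal" code.  */
  return __builtin_isgreater (x, y);
}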
10296 /* Fold a call to built-in function FNDECL with 0 arguments.
10297 IGNORE is true if the result of the function call is ignored. This
10298 function returns NULL_TREE if no simplification was possible. */
10300 static tree
10301 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10303 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10304 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10305 switch (fcode)
10307 CASE_FLT_FN (BUILT_IN_INF):
10308 case BUILT_IN_INFD32:
10309 case BUILT_IN_INFD64:
10310 case BUILT_IN_INFD128:
10311 return fold_builtin_inf (loc, type, true);
10313 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10314 return fold_builtin_inf (loc, type, false);
10316 case BUILT_IN_CLASSIFY_TYPE:
10317 return fold_builtin_classify_type (NULL_TREE);
10319 default:
10320 break;
10322 return NULL_TREE;
10325 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10326 IGNORE is true if the result of the function call is ignored. This
10327 function returns NULL_TREE if no simplification was possible. */
10329 static tree
10330 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10332 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10333 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10334 switch (fcode)
10337 case BUILT_IN_CONSTANT_P:
10339 tree val = fold_builtin_constant_p (arg0);
10341 /* Gimplification will pull the CALL_EXPR for the builtin out of
10342 an if condition. When not optimizing, we'll not CSE it back.
10343 To avoid link-error regressions, return false now. */
10344 if (!val && !optimize)
10345 val = integer_zero_node;
10347 return val;
10350 case BUILT_IN_CLASSIFY_TYPE:
10351 return fold_builtin_classify_type (arg0);
10353 case BUILT_IN_STRLEN:
10354 return fold_builtin_strlen (loc, arg0);
10356 CASE_FLT_FN (BUILT_IN_FABS):
10357 return fold_builtin_fabs (loc, arg0, type);
10359 case BUILT_IN_ABS:
10360 case BUILT_IN_LABS:
10361 case BUILT_IN_LLABS:
10362 case BUILT_IN_IMAXABS:
10363 return fold_builtin_abs (loc, arg0, type);
10365 CASE_FLT_FN (BUILT_IN_CONJ):
10366 if (validate_arg (arg0, COMPLEX_TYPE)
10367 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10368 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10369 break;
10371 CASE_FLT_FN (BUILT_IN_CREAL):
10372 if (validate_arg (arg0, COMPLEX_TYPE)
10373 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10374 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10375 break;
10377 CASE_FLT_FN (BUILT_IN_CIMAG):
10378 if (validate_arg (arg0, COMPLEX_TYPE))
10379 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10380 break;
10382 CASE_FLT_FN (BUILT_IN_CCOS):
10383 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
10385 CASE_FLT_FN (BUILT_IN_CCOSH):
10386 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
10388 #ifdef HAVE_mpc
10389 CASE_FLT_FN (BUILT_IN_CSIN):
10390 if (validate_arg (arg0, COMPLEX_TYPE)
10391 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10392 return do_mpc_arg1 (arg0, type, mpc_sin);
10393 break;
10395 CASE_FLT_FN (BUILT_IN_CSINH):
10396 if (validate_arg (arg0, COMPLEX_TYPE)
10397 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10398 return do_mpc_arg1 (arg0, type, mpc_sinh);
10399 break;
10401 CASE_FLT_FN (BUILT_IN_CTAN):
10402 if (validate_arg (arg0, COMPLEX_TYPE)
10403 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10404 return do_mpc_arg1 (arg0, type, mpc_tan);
10405 break;
10407 CASE_FLT_FN (BUILT_IN_CTANH):
10408 if (validate_arg (arg0, COMPLEX_TYPE)
10409 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10410 return do_mpc_arg1 (arg0, type, mpc_tanh);
10411 break;
10413 CASE_FLT_FN (BUILT_IN_CLOG):
10414 if (validate_arg (arg0, COMPLEX_TYPE)
10415 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10416 return do_mpc_arg1 (arg0, type, mpc_log);
10417 break;
10419 CASE_FLT_FN (BUILT_IN_CSQRT):
10420 if (validate_arg (arg0, COMPLEX_TYPE)
10421 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10422 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10423 break;
10424 #endif
10426 CASE_FLT_FN (BUILT_IN_CABS):
10427 return fold_builtin_cabs (loc, arg0, type, fndecl);
10429 CASE_FLT_FN (BUILT_IN_CARG):
10430 return fold_builtin_carg (loc, arg0, type);
10432 CASE_FLT_FN (BUILT_IN_SQRT):
10433 return fold_builtin_sqrt (loc, arg0, type);
10435 CASE_FLT_FN (BUILT_IN_CBRT):
10436 return fold_builtin_cbrt (loc, arg0, type);
10438 CASE_FLT_FN (BUILT_IN_ASIN):
10439 if (validate_arg (arg0, REAL_TYPE))
10440 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10441 &dconstm1, &dconst1, true);
10442 break;
10444 CASE_FLT_FN (BUILT_IN_ACOS):
10445 if (validate_arg (arg0, REAL_TYPE))
10446 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10447 &dconstm1, &dconst1, true);
10448 break;
10450 CASE_FLT_FN (BUILT_IN_ATAN):
10451 if (validate_arg (arg0, REAL_TYPE))
10452 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10453 break;
10455 CASE_FLT_FN (BUILT_IN_ASINH):
10456 if (validate_arg (arg0, REAL_TYPE))
10457 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10458 break;
10460 CASE_FLT_FN (BUILT_IN_ACOSH):
10461 if (validate_arg (arg0, REAL_TYPE))
10462 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10463 &dconst1, NULL, true);
10464 break;
10466 CASE_FLT_FN (BUILT_IN_ATANH):
10467 if (validate_arg (arg0, REAL_TYPE))
10468 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10469 &dconstm1, &dconst1, false);
10470 break;
10472 CASE_FLT_FN (BUILT_IN_SIN):
10473 if (validate_arg (arg0, REAL_TYPE))
10474 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10475 break;
10477 CASE_FLT_FN (BUILT_IN_COS):
10478 return fold_builtin_cos (loc, arg0, type, fndecl);
10480 CASE_FLT_FN (BUILT_IN_TAN):
10481 return fold_builtin_tan (arg0, type);
10483 CASE_FLT_FN (BUILT_IN_CEXP):
10484 return fold_builtin_cexp (loc, arg0, type);
10486 CASE_FLT_FN (BUILT_IN_CEXPI):
10487 if (validate_arg (arg0, REAL_TYPE))
10488 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10489 break;
10491 CASE_FLT_FN (BUILT_IN_SINH):
10492 if (validate_arg (arg0, REAL_TYPE))
10493 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10494 break;
10496 CASE_FLT_FN (BUILT_IN_COSH):
10497 return fold_builtin_cosh (loc, arg0, type, fndecl);
10499 CASE_FLT_FN (BUILT_IN_TANH):
10500 if (validate_arg (arg0, REAL_TYPE))
10501 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10502 break;
10504 CASE_FLT_FN (BUILT_IN_ERF):
10505 if (validate_arg (arg0, REAL_TYPE))
10506 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10507 break;
10509 CASE_FLT_FN (BUILT_IN_ERFC):
10510 if (validate_arg (arg0, REAL_TYPE))
10511 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10512 break;
10514 CASE_FLT_FN (BUILT_IN_TGAMMA):
10515 if (validate_arg (arg0, REAL_TYPE))
10516 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10517 break;
10519 CASE_FLT_FN (BUILT_IN_EXP):
10520 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10522 CASE_FLT_FN (BUILT_IN_EXP2):
10523 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10525 CASE_FLT_FN (BUILT_IN_EXP10):
10526 CASE_FLT_FN (BUILT_IN_POW10):
10527 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10529 CASE_FLT_FN (BUILT_IN_EXPM1):
10530 if (validate_arg (arg0, REAL_TYPE))
10531 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10532 break;
10534 CASE_FLT_FN (BUILT_IN_LOG):
10535 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10537 CASE_FLT_FN (BUILT_IN_LOG2):
10538 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10540 CASE_FLT_FN (BUILT_IN_LOG10):
10541 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10543 CASE_FLT_FN (BUILT_IN_LOG1P):
10544 if (validate_arg (arg0, REAL_TYPE))
10545 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10546 &dconstm1, NULL, false);
10547 break;
10549 CASE_FLT_FN (BUILT_IN_J0):
10550 if (validate_arg (arg0, REAL_TYPE))
10551 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10552 NULL, NULL, 0);
10553 break;
10555 CASE_FLT_FN (BUILT_IN_J1):
10556 if (validate_arg (arg0, REAL_TYPE))
10557 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10558 NULL, NULL, 0);
10559 break;
10561 CASE_FLT_FN (BUILT_IN_Y0):
10562 if (validate_arg (arg0, REAL_TYPE))
10563 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10564 &dconst0, NULL, false);
10565 break;
10567 CASE_FLT_FN (BUILT_IN_Y1):
10568 if (validate_arg (arg0, REAL_TYPE))
10569 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10570 &dconst0, NULL, false);
10571 break;
10573 CASE_FLT_FN (BUILT_IN_NAN):
10574 case BUILT_IN_NAND32:
10575 case BUILT_IN_NAND64:
10576 case BUILT_IN_NAND128:
10577 return fold_builtin_nan (arg0, type, true);
10579 CASE_FLT_FN (BUILT_IN_NANS):
10580 return fold_builtin_nan (arg0, type, false);
10582 CASE_FLT_FN (BUILT_IN_FLOOR):
10583 return fold_builtin_floor (loc, fndecl, arg0);
10585 CASE_FLT_FN (BUILT_IN_CEIL):
10586 return fold_builtin_ceil (loc, fndecl, arg0);
10588 CASE_FLT_FN (BUILT_IN_TRUNC):
10589 return fold_builtin_trunc (loc, fndecl, arg0);
10591 CASE_FLT_FN (BUILT_IN_ROUND):
10592 return fold_builtin_round (loc, fndecl, arg0);
10594 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10595 CASE_FLT_FN (BUILT_IN_RINT):
10596 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10598 CASE_FLT_FN (BUILT_IN_LCEIL):
10599 CASE_FLT_FN (BUILT_IN_LLCEIL):
10600 CASE_FLT_FN (BUILT_IN_LFLOOR):
10601 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10602 CASE_FLT_FN (BUILT_IN_LROUND):
10603 CASE_FLT_FN (BUILT_IN_LLROUND):
10604 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10606 CASE_FLT_FN (BUILT_IN_LRINT):
10607 CASE_FLT_FN (BUILT_IN_LLRINT):
10608 return fold_fixed_mathfn (loc, fndecl, arg0);
10610 case BUILT_IN_BSWAP32:
10611 case BUILT_IN_BSWAP64:
10612 return fold_builtin_bswap (fndecl, arg0);
10614 CASE_INT_FN (BUILT_IN_FFS):
10615 CASE_INT_FN (BUILT_IN_CLZ):
10616 CASE_INT_FN (BUILT_IN_CTZ):
10617 CASE_INT_FN (BUILT_IN_POPCOUNT):
10618 CASE_INT_FN (BUILT_IN_PARITY):
10619 return fold_builtin_bitop (fndecl, arg0);
10621 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10622 return fold_builtin_signbit (loc, arg0, type);
10624 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10625 return fold_builtin_significand (loc, arg0, type);
10627 CASE_FLT_FN (BUILT_IN_ILOGB):
10628 CASE_FLT_FN (BUILT_IN_LOGB):
10629 return fold_builtin_logb (loc, arg0, type);
10631 case BUILT_IN_ISASCII:
10632 return fold_builtin_isascii (loc, arg0);
10634 case BUILT_IN_TOASCII:
10635 return fold_builtin_toascii (loc, arg0);
10637 case BUILT_IN_ISDIGIT:
10638 return fold_builtin_isdigit (loc, arg0);
10640 CASE_FLT_FN (BUILT_IN_FINITE):
10641 case BUILT_IN_FINITED32:
10642 case BUILT_IN_FINITED64:
10643 case BUILT_IN_FINITED128:
10644 case BUILT_IN_ISFINITE:
10645 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10647 CASE_FLT_FN (BUILT_IN_ISINF):
10648 case BUILT_IN_ISINFD32:
10649 case BUILT_IN_ISINFD64:
10650 case BUILT_IN_ISINFD128:
10651 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10653 case BUILT_IN_ISINF_SIGN:
10654 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10656 CASE_FLT_FN (BUILT_IN_ISNAN):
10657 case BUILT_IN_ISNAND32:
10658 case BUILT_IN_ISNAND64:
10659 case BUILT_IN_ISNAND128:
10660 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10662 case BUILT_IN_PRINTF:
10663 case BUILT_IN_PRINTF_UNLOCKED:
10664 case BUILT_IN_VPRINTF:
10665 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10667 default:
10668 break;
10671 return NULL_TREE;
10675 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10676 IGNORE is true if the result of the function call is ignored. This
10677 function returns NULL_TREE if no simplification was possible. */
10679 static tree
10680 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10682 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10683 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10685 switch (fcode)
10687 CASE_FLT_FN (BUILT_IN_JN):
10688 if (validate_arg (arg0, INTEGER_TYPE)
10689 && validate_arg (arg1, REAL_TYPE))
10690 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10691 break;
10693 CASE_FLT_FN (BUILT_IN_YN):
10694 if (validate_arg (arg0, INTEGER_TYPE)
10695 && validate_arg (arg1, REAL_TYPE))
10696 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10697 &dconst0, false);
10698 break;
10700 CASE_FLT_FN (BUILT_IN_DREM):
10701 CASE_FLT_FN (BUILT_IN_REMAINDER):
10702 if (validate_arg (arg0, REAL_TYPE)
10703 && validate_arg(arg1, REAL_TYPE))
10704 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10705 break;
10707 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10708 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10709 if (validate_arg (arg0, REAL_TYPE)
10710 && validate_arg(arg1, POINTER_TYPE))
10711 return do_mpfr_lgamma_r (arg0, arg1, type);
10712 break;
10714 CASE_FLT_FN (BUILT_IN_ATAN2):
10715 if (validate_arg (arg0, REAL_TYPE)
10716 && validate_arg(arg1, REAL_TYPE))
10717 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10718 break;
10720 CASE_FLT_FN (BUILT_IN_FDIM):
10721 if (validate_arg (arg0, REAL_TYPE)
10722 && validate_arg(arg1, REAL_TYPE))
10723 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10724 break;
10726 CASE_FLT_FN (BUILT_IN_HYPOT):
10727 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10729 #ifdef HAVE_mpc_pow
10730 CASE_FLT_FN (BUILT_IN_CPOW):
10731 if (validate_arg (arg0, COMPLEX_TYPE)
10732 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10733 && validate_arg (arg1, COMPLEX_TYPE)
10734 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10735 return do_mpc_arg2 (arg0, arg1, type, mpc_pow);
10736 break;
10737 #endif
10739 CASE_FLT_FN (BUILT_IN_LDEXP):
10740 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10741 CASE_FLT_FN (BUILT_IN_SCALBN):
10742 CASE_FLT_FN (BUILT_IN_SCALBLN):
10743 return fold_builtin_load_exponent (loc, arg0, arg1,
10744 type, /*ldexp=*/false);
10746 CASE_FLT_FN (BUILT_IN_FREXP):
10747 return fold_builtin_frexp (loc, arg0, arg1, type);
10749 CASE_FLT_FN (BUILT_IN_MODF):
10750 return fold_builtin_modf (loc, arg0, arg1, type);
10752 case BUILT_IN_BZERO:
10753 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10755 case BUILT_IN_FPUTS:
10756 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10758 case BUILT_IN_FPUTS_UNLOCKED:
10759 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10761 case BUILT_IN_STRSTR:
10762 return fold_builtin_strstr (loc, arg0, arg1, type);
10764 case BUILT_IN_STRCAT:
10765 return fold_builtin_strcat (loc, arg0, arg1);
10767 case BUILT_IN_STRSPN:
10768 return fold_builtin_strspn (loc, arg0, arg1);
10770 case BUILT_IN_STRCSPN:
10771 return fold_builtin_strcspn (loc, arg0, arg1);
10773 case BUILT_IN_STRCHR:
10774 case BUILT_IN_INDEX:
10775 return fold_builtin_strchr (loc, arg0, arg1, type);
10777 case BUILT_IN_STRRCHR:
10778 case BUILT_IN_RINDEX:
10779 return fold_builtin_strrchr (loc, arg0, arg1, type);
10781 case BUILT_IN_STRCPY:
10782 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10784 case BUILT_IN_STPCPY:
10785 if (ignore)
10787 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10788 if (!fn)
10789 break;
10791 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10793 break;
10795 case BUILT_IN_STRCMP:
10796 return fold_builtin_strcmp (loc, arg0, arg1);
10798 case BUILT_IN_STRPBRK:
10799 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10801 case BUILT_IN_EXPECT:
10802 return fold_builtin_expect (loc, arg0, arg1);
10804 CASE_FLT_FN (BUILT_IN_POW):
10805 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10807 CASE_FLT_FN (BUILT_IN_POWI):
10808 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10810 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10811 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10813 CASE_FLT_FN (BUILT_IN_FMIN):
10814 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10816 CASE_FLT_FN (BUILT_IN_FMAX):
10817 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10819 case BUILT_IN_ISGREATER:
10820 return fold_builtin_unordered_cmp (loc, fndecl,
10821 arg0, arg1, UNLE_EXPR, LE_EXPR);
10822 case BUILT_IN_ISGREATEREQUAL:
10823 return fold_builtin_unordered_cmp (loc, fndecl,
10824 arg0, arg1, UNLT_EXPR, LT_EXPR);
10825 case BUILT_IN_ISLESS:
10826 return fold_builtin_unordered_cmp (loc, fndecl,
10827 arg0, arg1, UNGE_EXPR, GE_EXPR);
10828 case BUILT_IN_ISLESSEQUAL:
10829 return fold_builtin_unordered_cmp (loc, fndecl,
10830 arg0, arg1, UNGT_EXPR, GT_EXPR);
10831 case BUILT_IN_ISLESSGREATER:
10832 return fold_builtin_unordered_cmp (loc, fndecl,
10833 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10834 case BUILT_IN_ISUNORDERED:
10835 return fold_builtin_unordered_cmp (loc, fndecl,
10836 arg0, arg1, UNORDERED_EXPR,
10837 NOP_EXPR);
10839 /* We do the folding for va_start in the expander. */
10840 case BUILT_IN_VA_START:
10841 break;
10843 case BUILT_IN_SPRINTF:
10844 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10846 case BUILT_IN_OBJECT_SIZE:
10847 return fold_builtin_object_size (arg0, arg1);
10849 case BUILT_IN_PRINTF:
10850 case BUILT_IN_PRINTF_UNLOCKED:
10851 case BUILT_IN_VPRINTF:
10852 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10854 case BUILT_IN_PRINTF_CHK:
10855 case BUILT_IN_VPRINTF_CHK:
10856 if (!validate_arg (arg0, INTEGER_TYPE)
10857 || TREE_SIDE_EFFECTS (arg0))
10858 return NULL_TREE;
10859 else
10860 return fold_builtin_printf (loc, fndecl,
10861 arg1, NULL_TREE, ignore, fcode);
10862 break;
10864 case BUILT_IN_FPRINTF:
10865 case BUILT_IN_FPRINTF_UNLOCKED:
10866 case BUILT_IN_VFPRINTF:
10867 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10868 ignore, fcode);
10870 default:
10871 break;
10873 return NULL_TREE;
10876 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10877 and ARG2. IGNORE is true if the result of the function call is ignored.
10878 This function returns NULL_TREE if no simplification was possible. */
10880 static tree
10881 fold_builtin_3 (location_t loc, tree fndecl,
10882 tree arg0, tree arg1, tree arg2, bool ignore)
10884 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10885 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10886 switch (fcode)
10889 CASE_FLT_FN (BUILT_IN_SINCOS):
10890 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10892 CASE_FLT_FN (BUILT_IN_FMA):
10893 if (validate_arg (arg0, REAL_TYPE)
10894 && validate_arg(arg1, REAL_TYPE)
10895 && validate_arg(arg2, REAL_TYPE))
10896 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10897 break;
10899 CASE_FLT_FN (BUILT_IN_REMQUO):
10900 if (validate_arg (arg0, REAL_TYPE)
10901 && validate_arg(arg1, REAL_TYPE)
10902 && validate_arg(arg2, POINTER_TYPE))
10903 return do_mpfr_remquo (arg0, arg1, arg2);
10904 break;
10906 case BUILT_IN_MEMSET:
10907 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10909 case BUILT_IN_BCOPY:
10910 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10911 void_type_node, true, /*endp=*/3);
10913 case BUILT_IN_MEMCPY:
10914 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10915 type, ignore, /*endp=*/0);
10917 case BUILT_IN_MEMPCPY:
10918 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10919 type, ignore, /*endp=*/1);
10921 case BUILT_IN_MEMMOVE:
10922 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10923 type, ignore, /*endp=*/3);
10925 case BUILT_IN_STRNCAT:
10926 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10928 case BUILT_IN_STRNCPY:
10929 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10931 case BUILT_IN_STRNCMP:
10932 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10934 case BUILT_IN_MEMCHR:
10935 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10937 case BUILT_IN_BCMP:
10938 case BUILT_IN_MEMCMP:
10939 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10941 case BUILT_IN_SPRINTF:
10942 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10944 case BUILT_IN_STRCPY_CHK:
10945 case BUILT_IN_STPCPY_CHK:
10946 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10947 ignore, fcode);
10949 case BUILT_IN_STRCAT_CHK:
10950 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10952 case BUILT_IN_PRINTF_CHK:
10953 case BUILT_IN_VPRINTF_CHK:
10954 if (!validate_arg (arg0, INTEGER_TYPE)
10955 || TREE_SIDE_EFFECTS (arg0))
10956 return NULL_TREE;
10957 else
10958 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10959 break;
10961 case BUILT_IN_FPRINTF:
10962 case BUILT_IN_FPRINTF_UNLOCKED:
10963 case BUILT_IN_VFPRINTF:
10964 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10965 ignore, fcode);
10967 case BUILT_IN_FPRINTF_CHK:
10968 case BUILT_IN_VFPRINTF_CHK:
10969 if (!validate_arg (arg1, INTEGER_TYPE)
10970 || TREE_SIDE_EFFECTS (arg1))
10971 return NULL_TREE;
10972 else
10973 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10974 ignore, fcode);
10976 default:
10977 break;
10979 return NULL_TREE;
10982 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10983 ARG2, and ARG3. IGNORE is true if the result of the function call is
10984 ignored. This function returns NULL_TREE if no simplification was
10985 possible. */
10987 static tree
10988 fold_builtin_4 (location_t loc, tree fndecl,
10989 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10991 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10993 switch (fcode)
10995 case BUILT_IN_MEMCPY_CHK:
10996 case BUILT_IN_MEMPCPY_CHK:
10997 case BUILT_IN_MEMMOVE_CHK:
10998 case BUILT_IN_MEMSET_CHK:
10999 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
11000 NULL_TREE, ignore,
11001 DECL_FUNCTION_CODE (fndecl));
11003 case BUILT_IN_STRNCPY_CHK:
11004 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
11006 case BUILT_IN_STRNCAT_CHK:
11007 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
11009 case BUILT_IN_FPRINTF_CHK:
11010 case BUILT_IN_VFPRINTF_CHK:
11011 if (!validate_arg (arg1, INTEGER_TYPE)
11012 || TREE_SIDE_EFFECTS (arg1))
11013 return NULL_TREE;
11014 else
11015 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
11016 ignore, fcode);
11017 break;
11019 default:
11020 break;
11022 return NULL_TREE;
11025 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11026 arguments, where NARGS <= 4. IGNORE is true if the result of the
11027 function call is ignored. This function returns NULL_TREE if no
11028 simplification was possible. Note that this only folds builtins with
11029 fixed argument patterns. Foldings that do varargs-to-varargs
11030 transformations, or that match calls with more than 4 arguments,
11031 need to be handled with fold_builtin_varargs instead. */
11033 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11035 static tree
11036 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11038 tree ret = NULL_TREE;
11040 switch (nargs)
11042 case 0:
11043 ret = fold_builtin_0 (loc, fndecl, ignore);
11044 break;
11045 case 1:
11046 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11047 break;
11048 case 2:
11049 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11050 break;
11051 case 3:
11052 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11053 break;
11054 case 4:
11055 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11056 ignore);
11057 break;
11058 default:
11059 break;
11061 if (ret)
11063 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11064 SET_EXPR_LOCATION (ret, loc);
11065 TREE_NO_WARNING (ret) = 1;
11066 return ret;
11068 return NULL_TREE;
11071 /* Builtins with folding operations that operate on "..." arguments
11072 need special handling; we need to store the arguments in a convenient
11073 data structure before attempting any folding. Fortunately there are
11074 only a few builtins that fall into this category. FNDECL is the
11075 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11076 result of the function call is ignored. */
11078 static tree
11079 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11080 bool ignore ATTRIBUTE_UNUSED)
11082 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11083 tree ret = NULL_TREE;
11085 switch (fcode)
11087 case BUILT_IN_SPRINTF_CHK:
11088 case BUILT_IN_VSPRINTF_CHK:
11089 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
11090 break;
11092 case BUILT_IN_SNPRINTF_CHK:
11093 case BUILT_IN_VSNPRINTF_CHK:
11094 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
11095 break;
11097 case BUILT_IN_FPCLASSIFY:
11098 ret = fold_builtin_fpclassify (loc, exp);
11099 break;
11101 default:
11102 break;
11104 if (ret)
11106 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11107 SET_EXPR_LOCATION (ret, loc);
11108 TREE_NO_WARNING (ret) = 1;
11109 return ret;
11111 return NULL_TREE;
11114 /* Return true if FNDECL shouldn't be folded right now.
11115 If a built-in function has an inline always_inline wrapper,
11116 defer folding it until after always_inline functions have
11117 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
11118 might not be performed. */
11120 static bool
11121 avoid_folding_inline_builtin (tree fndecl)
11123 return (DECL_DECLARED_INLINE_P (fndecl)
11124 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11125 && cfun
11126 && !cfun->always_inline_functions_inlined
11127 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11130 /* A wrapper function for builtin folding that prevents warnings for
11131 "statement without effect" and the like, caused by removing the
11132 call node earlier than the warning is generated. */
11134 tree
11135 fold_call_expr (location_t loc, tree exp, bool ignore)
11137 tree ret = NULL_TREE;
11138 tree fndecl = get_callee_fndecl (exp);
11139 if (fndecl
11140 && TREE_CODE (fndecl) == FUNCTION_DECL
11141 && DECL_BUILT_IN (fndecl)
11142 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11143 yet. Defer folding until we see all the arguments
11144 (after inlining). */
11145 && !CALL_EXPR_VA_ARG_PACK (exp))
11147 int nargs = call_expr_nargs (exp);
11149 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11150 instead last argument is __builtin_va_arg_pack (). Defer folding
11151 even in that case, until arguments are finalized. */
11152 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11154 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11155 if (fndecl2
11156 && TREE_CODE (fndecl2) == FUNCTION_DECL
11157 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11158 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11159 return NULL_TREE;
11162 if (avoid_folding_inline_builtin (fndecl))
11163 return NULL_TREE;
11165 /* FIXME: Don't use a list in this interface. */
11166 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11167 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
11168 else
11170 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11172 tree *args = CALL_EXPR_ARGP (exp);
11173 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11175 if (!ret)
11176 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11177 if (ret)
11178 return ret;
11181 return NULL_TREE;
11184 /* Conveniently construct a function call expression. FNDECL names the
11185 function to be called and ARGLIST is a TREE_LIST of arguments. */
11187 tree
11188 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
11190 tree fntype = TREE_TYPE (fndecl);
11191 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11192 int n = list_length (arglist);
11193 tree *argarray = (tree *) alloca (n * sizeof (tree));
11194 int i;
11196 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
11197 argarray[i] = TREE_VALUE (arglist);
11198 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11201 /* Conveniently construct a function call expression. FNDECL names the
11202 function to be called, N is the number of arguments, and the "..."
11203 parameters are the argument expressions. */
11205 tree
11206 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11208 va_list ap;
11209 tree fntype = TREE_TYPE (fndecl);
11210 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11211 tree *argarray = (tree *) alloca (n * sizeof (tree));
11212 int i;
11214 va_start (ap, n);
11215 for (i = 0; i < n; i++)
11216 argarray[i] = va_arg (ap, tree);
11217 va_end (ap);
11218 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
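/* For illustration, a sketch of a typical use of this interface (the
   argument values here are hypothetical, not taken from any particular
   caller):

     tree fn = implicit_built_in_decls[BUILT_IN_STRCHR];
     if (fn)
       call = build_call_expr_loc (loc, fn, 2, str,
                                   build_int_cst (NULL_TREE, 'a'));

   The callee DECL comes first, then the number of arguments, then the
   argument trees; the resulting CALL_EXPR is folded immediately through
   fold_builtin_call_array.  */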
11221 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11222 N arguments are passed in the array ARGARRAY. */
11224 tree
11225 fold_builtin_call_array (location_t loc, tree type,
11226 tree fn,
11227 int n,
11228 tree *argarray)
11230 tree ret = NULL_TREE;
11231 int i;
11232 tree exp;
11234 if (TREE_CODE (fn) == ADDR_EXPR)
11236 tree fndecl = TREE_OPERAND (fn, 0);
11237 if (TREE_CODE (fndecl) == FUNCTION_DECL
11238 && DECL_BUILT_IN (fndecl))
11240 /* If last argument is __builtin_va_arg_pack (), arguments to this
11241 function are not finalized yet. Defer folding until they are. */
11242 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11244 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11245 if (fndecl2
11246 && TREE_CODE (fndecl2) == FUNCTION_DECL
11247 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11248 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11249 return build_call_array_loc (loc, type, fn, n, argarray);
11251 if (avoid_folding_inline_builtin (fndecl))
11252 return build_call_array_loc (loc, type, fn, n, argarray);
11253 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11255 tree arglist = NULL_TREE;
11256 for (i = n - 1; i >= 0; i--)
11257 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
11258 ret = targetm.fold_builtin (fndecl, arglist, false);
11259 if (ret)
11260 return ret;
11261 return build_call_array_loc (loc, type, fn, n, argarray);
11263 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11265 /* First try the transformations that don't require consing up
11266 an exp. */
11267 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11268 if (ret)
11269 return ret;
11272 /* If we got this far, we need to build an exp. */
11273 exp = build_call_array_loc (loc, type, fn, n, argarray);
11274 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11275 return ret ? ret : exp;
11279 return build_call_array_loc (loc, type, fn, n, argarray);
11282 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11283 along with N new arguments specified as the "..." parameters. SKIP
11284 is the number of arguments in EXP to be omitted. This function is used
11285 to do varargs-to-varargs transformations. */
11287 static tree
11288 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11290 int oldnargs = call_expr_nargs (exp);
11291 int nargs = oldnargs - skip + n;
11292 tree fntype = TREE_TYPE (fndecl);
11293 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11294 tree *buffer;
11296 if (n > 0)
11298 int i, j;
11299 va_list ap;
11301 buffer = XALLOCAVEC (tree, nargs);
11302 va_start (ap, n);
11303 for (i = 0; i < n; i++)
11304 buffer[i] = va_arg (ap, tree);
11305 va_end (ap);
11306 for (j = skip; j < oldnargs; j++, i++)
11307 buffer[i] = CALL_EXPR_ARG (exp, j);
11309 else
11310 buffer = CALL_EXPR_ARGP (exp) + skip;
11312 return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
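/* A sketch of the effect, using hypothetical trees: if EXP is the call
   f (a, b, c, d), then

     rewrite_call_expr (loc, exp, /*skip=*/2, g, /*n=*/1, x);

   produces a (folded) CALL_EXPR equivalent to g (x, c, d): the first
   SKIP arguments of EXP are dropped, the N new "..." arguments are
   placed in front, and the remaining arguments of EXP are appended.  */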
11315 /* Validate a single argument ARG against a tree code CODE representing
11316 a type. */
11318 static bool
11319 validate_arg (const_tree arg, enum tree_code code)
11321 if (!arg)
11322 return false;
11323 else if (code == POINTER_TYPE)
11324 return POINTER_TYPE_P (TREE_TYPE (arg));
11325 else if (code == INTEGER_TYPE)
11326 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11327 return code == TREE_CODE (TREE_TYPE (arg));
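/* Note that the two common specifiers are deliberately loose:
   POINTER_TYPE accepts any pointer type (POINTER_TYPE_P) and
   INTEGER_TYPE accepts any integral type (INTEGRAL_TYPE_P), so an
   enumeration or boolean argument also satisfies INTEGER_TYPE.  Any
   other code must match TREE_CODE of the argument's type exactly.  */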
11330 /* This function validates the types of a function call argument list
11331 against a specified list of tree_codes. If the last specifier is a 0,
11332 that represents an ellipsis; otherwise the last specifier must be a
11333 VOID_TYPE.
11335 This is the GIMPLE version of validate_arglist. Eventually we want to
11336 completely convert builtins.c to work from GIMPLEs and the tree based
11337 validate_arglist will then be removed. */
11339 bool
11340 validate_gimple_arglist (const_gimple call, ...)
11342 enum tree_code code;
11343 bool res = false;
11344 va_list ap;
11345 const_tree arg;
11346 size_t i;
11348 va_start (ap, call);
11349 i = 0;
11353 code = (enum tree_code) va_arg (ap, int);
11354 switch (code)
11356 case 0:
11357 /* This signifies an ellipsis; any further arguments are all OK. */
11358 res = true;
11359 goto end;
11360 case VOID_TYPE:
11361 /* This signifies an endlink; if no arguments remain, return
11362 true, otherwise return false. */
11363 res = (i == gimple_call_num_args (call));
11364 goto end;
11365 default:
11366 /* If no parameters remain or the parameter's code does not
11367 match the specified code, return false. Otherwise continue
11368 checking any remaining arguments. */
11369 arg = gimple_call_arg (call, i++);
11370 if (!validate_arg (arg, code))
11371 goto end;
11372 break;
11375 while (1);
11377 /* We need gotos here since we can only have one VA_CLOSE in a
11378 function. */
11379 end: ;
11380 va_end (ap);
11382 return res;
11385 /* This function validates the types of a function call argument list
11386 against a specified list of tree_codes. If the last specifier is a 0,
11387 that represents an ellipsis; otherwise the last specifier must be a
11388 VOID_TYPE. */
11390 bool
11391 validate_arglist (const_tree callexpr, ...)
11393 enum tree_code code;
11394 bool res = false;
11395 va_list ap;
11396 const_call_expr_arg_iterator iter;
11397 const_tree arg;
11399 va_start (ap, callexpr);
11400 init_const_call_expr_arg_iterator (callexpr, &iter);
11404 code = (enum tree_code) va_arg (ap, int);
11405 switch (code)
11407 case 0:
11408 /* This signifies an ellipsis; any further arguments are all OK. */
11409 res = true;
11410 goto end;
11411 case VOID_TYPE:
11412 /* This signifies an endlink; if no arguments remain, return
11413 true, otherwise return false. */
11414 res = !more_const_call_expr_args_p (&iter);
11415 goto end;
11416 default:
11417 /* If no parameters remain or the parameter's code does not
11418 match the specified code, return false. Otherwise continue
11419 checking any remaining arguments. */
11420 arg = next_const_call_expr_arg (&iter);
11421 if (!validate_arg (arg, code))
11422 goto end;
11423 break;
11426 while (1);
11428 /* We need gotos here since we can only have one VA_CLOSE in a
11429 function. */
11430 end: ;
11431 va_end (ap);
11433 return res;
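/* For example, the check used by expand_builtin_object_size below has
   the shape

     if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
       ...

   i.e. exactly one pointer argument followed by exactly one integral
   argument.  A trailing 0 instead of VOID_TYPE would allow any number
   of further arguments.  */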
11436 /* Default target-specific builtin expander that does nothing. */
11439 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11440 rtx target ATTRIBUTE_UNUSED,
11441 rtx subtarget ATTRIBUTE_UNUSED,
11442 enum machine_mode mode ATTRIBUTE_UNUSED,
11443 int ignore ATTRIBUTE_UNUSED)
11445 return NULL_RTX;
11448 /* Returns true if EXP represents data that would potentially reside
11449 in a readonly section. */
11451 static bool
11452 readonly_data_expr (tree exp)
11454 STRIP_NOPS (exp);
11456 if (TREE_CODE (exp) != ADDR_EXPR)
11457 return false;
11459 exp = get_base_address (TREE_OPERAND (exp, 0));
11460 if (!exp)
11461 return false;
11463 /* Make sure we call decl_readonly_section only for trees it
11464 can handle (since it returns true for everything it doesn't
11465 understand). */
11466 if (TREE_CODE (exp) == STRING_CST
11467 || TREE_CODE (exp) == CONSTRUCTOR
11468 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11469 return decl_readonly_section (exp, 0);
11470 else
11471 return false;
11474 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11475 to the call, and TYPE is its return type.
11477 Return NULL_TREE if no simplification was possible, otherwise return the
11478 simplified form of the call as a tree.
11480 The simplified form may be a constant or other expression which
11481 computes the same value, but in a more efficient manner (including
11482 calls to other builtin functions).
11484 The call may contain arguments which need to be evaluated, but
11485 which are not useful to determine the result of the call. In
11486 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11487 COMPOUND_EXPR will be an argument which must be evaluated.
11488 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11489 COMPOUND_EXPR in the chain will contain the tree for the simplified
11490 form of the builtin function call. */
11492 static tree
11493 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11495 if (!validate_arg (s1, POINTER_TYPE)
11496 || !validate_arg (s2, POINTER_TYPE))
11497 return NULL_TREE;
11498 else
11500 tree fn;
11501 const char *p1, *p2;
11503 p2 = c_getstr (s2);
11504 if (p2 == NULL)
11505 return NULL_TREE;
11507 p1 = c_getstr (s1);
11508 if (p1 != NULL)
11510 const char *r = strstr (p1, p2);
11511 tree tem;
11513 if (r == NULL)
11514 return build_int_cst (TREE_TYPE (s1), 0);
11516 /* Return an offset into the constant string argument. */
11517 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11518 s1, size_int (r - p1));
11519 return fold_convert_loc (loc, type, tem);
11522 /* The argument is const char *, and the result is char *, so we need
11523 a type conversion here to avoid a warning. */
11524 if (p2[0] == '\0')
11525 return fold_convert_loc (loc, type, s1);
11527 if (p2[1] != '\0')
11528 return NULL_TREE;
11530 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11531 if (!fn)
11532 return NULL_TREE;
11534 /* New argument list transforming strstr(s1, s2) to
11535 strchr(s1, s2[0]). */
11536 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
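/* Roughly, the transformations performed above are (a sketch, shown
   only for the constant cases they cover):

     strstr ("hello", "ell")  ->  "hello" + 1      (constant offset)
     strstr (s, "")           ->  (char *) s
     strstr (s, "e")          ->  strchr (s, 'e')

   Anything else is left for the library call.  */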
11540 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11541 the call, and TYPE is its return type.
11543 Return NULL_TREE if no simplification was possible, otherwise return the
11544 simplified form of the call as a tree.
11546 The simplified form may be a constant or other expression which
11547 computes the same value, but in a more efficient manner (including
11548 calls to other builtin functions).
11550 The call may contain arguments which need to be evaluated, but
11551 which are not useful to determine the result of the call. In
11552 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11553 COMPOUND_EXPR will be an argument which must be evaluated.
11554 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11555 COMPOUND_EXPR in the chain will contain the tree for the simplified
11556 form of the builtin function call. */
11558 static tree
11559 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11561 if (!validate_arg (s1, POINTER_TYPE)
11562 || !validate_arg (s2, INTEGER_TYPE))
11563 return NULL_TREE;
11564 else
11566 const char *p1;
11568 if (TREE_CODE (s2) != INTEGER_CST)
11569 return NULL_TREE;
11571 p1 = c_getstr (s1);
11572 if (p1 != NULL)
11574 char c;
11575 const char *r;
11576 tree tem;
11578 if (target_char_cast (s2, &c))
11579 return NULL_TREE;
11581 r = strchr (p1, c);
11583 if (r == NULL)
11584 return build_int_cst (TREE_TYPE (s1), 0);
11586 /* Return an offset into the constant string argument. */
11587 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11588 s1, size_int (r - p1));
11589 return fold_convert_loc (loc, type, tem);
11591 return NULL_TREE;
11595 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11596 the call, and TYPE is its return type.
11598 Return NULL_TREE if no simplification was possible, otherwise return the
11599 simplified form of the call as a tree.
11601 The simplified form may be a constant or other expression which
11602 computes the same value, but in a more efficient manner (including
11603 calls to other builtin functions).
11605 The call may contain arguments which need to be evaluated, but
11606 which are not useful to determine the result of the call. In
11607 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11608 COMPOUND_EXPR will be an argument which must be evaluated.
11609 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11610 COMPOUND_EXPR in the chain will contain the tree for the simplified
11611 form of the builtin function call. */
11613 static tree
11614 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11616 if (!validate_arg (s1, POINTER_TYPE)
11617 || !validate_arg (s2, INTEGER_TYPE))
11618 return NULL_TREE;
11619 else
11621 tree fn;
11622 const char *p1;
11624 if (TREE_CODE (s2) != INTEGER_CST)
11625 return NULL_TREE;
11627 p1 = c_getstr (s1);
11628 if (p1 != NULL)
11630 char c;
11631 const char *r;
11632 tree tem;
11634 if (target_char_cast (s2, &c))
11635 return NULL_TREE;
11637 r = strrchr (p1, c);
11639 if (r == NULL)
11640 return build_int_cst (TREE_TYPE (s1), 0);
11642 /* Return an offset into the constant string argument. */
11643 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11644 s1, size_int (r - p1));
11645 return fold_convert_loc (loc, type, tem);
11648 if (! integer_zerop (s2))
11649 return NULL_TREE;
11651 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11652 if (!fn)
11653 return NULL_TREE;
11655 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11656 return build_call_expr_loc (loc, fn, 2, s1, s2);
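/* Roughly, the transformations above are (sketch):

     strrchr ("abcb", 'b')  ->  "abcb" + 3         (constant offset)
     strrchr (s, '\0')      ->  strchr (s, '\0')

   Other forms are left for the library call.  */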
11660 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11661 to the call, and TYPE is its return type.
11663 Return NULL_TREE if no simplification was possible, otherwise return the
11664 simplified form of the call as a tree.
11666 The simplified form may be a constant or other expression which
11667 computes the same value, but in a more efficient manner (including
11668 calls to other builtin functions).
11670 The call may contain arguments which need to be evaluated, but
11671 which are not useful to determine the result of the call. In
11672 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11673 COMPOUND_EXPR will be an argument which must be evaluated.
11674 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11675 COMPOUND_EXPR in the chain will contain the tree for the simplified
11676 form of the builtin function call. */
11678 static tree
11679 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11681 if (!validate_arg (s1, POINTER_TYPE)
11682 || !validate_arg (s2, POINTER_TYPE))
11683 return NULL_TREE;
11684 else
11686 tree fn;
11687 const char *p1, *p2;
11689 p2 = c_getstr (s2);
11690 if (p2 == NULL)
11691 return NULL_TREE;
11693 p1 = c_getstr (s1);
11694 if (p1 != NULL)
11696 const char *r = strpbrk (p1, p2);
11697 tree tem;
11699 if (r == NULL)
11700 return build_int_cst (TREE_TYPE (s1), 0);
11702 /* Return an offset into the constant string argument. */
11703 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11704 s1, size_int (r - p1));
11705 return fold_convert_loc (loc, type, tem);
11708 if (p2[0] == '\0')
11709 /* strpbrk(x, "") == NULL.
11710 Evaluate and ignore s1 in case it had side-effects. */
11711 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11713 if (p2[1] != '\0')
11714 return NULL_TREE; /* Really call strpbrk. */
11716 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11717 if (!fn)
11718 return NULL_TREE;
11720 /* New argument list transforming strpbrk(s1, s2) to
11721 strchr(s1, s2[0]). */
11722 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
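/* Roughly (sketch): strpbrk with two constant strings folds to a
   constant offset into the first argument, strpbrk (s, "") folds to a
   null pointer while still evaluating S, and strpbrk (s, "e") folds to
   strchr (s, 'e') when the strchr decl is available.  */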
11726 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11727 to the call.
11729 Return NULL_TREE if no simplification was possible, otherwise return the
11730 simplified form of the call as a tree.
11732 The simplified form may be a constant or other expression which
11733 computes the same value, but in a more efficient manner (including
11734 calls to other builtin functions).
11736 The call may contain arguments which need to be evaluated, but
11737 which are not useful to determine the result of the call. In
11738 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11739 COMPOUND_EXPR will be an argument which must be evaluated.
11740 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11741 COMPOUND_EXPR in the chain will contain the tree for the simplified
11742 form of the builtin function call. */
11744 static tree
11745 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11747 if (!validate_arg (dst, POINTER_TYPE)
11748 || !validate_arg (src, POINTER_TYPE))
11749 return NULL_TREE;
11750 else
11752 const char *p = c_getstr (src);
11754 /* If the string length is zero, return the dst parameter. */
11755 if (p && *p == '\0')
11756 return dst;
11758 return NULL_TREE;
11762 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11763 arguments to the call.
11765 Return NULL_TREE if no simplification was possible, otherwise return the
11766 simplified form of the call as a tree.
11768 The simplified form may be a constant or other expression which
11769 computes the same value, but in a more efficient manner (including
11770 calls to other builtin functions).
11772 The call may contain arguments which need to be evaluated, but
11773 which are not useful to determine the result of the call. In
11774 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11775 COMPOUND_EXPR will be an argument which must be evaluated.
11776 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11777 COMPOUND_EXPR in the chain will contain the tree for the simplified
11778 form of the builtin function call. */
11780 static tree
11781 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11783 if (!validate_arg (dst, POINTER_TYPE)
11784 || !validate_arg (src, POINTER_TYPE)
11785 || !validate_arg (len, INTEGER_TYPE))
11786 return NULL_TREE;
11787 else
11789 const char *p = c_getstr (src);
11791 /* If the requested length is zero, or the src parameter string
11792 length is zero, return the dst parameter. */
11793 if (integer_zerop (len) || (p && *p == '\0'))
11794 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11796 /* If the requested len is greater than or equal to the string
11797 length, call strcat. */
11798 if (TREE_CODE (len) == INTEGER_CST && p
11799 && compare_tree_int (len, strlen (p)) >= 0)
11801 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11803 /* If the replacement _DECL isn't initialized, don't do the
11804 transformation. */
11805 if (!fn)
11806 return NULL_TREE;
11808 return build_call_expr_loc (loc, fn, 2, dst, src);
11810 return NULL_TREE;
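/* Sketch of the effect: strncat (d, s, 0) and strncat (d, "", n) fold
   to D (the unused operands are still evaluated), and a call such as
   strncat (d, "abc", n) with a constant n >= 3 folds to
   strcat (d, "abc") when the strcat decl is available.  */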
11814 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11815 to the call.
11817 Return NULL_TREE if no simplification was possible, otherwise return the
11818 simplified form of the call as a tree.
11820 The simplified form may be a constant or other expression which
11821 computes the same value, but in a more efficient manner (including
11822 calls to other builtin functions).
11824 The call may contain arguments which need to be evaluated, but
11825 which are not useful to determine the result of the call. In
11826 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11827 COMPOUND_EXPR will be an argument which must be evaluated.
11828 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11829 COMPOUND_EXPR in the chain will contain the tree for the simplified
11830 form of the builtin function call. */
11832 static tree
11833 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11835 if (!validate_arg (s1, POINTER_TYPE)
11836 || !validate_arg (s2, POINTER_TYPE))
11837 return NULL_TREE;
11838 else
11840 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11842 /* If both arguments are constants, evaluate at compile-time. */
11843 if (p1 && p2)
11845 const size_t r = strspn (p1, p2);
11846 return size_int (r);
11849 /* If either argument is "", the result is 0. */
11850 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11851 /* Evaluate and ignore both arguments in case either one has
11852 side-effects. */
11853 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11854 s1, s2);
11855 return NULL_TREE;
11859 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11860 to the call.
11862 Return NULL_TREE if no simplification was possible, otherwise return the
11863 simplified form of the call as a tree.
11865 The simplified form may be a constant or other expression which
11866 computes the same value, but in a more efficient manner (including
11867 calls to other builtin functions).
11869 The call may contain arguments which need to be evaluated, but
11870 which are not useful to determine the result of the call. In
11871 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11872 COMPOUND_EXPR will be an argument which must be evaluated.
11873 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11874 COMPOUND_EXPR in the chain will contain the tree for the simplified
11875 form of the builtin function call. */
11877 static tree
11878 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11880 if (!validate_arg (s1, POINTER_TYPE)
11881 || !validate_arg (s2, POINTER_TYPE))
11882 return NULL_TREE;
11883 else
11885 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11887 /* If both arguments are constants, evaluate at compile-time. */
11888 if (p1 && p2)
11890 const size_t r = strcspn (p1, p2);
11891 return size_int (r);
11894 /* If the first argument is "", the result is 0. */
11895 if (p1 && *p1 == '\0')
11897 /* Evaluate and ignore argument s2 in case it has
11898 side-effects. */
11899 return omit_one_operand_loc (loc, size_type_node,
11900 size_zero_node, s2);
11903 /* If the second argument is "", return __builtin_strlen(s1). */
11904 if (p2 && *p2 == '\0')
11906 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11908 /* If the replacement _DECL isn't initialized, don't do the
11909 transformation. */
11910 if (!fn)
11911 return NULL_TREE;
11913 return build_call_expr_loc (loc, fn, 1, s1);
11915 return NULL_TREE;
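/* Sketch of the effect: strcspn ("abc", "c") folds to the constant 2,
   strcspn ("", s2) folds to 0 while still evaluating S2, and
   strcspn (s1, "") folds to strlen (s1) when the strlen decl is
   available.  */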
11919 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11920 to the call. IGNORE is true if the value returned
11921 by the builtin will be ignored. UNLOCKED is true if this is
11922 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11923 the known length of the string. Return NULL_TREE if no simplification
11924 was possible. */
11926 tree
11927 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11928 bool ignore, bool unlocked, tree len)
11930 /* If we're using an unlocked function, assume the other unlocked
11931 functions exist explicitly. */
11932 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11933 : implicit_built_in_decls[BUILT_IN_FPUTC];
11934 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11935 : implicit_built_in_decls[BUILT_IN_FWRITE];
11937 /* If the return value is used, don't do the transformation. */
11938 if (!ignore)
11939 return NULL_TREE;
11941 /* Verify the arguments in the original call. */
11942 if (!validate_arg (arg0, POINTER_TYPE)
11943 || !validate_arg (arg1, POINTER_TYPE))
11944 return NULL_TREE;
11946 if (! len)
11947 len = c_strlen (arg0, 0);
11949 /* Get the length of the string passed to fputs. If the length
11950 can't be determined, punt. */
11951 if (!len
11952 || TREE_CODE (len) != INTEGER_CST)
11953 return NULL_TREE;
11955 switch (compare_tree_int (len, 1))
11957 case -1: /* length is 0, delete the call entirely. */
11958 return omit_one_operand_loc (loc, integer_type_node,
11959 integer_zero_node, arg1);
11961 case 0: /* length is 1, call fputc. */
11963 const char *p = c_getstr (arg0);
11965 if (p != NULL)
11967 if (fn_fputc)
11968 return build_call_expr_loc (loc, fn_fputc, 2,
11969 build_int_cst (NULL_TREE, p[0]), arg1);
11970 else
11971 return NULL_TREE;
11974 /* FALLTHROUGH */
11975 case 1: /* length is greater than 1, call fwrite. */
11977 /* If optimizing for size keep fputs. */
11978 if (optimize_function_for_size_p (cfun))
11979 return NULL_TREE;
11980 /* New argument list transforming fputs(string, stream) to
11981 fwrite(string, 1, len, stream). */
11982 if (fn_fwrite)
11983 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11984 size_one_node, len, arg1);
11985 else
11986 return NULL_TREE;
11988 default:
11989 gcc_unreachable ();
11991 return NULL_TREE;
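/* Sketch of the effect, assuming the replacement decls are available
   and the return value is ignored:

     fputs ("", f)     ->  F is evaluated, the call is dropped
     fputs ("x", f)    ->  fputc ('x', f)
     fputs ("abc", f)  ->  fwrite ("abc", 1, 3, f)

   The fwrite form is skipped when optimizing for size.  */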
11994 /* Fold the next_arg or va_start call EXP. Returns true if an error
11995 was produced, false otherwise. This is done so that we don't output the error
11996 or warning twice or three times. */
11998 bool
11999 fold_builtin_next_arg (tree exp, bool va_start_p)
12001 tree fntype = TREE_TYPE (current_function_decl);
12002 int nargs = call_expr_nargs (exp);
12003 tree arg;
12005 if (TYPE_ARG_TYPES (fntype) == 0
12006 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
12007 == void_type_node))
12009 error ("%<va_start%> used in function with fixed args");
12010 return true;
12013 if (va_start_p)
12015 if (va_start_p && (nargs != 2))
12017 error ("wrong number of arguments to function %<va_start%>");
12018 return true;
12020 arg = CALL_EXPR_ARG (exp, 1);
12022 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12023 once we have checked the arguments and, if needed, issued a warning. */
12024 else
12026 if (nargs == 0)
12028 /* Evidently an out of date version of <stdarg.h>; can't validate
12029 va_start's second argument, but can still work as intended. */
12030 warning (0, "%<__builtin_next_arg%> called without an argument");
12031 return true;
12033 else if (nargs > 1)
12035 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12036 return true;
12038 arg = CALL_EXPR_ARG (exp, 0);
12041 if (TREE_CODE (arg) == SSA_NAME)
12042 arg = SSA_NAME_VAR (arg);
12044 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12045 or __builtin_next_arg (0) the first time we see it, after checking
12046 the arguments and if needed issuing a warning. */
12047 if (!integer_zerop (arg))
12049 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12051 /* Strip off all nops for the sake of the comparison. This
12052 is not quite the same as STRIP_NOPS. It does more.
12053 We must also strip off INDIRECT_EXPR for C++ reference
12054 parameters. */
12055 while (CONVERT_EXPR_P (arg)
12056 || TREE_CODE (arg) == INDIRECT_REF)
12057 arg = TREE_OPERAND (arg, 0);
12058 if (arg != last_parm)
12060 /* FIXME: Sometimes with the tree optimizers we can get something
12061 other than the last argument even though the user used the last
12062 argument. We just warn and set the arg to be the last
12063 argument, so we may still generate wrong code because of
12064 it. */
12065 warning (0, "second parameter of %<va_start%> not last named argument");
12068 /* Undefined by C99 7.15.1.4p4 (va_start):
12069 "If the parameter parmN is declared with the register storage
12070 class, with a function or array type, or with a type that is
12071 not compatible with the type that results after application of
12072 the default argument promotions, the behavior is undefined."  */
12074 else if (DECL_REGISTER (arg))
12075 warning (0, "undefined behaviour when second parameter of "
12076 "%<va_start%> is declared with %<register%> storage");
12078 /* We want to verify the second parameter just once before the tree
12079 optimizers are run and then avoid keeping it in the tree,
12080 as otherwise we could warn even for correct code like:
12081 void foo (int i, ...)
12082 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12083 if (va_start_p)
12084 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12085 else
12086 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12088 return false;
12092 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12093 ORIG may be null if this is a 2-argument call. We don't attempt to
12094 simplify calls with more than 3 arguments.
12096 Return NULL_TREE if no simplification was possible, otherwise return the
12097 simplified form of the call as a tree. If IGNORED is true, it means that
12098 the caller does not use the returned value of the function. */
12100 static tree
12101 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12102 tree orig, int ignored)
12104 tree call, retval;
12105 const char *fmt_str = NULL;
12107 /* Verify the required arguments in the original call. We deal with two
12108 types of sprintf() calls: 'sprintf (str, fmt)' and
12109 'sprintf (dest, "%s", orig)'. */
12110 if (!validate_arg (dest, POINTER_TYPE)
12111 || !validate_arg (fmt, POINTER_TYPE))
12112 return NULL_TREE;
12113 if (orig && !validate_arg (orig, POINTER_TYPE))
12114 return NULL_TREE;
12116 /* Check whether the format is a literal string constant. */
12117 fmt_str = c_getstr (fmt);
12118 if (fmt_str == NULL)
12119 return NULL_TREE;
12121 call = NULL_TREE;
12122 retval = NULL_TREE;
12124 if (!init_target_chars ())
12125 return NULL_TREE;
12127 /* If the format doesn't contain % args or %%, use strcpy. */
12128 if (strchr (fmt_str, target_percent) == NULL)
12130 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
12132 if (!fn)
12133 return NULL_TREE;
12135 /* Don't optimize sprintf (buf, "abc", ptr++). */
12136 if (orig)
12137 return NULL_TREE;
12139 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12140 'format' is known to contain no % formats. */
12141 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12142 if (!ignored)
12143 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
12146 /* If the format is "%s", use strcpy if the result isn't used. */
12147 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12149 tree fn;
12150 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
12152 if (!fn)
12153 return NULL_TREE;
12155 /* Don't crash on sprintf (str1, "%s"). */
12156 if (!orig)
12157 return NULL_TREE;
12159 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12160 if (!ignored)
12162 retval = c_strlen (orig, 1);
12163 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12164 return NULL_TREE;
12166 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12169 if (call && retval)
12171 retval = fold_convert_loc
12172 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
12173 retval);
12174 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12176 else
12177 return call;
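/* Sketch of the effect, assuming the strcpy decl is available:

     sprintf (d, "abc")    ->  strcpy (d, "abc"), result 3 if used
     sprintf (d, "%s", s)  ->  strcpy (d, s); if the result is used,
                               c_strlen (s) must be a compile-time
                               constant, otherwise no folding is done

   Formats containing any other % sequence are left alone.  */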
12180 /* Expand a call EXP to __builtin_object_size. */
12183 expand_builtin_object_size (tree exp)
12185 tree ost;
12186 int object_size_type;
12187 tree fndecl = get_callee_fndecl (exp);
12189 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12191 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12192 exp, fndecl);
12193 expand_builtin_trap ();
12194 return const0_rtx;
12197 ost = CALL_EXPR_ARG (exp, 1);
12198 STRIP_NOPS (ost);
12200 if (TREE_CODE (ost) != INTEGER_CST
12201 || tree_int_cst_sgn (ost) < 0
12202 || compare_tree_int (ost, 3) > 0)
12204 error ("%Klast argument of %D is not integer constant between 0 and 3",
12205 exp, fndecl);
12206 expand_builtin_trap ();
12207 return const0_rtx;
12210 object_size_type = tree_low_cst (ost, 0);
12212 return object_size_type < 2 ? constm1_rtx : const0_rtx;
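/* In other words, once expansion is reached the object size is treated
   as unknown, and the documented fallback values are used: for example
   __builtin_object_size (p, 0) expands to (size_t) -1 here, while
   __builtin_object_size (p, 2) expands to (size_t) 0.  */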
12215 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12216 FCODE is the BUILT_IN_* to use.
12217 Return NULL_RTX if we failed; the caller should emit a normal call,
12218 otherwise try to get the result in TARGET, if convenient (and in
12219 mode MODE if that's convenient). */
12221 static rtx
12222 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12223 enum built_in_function fcode)
12225 tree dest, src, len, size;
12227 if (!validate_arglist (exp,
12228 POINTER_TYPE,
12229 fcode == BUILT_IN_MEMSET_CHK
12230 ? INTEGER_TYPE : POINTER_TYPE,
12231 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12232 return NULL_RTX;
12234 dest = CALL_EXPR_ARG (exp, 0);
12235 src = CALL_EXPR_ARG (exp, 1);
12236 len = CALL_EXPR_ARG (exp, 2);
12237 size = CALL_EXPR_ARG (exp, 3);
12239 if (! host_integerp (size, 1))
12240 return NULL_RTX;
12242 if (host_integerp (len, 1) || integer_all_onesp (size))
12244 tree fn;
12246 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12248 warning_at (tree_nonartificial_location (exp),
12249 0, "%Kcall to %D will always overflow destination buffer",
12250 exp, get_callee_fndecl (exp));
12251 return NULL_RTX;
12254 fn = NULL_TREE;
12255 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12256 mem{cpy,pcpy,move,set} is available. */
12257 switch (fcode)
12259 case BUILT_IN_MEMCPY_CHK:
12260 fn = built_in_decls[BUILT_IN_MEMCPY];
12261 break;
12262 case BUILT_IN_MEMPCPY_CHK:
12263 fn = built_in_decls[BUILT_IN_MEMPCPY];
12264 break;
12265 case BUILT_IN_MEMMOVE_CHK:
12266 fn = built_in_decls[BUILT_IN_MEMMOVE];
12267 break;
12268 case BUILT_IN_MEMSET_CHK:
12269 fn = built_in_decls[BUILT_IN_MEMSET];
12270 break;
12271 default:
12272 break;
12275 if (! fn)
12276 return NULL_RTX;
12278 fn = build_call_expr (fn, 3, dest, src, len);
12279 STRIP_TYPE_NOPS (fn);
12280 while (TREE_CODE (fn) == COMPOUND_EXPR)
12282 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12283 EXPAND_NORMAL);
12284 fn = TREE_OPERAND (fn, 1);
12286 if (TREE_CODE (fn) == CALL_EXPR)
12287 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12288 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12290 else if (fcode == BUILT_IN_MEMSET_CHK)
12291 return NULL_RTX;
12292 else
12294 unsigned int dest_align
12295 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
12297 /* If DEST is not a pointer type, call the normal function. */
12298 if (dest_align == 0)
12299 return NULL_RTX;
12301 /* If SRC and DEST are the same (and not volatile), do nothing. */
12302 if (operand_equal_p (src, dest, 0))
12304 tree expr;
12306 if (fcode != BUILT_IN_MEMPCPY_CHK)
12308 /* Evaluate and ignore LEN in case it has side-effects. */
12309 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12310 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12313 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12314 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12317 /* __memmove_chk special case. */
12318 if (fcode == BUILT_IN_MEMMOVE_CHK)
12320 unsigned int src_align
12321 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
12323 if (src_align == 0)
12324 return NULL_RTX;
12326 /* If src is categorized for a readonly section we can use
12327 normal __memcpy_chk. */
12328 if (readonly_data_expr (src))
12330 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12331 if (!fn)
12332 return NULL_RTX;
12333 fn = build_call_expr (fn, 4, dest, src, len, size);
12334 STRIP_TYPE_NOPS (fn);
12335 while (TREE_CODE (fn) == COMPOUND_EXPR)
12337 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12338 EXPAND_NORMAL);
12339 fn = TREE_OPERAND (fn, 1);
12341 if (TREE_CODE (fn) == CALL_EXPR)
12342 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12343 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12346 return NULL_RTX;
12350 /* Emit warning if a buffer overflow is detected at compile time. */
12352 static void
12353 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12355 int is_strlen = 0;
12356 tree len, size;
12357 location_t loc = tree_nonartificial_location (exp);
12359 switch (fcode)
12361 case BUILT_IN_STRCPY_CHK:
12362 case BUILT_IN_STPCPY_CHK:
12363 /* For __strcat_chk the warning will be emitted only if overflowing
12364 by at least strlen (dest) + 1 bytes. */
12365 case BUILT_IN_STRCAT_CHK:
12366 len = CALL_EXPR_ARG (exp, 1);
12367 size = CALL_EXPR_ARG (exp, 2);
12368 is_strlen = 1;
12369 break;
12370 case BUILT_IN_STRNCAT_CHK:
12371 case BUILT_IN_STRNCPY_CHK:
12372 len = CALL_EXPR_ARG (exp, 2);
12373 size = CALL_EXPR_ARG (exp, 3);
12374 break;
12375 case BUILT_IN_SNPRINTF_CHK:
12376 case BUILT_IN_VSNPRINTF_CHK:
12377 len = CALL_EXPR_ARG (exp, 1);
12378 size = CALL_EXPR_ARG (exp, 3);
12379 break;
12380 default:
12381 gcc_unreachable ();
12384 if (!len || !size)
12385 return;
12387 if (! host_integerp (size, 1) || integer_all_onesp (size))
12388 return;
12390 if (is_strlen)
12392 len = c_strlen (len, 1);
12393 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12394 return;
12396 else if (fcode == BUILT_IN_STRNCAT_CHK)
12398 tree src = CALL_EXPR_ARG (exp, 1);
12399 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12400 return;
12401 src = c_strlen (src, 1);
12402 if (! src || ! host_integerp (src, 1))
12404 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12405 exp, get_callee_fndecl (exp));
12406 return;
12408 else if (tree_int_cst_lt (src, size))
12409 return;
12411 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12412 return;
12414 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12415 exp, get_callee_fndecl (exp));
12418 /* Emit warning if a buffer overflow is detected at compile time
12419 in __sprintf_chk/__vsprintf_chk calls. */
12421 static void
12422 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12424 tree dest, size, len, fmt, flag;
12425 const char *fmt_str;
12426 int nargs = call_expr_nargs (exp);
12428 /* Verify the required arguments in the original call. */
12430 if (nargs < 4)
12431 return;
12432 dest = CALL_EXPR_ARG (exp, 0);
12433 flag = CALL_EXPR_ARG (exp, 1);
12434 size = CALL_EXPR_ARG (exp, 2);
12435 fmt = CALL_EXPR_ARG (exp, 3);
12437 if (! host_integerp (size, 1) || integer_all_onesp (size))
12438 return;
12440 /* Check whether the format is a literal string constant. */
12441 fmt_str = c_getstr (fmt);
12442 if (fmt_str == NULL)
12443 return;
12445 if (!init_target_chars ())
12446 return;
12448 /* If the format doesn't contain % args or %%, we know its size. */
12449 if (strchr (fmt_str, target_percent) == 0)
12450 len = build_int_cstu (size_type_node, strlen (fmt_str));
12451 /* If the format is "%s" and the first ... argument is a string literal,
12452 we know it too. */
12453 else if (fcode == BUILT_IN_SPRINTF_CHK
12454 && strcmp (fmt_str, target_percent_s) == 0)
12456 tree arg;
12458 if (nargs < 5)
12459 return;
12460 arg = CALL_EXPR_ARG (exp, 4);
12461 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12462 return;
12464 len = c_strlen (arg, 1);
12465 if (!len || ! host_integerp (len, 1))
12466 return;
12468 else
12469 return;
12471 if (! tree_int_cst_lt (len, size))
12472 warning_at (tree_nonartificial_location (exp),
12473 0, "%Kcall to %D will always overflow destination buffer",
12474 exp, get_callee_fndecl (exp));
12477 /* Emit warning if a free is called with address of a variable. */
12479 static void
12480 maybe_emit_free_warning (tree exp)
12482 tree arg = CALL_EXPR_ARG (exp, 0);
12484 STRIP_NOPS (arg);
12485 if (TREE_CODE (arg) != ADDR_EXPR)
12486 return;
12488 arg = get_base_address (TREE_OPERAND (arg, 0));
12489 if (arg == NULL || INDIRECT_REF_P (arg))
12490 return;
12492 if (SSA_VAR_P (arg))
12493 warning_at (tree_nonartificial_location (exp),
12494 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12495 else
12496 warning_at (tree_nonartificial_location (exp),
12497 0, "%Kattempt to free a non-heap object", exp);
12500 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12501 if possible. */
12503 tree
12504 fold_builtin_object_size (tree ptr, tree ost)
12506 tree ret = NULL_TREE;
12507 int object_size_type;
12509 if (!validate_arg (ptr, POINTER_TYPE)
12510 || !validate_arg (ost, INTEGER_TYPE))
12511 return NULL_TREE;
12513 STRIP_NOPS (ost);
12515 if (TREE_CODE (ost) != INTEGER_CST
12516 || tree_int_cst_sgn (ost) < 0
12517 || compare_tree_int (ost, 3) > 0)
12518 return NULL_TREE;
12520 object_size_type = tree_low_cst (ost, 0);
12522 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12523 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12524 and (size_t) 0 for types 2 and 3. */
12525 if (TREE_SIDE_EFFECTS (ptr))
12526 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12528 if (TREE_CODE (ptr) == ADDR_EXPR)
12529 ret = build_int_cstu (size_type_node,
12530 compute_builtin_object_size (ptr, object_size_type));
12532 else if (TREE_CODE (ptr) == SSA_NAME)
12534 unsigned HOST_WIDE_INT bytes;
12536 /* If object size is not known yet, delay folding until
12537 later. Maybe subsequent passes will help determining
12538 it. */
12539 bytes = compute_builtin_object_size (ptr, object_size_type);
12540 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12541 ? -1 : 0))
12542 ret = build_int_cstu (size_type_node, bytes);
12545 if (ret)
12547 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12548 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12549 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12550 ret = NULL_TREE;
12553 return ret;
12556 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12557 DEST, SRC, LEN, and SIZE are the arguments to the call.
12558 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
12559 code of the builtin. If MAXLEN is not NULL, it is maximum length
12560 passed as third argument. */
12562 tree
12563 fold_builtin_memory_chk (location_t loc, tree fndecl,
12564 tree dest, tree src, tree len, tree size,
12565 tree maxlen, bool ignore,
12566 enum built_in_function fcode)
12568 tree fn;
12570 if (!validate_arg (dest, POINTER_TYPE)
12571 || !validate_arg (src,
12572 (fcode == BUILT_IN_MEMSET_CHK
12573 ? INTEGER_TYPE : POINTER_TYPE))
12574 || !validate_arg (len, INTEGER_TYPE)
12575 || !validate_arg (size, INTEGER_TYPE))
12576 return NULL_TREE;
12578 /* If SRC and DEST are the same (and not volatile), return DEST
12579 (resp. DEST+LEN for __mempcpy_chk). */
12580 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12582 if (fcode != BUILT_IN_MEMPCPY_CHK)
12583 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12584 dest, len);
12585 else
12587 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
12588 dest, len);
12589 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12593 if (! host_integerp (size, 1))
12594 return NULL_TREE;
12596 if (! integer_all_onesp (size))
12598 if (! host_integerp (len, 1))
12600 /* If LEN is not constant, try MAXLEN too.
12601 For MAXLEN only allow optimizing into non-_ocs function
12602 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12603 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12605 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12607 /* (void) __mempcpy_chk () can be optimized into
12608 (void) __memcpy_chk (). */
12609 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12610 if (!fn)
12611 return NULL_TREE;
12613 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12615 return NULL_TREE;
12618 else
12619 maxlen = len;
12621 if (tree_int_cst_lt (size, maxlen))
12622 return NULL_TREE;
12625 fn = NULL_TREE;
12626 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12627 mem{cpy,pcpy,move,set} is available. */
12628 switch (fcode)
12630 case BUILT_IN_MEMCPY_CHK:
12631 fn = built_in_decls[BUILT_IN_MEMCPY];
12632 break;
12633 case BUILT_IN_MEMPCPY_CHK:
12634 fn = built_in_decls[BUILT_IN_MEMPCPY];
12635 break;
12636 case BUILT_IN_MEMMOVE_CHK:
12637 fn = built_in_decls[BUILT_IN_MEMMOVE];
12638 break;
12639 case BUILT_IN_MEMSET_CHK:
12640 fn = built_in_decls[BUILT_IN_MEMSET];
12641 break;
12642 default:
12643 break;
12646 if (!fn)
12647 return NULL_TREE;
12649 return build_call_expr_loc (loc, fn, 3, dest, src, len);
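/* Sketch of the overall effect for __memcpy_chk (d, s, n, os); the
   other _chk variants are analogous:

     d == s (not for memset)        ->  d, or d + n for __mempcpy_chk
     os == (size_t) -1              ->  memcpy (d, s, n)
     n (or MAXLEN) known, n <= os   ->  memcpy (d, s, n)
     otherwise                      ->  the checked call is kept

   MAXLEN, when supplied, stands in for a non-constant N in the
   comparison against OS.  */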
12652 /* Fold a call to the __st[rp]cpy_chk builtin.
12653 DEST, SRC, and SIZE are the arguments to the call.
12654 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12655 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12656 strings passed as second argument. */
12658 tree
12659 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12660 tree src, tree size,
12661 tree maxlen, bool ignore,
12662 enum built_in_function fcode)
12664 tree len, fn;
12666 if (!validate_arg (dest, POINTER_TYPE)
12667 || !validate_arg (src, POINTER_TYPE)
12668 || !validate_arg (size, INTEGER_TYPE))
12669 return NULL_TREE;
12671 /* If SRC and DEST are the same (and not volatile), return DEST. */
12672 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12673 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12675 if (! host_integerp (size, 1))
12676 return NULL_TREE;
12678 if (! integer_all_onesp (size))
12680 len = c_strlen (src, 1);
12681 if (! len || ! host_integerp (len, 1))
12683 /* If LEN is not constant, try MAXLEN too.
12684 For MAXLEN only allow optimizing into non-_ocs function
12685 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12686 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12688 if (fcode == BUILT_IN_STPCPY_CHK)
12690 if (! ignore)
12691 return NULL_TREE;
12693 /* If return value of __stpcpy_chk is ignored,
12694 optimize into __strcpy_chk. */
12695 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12696 if (!fn)
12697 return NULL_TREE;
12699 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12702 if (! len || TREE_SIDE_EFFECTS (len))
12703 return NULL_TREE;
12705 /* If c_strlen returned something, but not a constant,
12706 transform __strcpy_chk into __memcpy_chk. */
12707 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12708 if (!fn)
12709 return NULL_TREE;
12711 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12712 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12713 build_call_expr_loc (loc, fn, 4,
12714 dest, src, len, size));
12717 else
12718 maxlen = len;
12720 if (! tree_int_cst_lt (maxlen, size))
12721 return NULL_TREE;
12724 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12725 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12726 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12727 if (!fn)
12728 return NULL_TREE;
12730 return build_call_expr_loc (loc, fn, 2, dest, src);
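/* Sketch of the effect for __strcpy_chk (d, s, os):

     d == s                               ->  d
     os == (size_t) -1                    ->  strcpy (d, s)
     strlen (s) constant and < os         ->  strcpy (d, s)
     strlen (s) computable, not constant  ->  __memcpy_chk (d, s,
                                                strlen (s) + 1, os)

   __stpcpy_chk is folded the same way into stpcpy; additionally, when
   its result is ignored and no length is known, it is degraded to
   __strcpy_chk.  */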
12733 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12734 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12735 length passed as third argument. */
12737 tree
12738 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12739 tree len, tree size, tree maxlen)
12741 tree fn;
12743 if (!validate_arg (dest, POINTER_TYPE)
12744 || !validate_arg (src, POINTER_TYPE)
12745 || !validate_arg (len, INTEGER_TYPE)
12746 || !validate_arg (size, INTEGER_TYPE))
12747 return NULL_TREE;
12749 if (! host_integerp (size, 1))
12750 return NULL_TREE;
12752 if (! integer_all_onesp (size))
12754 if (! host_integerp (len, 1))
12756 /* If LEN is not constant, try MAXLEN too.
12757 For MAXLEN only allow optimizing into non-_ocs function
12758 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12759 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12760 return NULL_TREE;
12762 else
12763 maxlen = len;
12765 if (tree_int_cst_lt (size, maxlen))
12766 return NULL_TREE;
12769 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12770 fn = built_in_decls[BUILT_IN_STRNCPY];
12771 if (!fn)
12772 return NULL_TREE;
12774 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12777 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12778 are the arguments to the call. */
12780 static tree
12781 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12782 tree src, tree size)
12784 tree fn;
12785 const char *p;
12787 if (!validate_arg (dest, POINTER_TYPE)
12788 || !validate_arg (src, POINTER_TYPE)
12789 || !validate_arg (size, INTEGER_TYPE))
12790 return NULL_TREE;
12792 p = c_getstr (src);
12793 /* If the SRC parameter is "", return DEST. */
12794 if (p && *p == '\0')
12795 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12797 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12798 return NULL_TREE;
12800 /* If __builtin_strcat_chk is used, assume strcat is available. */
12801 fn = built_in_decls[BUILT_IN_STRCAT];
12802 if (!fn)
12803 return NULL_TREE;
12805 return build_call_expr_loc (loc, fn, 2, dest, src);
12808 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12809 LEN, and SIZE. */
12811 static tree
12812 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12813 tree dest, tree src, tree len, tree size)
12815 tree fn;
12816 const char *p;
12818 if (!validate_arg (dest, POINTER_TYPE)
12819 || !validate_arg (src, POINTER_TYPE)
12820 || !validate_arg (size, INTEGER_TYPE)
12821 || !validate_arg (len, INTEGER_TYPE))
12822 return NULL_TREE;
12824 p = c_getstr (src);
12825 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12826 if (p && *p == '\0')
12827 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12828 else if (integer_zerop (len))
12829 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12831 if (! host_integerp (size, 1))
12832 return NULL_TREE;
12834 if (! integer_all_onesp (size))
12836 tree src_len = c_strlen (src, 1);
12837 if (src_len
12838 && host_integerp (src_len, 1)
12839 && host_integerp (len, 1)
12840 && ! tree_int_cst_lt (len, src_len))
12842 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12843 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12844 if (!fn)
12845 return NULL_TREE;
12847 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12849 return NULL_TREE;
12852 /* If __builtin_strncat_chk is used, assume strncat is available. */
12853 fn = built_in_decls[BUILT_IN_STRNCAT];
12854 if (!fn)
12855 return NULL_TREE;
12857 return build_call_expr_loc (loc, fn, 3, dest, src, len);
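/* Illustrative note, not part of the original builtins.c (the constants
   are made up): with a known object size, a call such as

       __builtin___strncat_chk (dest, "abc", 16, 64);

   satisfies LEN (16) >= strlen ("abc") (3) and is first relaxed to
   __strcat_chk (dest, "abc", 64); with an unknown all-ones size the call
   is folded all the way down to strncat (dest, src, len).  */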
12860 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12861 a normal call should be emitted rather than expanding the function
12862 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12864 static tree
12865 fold_builtin_sprintf_chk (location_t loc, tree exp,
12866 enum built_in_function fcode)
12868 tree dest, size, len, fn, fmt, flag;
12869 const char *fmt_str;
12870 int nargs = call_expr_nargs (exp);
12872 /* Verify the required arguments in the original call. */
12873 if (nargs < 4)
12874 return NULL_TREE;
12875 dest = CALL_EXPR_ARG (exp, 0);
12876 if (!validate_arg (dest, POINTER_TYPE))
12877 return NULL_TREE;
12878 flag = CALL_EXPR_ARG (exp, 1);
12879 if (!validate_arg (flag, INTEGER_TYPE))
12880 return NULL_TREE;
12881 size = CALL_EXPR_ARG (exp, 2);
12882 if (!validate_arg (size, INTEGER_TYPE))
12883 return NULL_TREE;
12884 fmt = CALL_EXPR_ARG (exp, 3);
12885 if (!validate_arg (fmt, POINTER_TYPE))
12886 return NULL_TREE;
12888 if (! host_integerp (size, 1))
12889 return NULL_TREE;
12891 len = NULL_TREE;
12893 if (!init_target_chars ())
12894 return NULL_TREE;
12896 /* Check whether the format is a literal string constant. */
12897 fmt_str = c_getstr (fmt);
12898 if (fmt_str != NULL)
12900 /* If the format doesn't contain % args or %%, we know the size. */
12901 if (strchr (fmt_str, target_percent) == 0)
12903 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12904 len = build_int_cstu (size_type_node, strlen (fmt_str));
12906 /* If the format is "%s" and the first ... argument is a string literal,
12907 we know the size too. */
12908 else if (fcode == BUILT_IN_SPRINTF_CHK
12909 && strcmp (fmt_str, target_percent_s) == 0)
12911 tree arg;
12913 if (nargs == 5)
12915 arg = CALL_EXPR_ARG (exp, 4);
12916 if (validate_arg (arg, POINTER_TYPE))
12918 len = c_strlen (arg, 1);
12919 if (! len || ! host_integerp (len, 1))
12920 len = NULL_TREE;
12926 if (! integer_all_onesp (size))
12928 if (! len || ! tree_int_cst_lt (len, size))
12929 return NULL_TREE;
12932 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12933 or if format doesn't contain % chars or is "%s". */
12934 if (! integer_zerop (flag))
12936 if (fmt_str == NULL)
12937 return NULL_TREE;
12938 if (strchr (fmt_str, target_percent) != NULL
12939 && strcmp (fmt_str, target_percent_s))
12940 return NULL_TREE;
12943 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12944 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12945 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12946 if (!fn)
12947 return NULL_TREE;
12949 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
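/* Illustrative note, not part of the original builtins.c (names made up):
   with a literal format containing no conversions,

       __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 1), "hi");

   needs strlen ("hi") == 2 characters plus the terminating NUL, so whenever
   2 is strictly smaller than the object size (or the size is unknown) the
   call is rewritten to sprintf (buf, "hi"); a "%s" format whose single
   string argument has a known constant length is handled the same way.  */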
12952 /* Fold a call EXP to __{,v}snprintf_chk.  Return NULL_TREE if
12953 a normal call should be emitted rather than expanding the function
12954 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12955 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12956 passed as second argument. */
12958 tree
12959 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12960 enum built_in_function fcode)
12962 tree dest, size, len, fn, fmt, flag;
12963 const char *fmt_str;
12965 /* Verify the required arguments in the original call. */
12966 if (call_expr_nargs (exp) < 5)
12967 return NULL_TREE;
12968 dest = CALL_EXPR_ARG (exp, 0);
12969 if (!validate_arg (dest, POINTER_TYPE))
12970 return NULL_TREE;
12971 len = CALL_EXPR_ARG (exp, 1);
12972 if (!validate_arg (len, INTEGER_TYPE))
12973 return NULL_TREE;
12974 flag = CALL_EXPR_ARG (exp, 2);
12975 if (!validate_arg (flag, INTEGER_TYPE))
12976 return NULL_TREE;
12977 size = CALL_EXPR_ARG (exp, 3);
12978 if (!validate_arg (size, INTEGER_TYPE))
12979 return NULL_TREE;
12980 fmt = CALL_EXPR_ARG (exp, 4);
12981 if (!validate_arg (fmt, POINTER_TYPE))
12982 return NULL_TREE;
12984 if (! host_integerp (size, 1))
12985 return NULL_TREE;
12987 if (! integer_all_onesp (size))
12989 if (! host_integerp (len, 1))
12991 /* If LEN is not constant, try MAXLEN too.
12992 For MAXLEN only allow optimizing into non-_ocs function
12993 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12994 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12995 return NULL_TREE;
12997 else
12998 maxlen = len;
13000 if (tree_int_cst_lt (size, maxlen))
13001 return NULL_TREE;
13004 if (!init_target_chars ())
13005 return NULL_TREE;
13007 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13008 or if format doesn't contain % chars or is "%s". */
13009 if (! integer_zerop (flag))
13011 fmt_str = c_getstr (fmt);
13012 if (fmt_str == NULL)
13013 return NULL_TREE;
13014 if (strchr (fmt_str, target_percent) != NULL
13015 && strcmp (fmt_str, target_percent_s))
13016 return NULL_TREE;
13019 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13020 available. */
13021 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13022 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13023 if (!fn)
13024 return NULL_TREE;
13026 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
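/* Illustrative note, not part of the original builtins.c (names made up):
   a call

       __builtin___snprintf_chk (buf, 8, 0, __builtin_object_size (buf, 1),
                                 "%d", i);

   folds to snprintf (buf, 8, "%d", i) once the constant bound 8 is known
   not to exceed the object size (or the size is unknown); because FLAG is
   zero here, the format itself never needs to be inspected, since snprintf
   already limits the output.  */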
13029 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13030 FMT and ARG are the arguments to the call; we don't fold cases with
13031 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13033 Return NULL_TREE if no simplification was possible, otherwise return the
13034 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13035 code of the function to be simplified. */
13037 static tree
13038 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13039 tree arg, bool ignore,
13040 enum built_in_function fcode)
13042 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13043 const char *fmt_str = NULL;
13045 /* If the return value is used, don't do the transformation. */
13046 if (! ignore)
13047 return NULL_TREE;
13049 /* Verify the required arguments in the original call. */
13050 if (!validate_arg (fmt, POINTER_TYPE))
13051 return NULL_TREE;
13053 /* Check whether the format is a literal string constant. */
13054 fmt_str = c_getstr (fmt);
13055 if (fmt_str == NULL)
13056 return NULL_TREE;
13058 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13060 /* If we're using an unlocked function, assume the other
13061 unlocked functions exist explicitly. */
13062 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
13063 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
13065 else
13067 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
13068 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
13071 if (!init_target_chars ())
13072 return NULL_TREE;
13074 if (strcmp (fmt_str, target_percent_s) == 0
13075 || strchr (fmt_str, target_percent) == NULL)
13077 const char *str;
13079 if (strcmp (fmt_str, target_percent_s) == 0)
13081 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13082 return NULL_TREE;
13084 if (!arg || !validate_arg (arg, POINTER_TYPE))
13085 return NULL_TREE;
13087 str = c_getstr (arg);
13088 if (str == NULL)
13089 return NULL_TREE;
13091 else
13093 /* The format specifier doesn't contain any '%' characters. */
13094 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13095 && arg)
13096 return NULL_TREE;
13097 str = fmt_str;
13100 /* If the string was "", printf does nothing. */
13101 if (str[0] == '\0')
13102 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13104 /* If the string has length of 1, call putchar. */
13105 if (str[1] == '\0')
13107 /* Given printf ("c") (where c is any one character),
13108 convert "c"[0] to an int and pass that to the replacement
13109 function. */
13110 newarg = build_int_cst (NULL_TREE, str[0]);
13111 if (fn_putchar)
13112 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13114 else
13116 /* If the string was "string\n", call puts("string"). */
13117 size_t len = strlen (str);
13118 if ((unsigned char)str[len - 1] == target_newline)
13120 /* Create a NUL-terminated string that's one char shorter
13121 than the original, stripping off the trailing '\n'. */
13122 char *newstr = XALLOCAVEC (char, len);
13123 memcpy (newstr, str, len - 1);
13124 newstr[len - 1] = 0;
13126 newarg = build_string_literal (len, newstr);
13127 if (fn_puts)
13128 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13130 else
13131 /* We'd like to arrange to call fputs(string,stdout) here,
13132 but we need stdout and don't have a way to get it yet. */
13133 return NULL_TREE;
13137 /* The other optimizations can be done only on the non-va_list variants. */
13138 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13139 return NULL_TREE;
13141 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13142 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13144 if (!arg || !validate_arg (arg, POINTER_TYPE))
13145 return NULL_TREE;
13146 if (fn_puts)
13147 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13150 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13151 else if (strcmp (fmt_str, target_percent_c) == 0)
13153 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13154 return NULL_TREE;
13155 if (fn_putchar)
13156 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13159 if (!call)
13160 return NULL_TREE;
13162 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
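/* Illustrative sketch, not part of the original builtins.c: the standalone
   user-level fragment below (hypothetical names) shows the shapes the
   folder above rewrites when the return values are ignored.  */

#include <stdio.h>

static void
printf_fold_demo (const char *name, int c)
{
  printf ("hello\n");    /* no '%', trailing '\n'  -> puts ("hello")  */
  printf ("%s\n", name); /* "%s\n"                 -> puts (name)     */
  printf ("%c", c);      /* "%c"                   -> putchar (c)     */
  printf ("x");          /* single character       -> putchar ('x')   */
}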
13165 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
13166 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13167 more than 3 arguments, and ARG may be null in the 2-argument case.
13169 Return NULL_TREE if no simplification was possible, otherwise return the
13170 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13171 code of the function to be simplified. */
13173 static tree
13174 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13175 tree fmt, tree arg, bool ignore,
13176 enum built_in_function fcode)
13178 tree fn_fputc, fn_fputs, call = NULL_TREE;
13179 const char *fmt_str = NULL;
13181 /* If the return value is used, don't do the transformation. */
13182 if (! ignore)
13183 return NULL_TREE;
13185 /* Verify the required arguments in the original call. */
13186 if (!validate_arg (fp, POINTER_TYPE))
13187 return NULL_TREE;
13188 if (!validate_arg (fmt, POINTER_TYPE))
13189 return NULL_TREE;
13191 /* Check whether the format is a literal string constant. */
13192 fmt_str = c_getstr (fmt);
13193 if (fmt_str == NULL)
13194 return NULL_TREE;
13196 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13198 /* If we're using an unlocked function, assume the other
13199 unlocked functions exist explicitly. */
13200 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
13201 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
13203 else
13205 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
13206 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
13209 if (!init_target_chars ())
13210 return NULL_TREE;
13212 /* If the format doesn't contain % args or %%, use fputs.  */
13213 if (strchr (fmt_str, target_percent) == NULL)
13215 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13216 && arg)
13217 return NULL_TREE;
13219 /* If the format specifier was "", fprintf does nothing. */
13220 if (fmt_str[0] == '\0')
13222 /* If FP has side-effects, just wait until gimplification is
13223 done. */
13224 if (TREE_SIDE_EFFECTS (fp))
13225 return NULL_TREE;
13227 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13230 /* When "string" doesn't contain %, replace all cases of
13231 fprintf (fp, string) with fputs (string, fp). The fputs
13232 builtin will take care of special cases like length == 1. */
13233 if (fn_fputs)
13234 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13237 /* The other optimizations can be done only on the non-va_list variants. */
13238 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13239 return NULL_TREE;
13241 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13242 else if (strcmp (fmt_str, target_percent_s) == 0)
13244 if (!arg || !validate_arg (arg, POINTER_TYPE))
13245 return NULL_TREE;
13246 if (fn_fputs)
13247 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13250 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13251 else if (strcmp (fmt_str, target_percent_c) == 0)
13253 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13254 return NULL_TREE;
13255 if (fn_fputc)
13256 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13259 if (!call)
13260 return NULL_TREE;
13261 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
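/* Illustrative sketch, not part of the original builtins.c: a matching
   user-level fragment (hypothetical names) for the fprintf folder above,
   again assuming the return values are ignored.  */

#include <stdio.h>

static void
fprintf_fold_demo (FILE *fp, const char *name, int c)
{
  fprintf (fp, "hello");    /* no '%'  -> fputs ("hello", fp)  */
  fprintf (fp, "%s", name); /* "%s"    -> fputs (name, fp)     */
  fprintf (fp, "%c", c);    /* "%c"    -> fputc (c, fp)        */
}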
13264 /* Initialize format string characters in the target charset. */
13266 static bool
13267 init_target_chars (void)
13269 static bool init;
13270 if (!init)
13272 target_newline = lang_hooks.to_target_charset ('\n');
13273 target_percent = lang_hooks.to_target_charset ('%');
13274 target_c = lang_hooks.to_target_charset ('c');
13275 target_s = lang_hooks.to_target_charset ('s');
13276 if (target_newline == 0 || target_percent == 0 || target_c == 0
13277 || target_s == 0)
13278 return false;
13280 target_percent_c[0] = target_percent;
13281 target_percent_c[1] = target_c;
13282 target_percent_c[2] = '\0';
13284 target_percent_s[0] = target_percent;
13285 target_percent_s[1] = target_s;
13286 target_percent_s[2] = '\0';
13288 target_percent_s_newline[0] = target_percent;
13289 target_percent_s_newline[1] = target_s;
13290 target_percent_s_newline[2] = target_newline;
13291 target_percent_s_newline[3] = '\0';
13293 init = true;
13295 return true;
13298 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13299 and no overflow/underflow occurred. INEXACT is true if M was not
13300 exactly calculated. TYPE is the tree type for the result. This
13301 function assumes that you cleared the MPFR flags and then
13302 calculated M to see if anything subsequently set a flag prior to
13303 entering this function. Return NULL_TREE if any checks fail. */
13305 static tree
13306 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13308 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13309 overflow/underflow occurred. If -frounding-math, proceed iff the
13310 result of calling FUNC was exact. */
13311 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13312 && (!flag_rounding_math || !inexact))
13314 REAL_VALUE_TYPE rr;
13316 real_from_mpfr (&rr, m, type, GMP_RNDN);
13317 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13318 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13319 but the mpfr_t is not, then we underflowed in the
13320 conversion. */
13321 if (real_isfinite (&rr)
13322 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13324 REAL_VALUE_TYPE rmode;
13326 real_convert (&rmode, TYPE_MODE (type), &rr);
13327 /* Proceed iff the specified mode can hold the value. */
13328 if (real_identical (&rmode, &rr))
13329 return build_real (type, rmode);
13332 return NULL_TREE;
13335 #ifdef HAVE_mpc
13336 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13337 number and no overflow/underflow occurred. INEXACT is true if M
13338 was not exactly calculated. TYPE is the tree type for the result.
13339 This function assumes that you cleared the MPFR flags and then
13340 calculated M to see if anything subsequently set a flag prior to
13341 entering this function. Return NULL_TREE if any checks fail. */
13343 static tree
13344 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact)
13346 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13347 overflow/underflow occurred. If -frounding-math, proceed iff the
13348 result of calling FUNC was exact. */
13349 if (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13350 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13351 && (!flag_rounding_math || !inexact))
13353 REAL_VALUE_TYPE re, im;
13355 real_from_mpfr (&re, mpc_realref (m), type, GMP_RNDN);
13356 real_from_mpfr (&im, mpc_imagref (m), type, GMP_RNDN);
13357 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13358 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13359 but the mpfr_t is not, then we underflowed in the
13360 conversion. */
13361 if (real_isfinite (&re) && real_isfinite (&im)
13362 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13363 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0))
13365 REAL_VALUE_TYPE re_mode, im_mode;
13367 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13368 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13369 /* Proceed iff the specified mode can hold the value. */
13370 if (real_identical (&re_mode, &re) && real_identical (&im_mode, &im))
13371 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13372 build_real (TREE_TYPE (type), im_mode));
13375 return NULL_TREE;
13377 #endif /* HAVE_mpc */
13379 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13380 FUNC on it and return the resulting value as a tree with type TYPE.
13381 If MIN and/or MAX are not NULL, then the supplied ARG must be
13382 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13383 acceptable values, otherwise they are not. The mpfr precision is
13384 set to the precision of TYPE. We assume that function FUNC returns
13385 zero if the result could be calculated exactly within the requested
13386 precision. */
13388 static tree
13389 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13390 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13391 bool inclusive)
13393 tree result = NULL_TREE;
13395 STRIP_NOPS (arg);
13397 /* To proceed, MPFR must exactly represent the target floating point
13398 format, which only happens when the target base equals two. */
13399 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13400 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13402 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13404 if (real_isfinite (ra)
13405 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13406 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13408 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13409 const int prec = fmt->p;
13410 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13411 int inexact;
13412 mpfr_t m;
13414 mpfr_init2 (m, prec);
13415 mpfr_from_real (m, ra, GMP_RNDN);
13416 mpfr_clear_flags ();
13417 inexact = func (m, m, rnd);
13418 result = do_mpfr_ckconv (m, type, inexact);
13419 mpfr_clear (m);
13423 return result;
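/* Illustrative note, not part of the original builtins.c: callers elsewhere
   in this file are assumed to pass an MPFR entry point directly, roughly

       ret = do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, false);

   so that a constant sin (1.0) becomes a REAL_CST at compile time; the
   optional MIN/MAX bounds let folders for functions such as sqrt or log
   reject out-of-domain constant arguments.  */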
13426 /* If arguments ARG1 and ARG2 are REAL_CSTs, call the two-argument mpfr
13427 function FUNC on them and return the resulting value as a tree with type TYPE.
13428 The mpfr precision is set to the precision of TYPE. We assume that
13429 function FUNC returns zero if the result could be calculated
13430 exactly within the requested precision. */
13432 static tree
13433 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13434 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13436 tree result = NULL_TREE;
13438 STRIP_NOPS (arg1);
13439 STRIP_NOPS (arg2);
13441 /* To proceed, MPFR must exactly represent the target floating point
13442 format, which only happens when the target base equals two. */
13443 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13444 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13445 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13447 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13448 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13450 if (real_isfinite (ra1) && real_isfinite (ra2))
13452 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13453 const int prec = fmt->p;
13454 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13455 int inexact;
13456 mpfr_t m1, m2;
13458 mpfr_inits2 (prec, m1, m2, NULL);
13459 mpfr_from_real (m1, ra1, GMP_RNDN);
13460 mpfr_from_real (m2, ra2, GMP_RNDN);
13461 mpfr_clear_flags ();
13462 inexact = func (m1, m1, m2, rnd);
13463 result = do_mpfr_ckconv (m1, type, inexact);
13464 mpfr_clears (m1, m2, NULL);
13468 return result;
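/* Illustrative note, not part of the original builtins.c: the two-argument
   folders are assumed to follow the same pattern, e.g. roughly

       ret = do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);

   so constant calls such as atan2 (1.0, 1.0) or hypot (3.0, 4.0) can be
   replaced by their computed values once do_mpfr_ckconv accepts them.  */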
13471 /* If arguments ARG1, ARG2 and ARG3 are REAL_CSTs, call the three-argument
13472 mpfr function FUNC on them and return the resulting value as a tree with type TYPE.
13473 The mpfr precision is set to the precision of TYPE. We assume that
13474 function FUNC returns zero if the result could be calculated
13475 exactly within the requested precision. */
13477 static tree
13478 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13479 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13481 tree result = NULL_TREE;
13483 STRIP_NOPS (arg1);
13484 STRIP_NOPS (arg2);
13485 STRIP_NOPS (arg3);
13487 /* To proceed, MPFR must exactly represent the target floating point
13488 format, which only happens when the target base equals two. */
13489 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13490 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13491 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13492 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13494 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13495 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13496 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13498 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13500 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13501 const int prec = fmt->p;
13502 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13503 int inexact;
13504 mpfr_t m1, m2, m3;
13506 mpfr_inits2 (prec, m1, m2, m3, NULL);
13507 mpfr_from_real (m1, ra1, GMP_RNDN);
13508 mpfr_from_real (m2, ra2, GMP_RNDN);
13509 mpfr_from_real (m3, ra3, GMP_RNDN);
13510 mpfr_clear_flags ();
13511 inexact = func (m1, m1, m2, m3, rnd);
13512 result = do_mpfr_ckconv (m1, type, inexact);
13513 mpfr_clears (m1, m2, m3, NULL);
13517 return result;
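/* Illustrative note, not part of the original builtins.c: the typical user
   of the three-argument helper is assumed to be the fma folder, roughly

       ret = do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);

   which evaluates a constant fma (x, y, z) once in MPFR and lets
   do_mpfr_ckconv decide whether the result may be emitted as a REAL_CST.  */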
13520 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13521 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13522 If ARG_SINP and ARG_COSP are NULL then the result is returned
13523 as a complex value.
13524 The type is taken from the type of ARG and is used for setting the
13525 precision of the calculation and results. */
13527 static tree
13528 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13530 tree const type = TREE_TYPE (arg);
13531 tree result = NULL_TREE;
13533 STRIP_NOPS (arg);
13535 /* To proceed, MPFR must exactly represent the target floating point
13536 format, which only happens when the target base equals two. */
13537 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13538 && TREE_CODE (arg) == REAL_CST
13539 && !TREE_OVERFLOW (arg))
13541 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13543 if (real_isfinite (ra))
13545 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13546 const int prec = fmt->p;
13547 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13548 tree result_s, result_c;
13549 int inexact;
13550 mpfr_t m, ms, mc;
13552 mpfr_inits2 (prec, m, ms, mc, NULL);
13553 mpfr_from_real (m, ra, GMP_RNDN);
13554 mpfr_clear_flags ();
13555 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13556 result_s = do_mpfr_ckconv (ms, type, inexact);
13557 result_c = do_mpfr_ckconv (mc, type, inexact);
13558 mpfr_clears (m, ms, mc, NULL);
13559 if (result_s && result_c)
13561 /* If we are to return the result as a complex value, do so.  */
13562 if (!arg_sinp && !arg_cosp)
13563 return build_complex (build_complex_type (type),
13564 result_c, result_s);
13566 /* Dereference the sin/cos pointer arguments. */
13567 arg_sinp = build_fold_indirect_ref (arg_sinp);
13568 arg_cosp = build_fold_indirect_ref (arg_cosp);
13569 /* Proceed iff valid pointer types were passed in.  */
13570 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13571 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13573 /* Set the values. */
13574 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13575 result_s);
13576 TREE_SIDE_EFFECTS (result_s) = 1;
13577 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13578 result_c);
13579 TREE_SIDE_EFFECTS (result_c) = 1;
13580 /* Combine the assignments into a compound expr. */
13581 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13582 result_s, result_c));
13587 return result;
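/* Illustrative note, not part of the original builtins.c: with both pointer
   arguments NULL this helper is assumed to serve cexpi-style folding,
   returning cos (x) + i*sin (x) as a COMPLEX_CST; with real pointers it
   folds a constant sincos (x, &s, &c) into the pair of assignments built
   above, combined in a COMPOUND_EXPR.  */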
13590 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13591 two-argument mpfr order N Bessel function FUNC on them and return
13592 the resulting value as a tree with type TYPE. The mpfr precision
13593 is set to the precision of TYPE. We assume that function FUNC
13594 returns zero if the result could be calculated exactly within the
13595 requested precision. */
13596 static tree
13597 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13598 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13599 const REAL_VALUE_TYPE *min, bool inclusive)
13601 tree result = NULL_TREE;
13603 STRIP_NOPS (arg1);
13604 STRIP_NOPS (arg2);
13606 /* To proceed, MPFR must exactly represent the target floating point
13607 format, which only happens when the target base equals two. */
13608 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13609 && host_integerp (arg1, 0)
13610 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13612 const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
13613 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13615 if (n == (long)n
13616 && real_isfinite (ra)
13617 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13619 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13620 const int prec = fmt->p;
13621 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13622 int inexact;
13623 mpfr_t m;
13625 mpfr_init2 (m, prec);
13626 mpfr_from_real (m, ra, GMP_RNDN);
13627 mpfr_clear_flags ();
13628 inexact = func (m, n, m, rnd);
13629 result = do_mpfr_ckconv (m, type, inexact);
13630 mpfr_clear (m);
13634 return result;
13637 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13638 the pointer *(ARG_QUO) and return the result. The type is taken
13639 from the type of ARG0 and is used for setting the precision of the
13640 calculation and results. */
13642 static tree
13643 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13645 tree const type = TREE_TYPE (arg0);
13646 tree result = NULL_TREE;
13648 STRIP_NOPS (arg0);
13649 STRIP_NOPS (arg1);
13651 /* To proceed, MPFR must exactly represent the target floating point
13652 format, which only happens when the target base equals two. */
13653 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13654 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13655 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13657 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13658 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13660 if (real_isfinite (ra0) && real_isfinite (ra1))
13662 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13663 const int prec = fmt->p;
13664 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13665 tree result_rem;
13666 long integer_quo;
13667 mpfr_t m0, m1;
13669 mpfr_inits2 (prec, m0, m1, NULL);
13670 mpfr_from_real (m0, ra0, GMP_RNDN);
13671 mpfr_from_real (m1, ra1, GMP_RNDN);
13672 mpfr_clear_flags ();
13673 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13674 /* Remquo is independent of the rounding mode, so pass
13675 inexact=0 to do_mpfr_ckconv(). */
13676 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13677 mpfr_clears (m0, m1, NULL);
13678 if (result_rem)
13680 /* MPFR calculates quo in the host's long so it may
13681 return more bits in quo than the target int can hold
13682 if sizeof(host long) > sizeof(target int). This can
13683 happen even for native compilers in LP64 mode. In
13684 these cases, modulo the quo value with the largest
13685 number that the target int can hold while leaving one
13686 bit for the sign. */
13687 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13688 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13690 /* Dereference the quo pointer argument. */
13691 arg_quo = build_fold_indirect_ref (arg_quo);
13692 /* Proceed iff a valid pointer type was passed in. */
13693 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13695 /* Set the value. */
13696 tree result_quo = fold_build2 (MODIFY_EXPR,
13697 TREE_TYPE (arg_quo), arg_quo,
13698 build_int_cst (NULL, integer_quo));
13699 TREE_SIDE_EFFECTS (result_quo) = 1;
13700 /* Combine the quo assignment with the rem. */
13701 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13702 result_quo, result_rem));
13707 return result;
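/* Illustrative note, not part of the original builtins.c: as a worked
   example, a constant remquo (5.0, 2.0, &q) has 5.0/2.0 rounded to the
   nearest integer 2, so the fold above would produce the remainder 1.0
   together with the assignment of 2 into *q, glued by the COMPOUND_EXPR.  */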
13710 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13711 resulting value as a tree with type TYPE. The mpfr precision is
13712 set to the precision of TYPE. We assume that this mpfr function
13713 returns zero if the result could be calculated exactly within the
13714 requested precision. In addition, the integer pointer represented
13715 by ARG_SG will be dereferenced and set to the appropriate signgam
13716 (-1,1) value. */
13718 static tree
13719 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13721 tree result = NULL_TREE;
13723 STRIP_NOPS (arg);
13725 /* To proceed, MPFR must exactly represent the target floating point
13726 format, which only happens when the target base equals two. Also
13727 verify ARG is a constant and that ARG_SG is an int pointer. */
13728 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13729 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13730 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13731 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13733 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13735 /* In addition to NaN and Inf, the argument cannot be zero or a
13736 negative integer. */
13737 if (real_isfinite (ra)
13738 && ra->cl != rvc_zero
13739 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
13741 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13742 const int prec = fmt->p;
13743 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13744 int inexact, sg;
13745 mpfr_t m;
13746 tree result_lg;
13748 mpfr_init2 (m, prec);
13749 mpfr_from_real (m, ra, GMP_RNDN);
13750 mpfr_clear_flags ();
13751 inexact = mpfr_lgamma (m, &sg, m, rnd);
13752 result_lg = do_mpfr_ckconv (m, type, inexact);
13753 mpfr_clear (m);
13754 if (result_lg)
13756 tree result_sg;
13758 /* Dereference the arg_sg pointer argument. */
13759 arg_sg = build_fold_indirect_ref (arg_sg);
13760 /* Assign the signgam value into *arg_sg. */
13761 result_sg = fold_build2 (MODIFY_EXPR,
13762 TREE_TYPE (arg_sg), arg_sg,
13763 build_int_cst (NULL, sg));
13764 TREE_SIDE_EFFECTS (result_sg) = 1;
13765 /* Combine the signgam assignment with the lgamma result. */
13766 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13767 result_sg, result_lg));
13772 return result;
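/* Illustrative note, not part of the original builtins.c: for a constant
   call like lgamma_r (2.5, &sg), gamma (2.5) is positive, so the fold
   above yields the precomputed log-gamma value together with the store of
   1 into *sg; zero and negative-integer arguments are rejected because
   lgamma has poles there.  */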
13775 #ifdef HAVE_mpc
13776 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13777 function FUNC on it and return the resulting value as a tree with
13778 type TYPE. The mpfr precision is set to the precision of TYPE. We
13779 assume that function FUNC returns zero if the result could be
13780 calculated exactly within the requested precision. */
13782 static tree
13783 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13785 tree result = NULL_TREE;
13787 STRIP_NOPS (arg);
13789 /* To proceed, MPFR must exactly represent the target floating point
13790 format, which only happens when the target base equals two. */
13791 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13792 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13793 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13795 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13796 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13798 if (real_isfinite (re) && real_isfinite (im))
13800 const struct real_format *const fmt =
13801 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13802 const int prec = fmt->p;
13803 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13804 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13805 int inexact;
13806 mpc_t m;
13808 mpc_init2 (m, prec);
13809 mpfr_from_real (mpc_realref(m), re, rnd);
13810 mpfr_from_real (mpc_imagref(m), im, rnd);
13811 mpfr_clear_flags ();
13812 inexact = func (m, m, crnd);
13813 result = do_mpc_ckconv (m, type, inexact);
13814 mpc_clear (m);
13818 return result;
13821 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
13822 mpc function FUNC on them and return the resulting value as a tree
13823 with type TYPE. The mpfr precision is set to the precision of
13824 TYPE. We assume that function FUNC returns zero if the result
13825 could be calculated exactly within the requested precision. */
13827 #ifdef HAVE_mpc
13828 tree
13829 do_mpc_arg2 (tree arg0, tree arg1, tree type,
13830 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13832 tree result = NULL_TREE;
13834 STRIP_NOPS (arg0);
13835 STRIP_NOPS (arg1);
13837 /* To proceed, MPFR must exactly represent the target floating point
13838 format, which only happens when the target base equals two. */
13839 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13840 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13841 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13842 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13843 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13845 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13846 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13847 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13848 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13850 if (real_isfinite (re0) && real_isfinite (im0)
13851 && real_isfinite (re1) && real_isfinite (im1))
13853 const struct real_format *const fmt =
13854 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13855 const int prec = fmt->p;
13856 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13857 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13858 int inexact;
13859 mpc_t m0, m1;
13861 mpc_init2 (m0, prec);
13862 mpc_init2 (m1, prec);
13863 mpfr_from_real (mpc_realref(m0), re0, rnd);
13864 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13865 mpfr_from_real (mpc_realref(m1), re1, rnd);
13866 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13867 mpfr_clear_flags ();
13868 inexact = func (m0, m0, m1, crnd);
13869 result = do_mpc_ckconv (m0, type, inexact);
13870 mpc_clear (m0);
13871 mpc_clear (m1);
13875 return result;
13877 # endif
13878 #endif /* HAVE_mpc */
13880 /* FIXME tuples.
13881 The functions below provide an alternate interface for folding
13882 builtin function calls presented as GIMPLE_CALL statements rather
13883 than as CALL_EXPRs. The folded result is still expressed as a
13884 tree. There is too much code duplication in the handling of
13885 varargs functions, and a more intrusive re-factoring would permit
13886 better sharing of code between the tree and statement-based
13887 versions of these functions. */
13889 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13890 along with N new arguments specified as the "..." parameters. SKIP
13891 is the number of arguments in STMT to be omitted. This function is used
13892 to do varargs-to-varargs transformations. */
13894 static tree
13895 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13897 int oldnargs = gimple_call_num_args (stmt);
13898 int nargs = oldnargs - skip + n;
13899 tree fntype = TREE_TYPE (fndecl);
13900 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13901 tree *buffer;
13902 int i, j;
13903 va_list ap;
13904 location_t loc = gimple_location (stmt);
13906 buffer = XALLOCAVEC (tree, nargs);
13907 va_start (ap, n);
13908 for (i = 0; i < n; i++)
13909 buffer[i] = va_arg (ap, tree);
13910 va_end (ap);
13911 for (j = skip; j < oldnargs; j++, i++)
13912 buffer[i] = gimple_call_arg (stmt, j);
13914 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
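/* Illustrative note, not part of the original builtins.c: the gimple
   sprintf_chk folder below uses this helper as

       gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);

   i.e. drop the first four arguments of the _chk call, prepend DEST and
   FMT, and carry over any remaining "..." arguments from the original
   call.  */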
13917 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13918 a normal call should be emitted rather than expanding the function
13919 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13921 static tree
13922 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13924 tree dest, size, len, fn, fmt, flag;
13925 const char *fmt_str;
13926 int nargs = gimple_call_num_args (stmt);
13928 /* Verify the required arguments in the original call. */
13929 if (nargs < 4)
13930 return NULL_TREE;
13931 dest = gimple_call_arg (stmt, 0);
13932 if (!validate_arg (dest, POINTER_TYPE))
13933 return NULL_TREE;
13934 flag = gimple_call_arg (stmt, 1);
13935 if (!validate_arg (flag, INTEGER_TYPE))
13936 return NULL_TREE;
13937 size = gimple_call_arg (stmt, 2);
13938 if (!validate_arg (size, INTEGER_TYPE))
13939 return NULL_TREE;
13940 fmt = gimple_call_arg (stmt, 3);
13941 if (!validate_arg (fmt, POINTER_TYPE))
13942 return NULL_TREE;
13944 if (! host_integerp (size, 1))
13945 return NULL_TREE;
13947 len = NULL_TREE;
13949 if (!init_target_chars ())
13950 return NULL_TREE;
13952 /* Check whether the format is a literal string constant. */
13953 fmt_str = c_getstr (fmt);
13954 if (fmt_str != NULL)
13956 /* If the format doesn't contain % args or %%, we know the size. */
13957 if (strchr (fmt_str, target_percent) == 0)
13959 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13960 len = build_int_cstu (size_type_node, strlen (fmt_str));
13962 /* If the format is "%s" and the first ... argument is a string literal,
13963 we know the size too. */
13964 else if (fcode == BUILT_IN_SPRINTF_CHK
13965 && strcmp (fmt_str, target_percent_s) == 0)
13967 tree arg;
13969 if (nargs == 5)
13971 arg = gimple_call_arg (stmt, 4);
13972 if (validate_arg (arg, POINTER_TYPE))
13974 len = c_strlen (arg, 1);
13975 if (! len || ! host_integerp (len, 1))
13976 len = NULL_TREE;
13982 if (! integer_all_onesp (size))
13984 if (! len || ! tree_int_cst_lt (len, size))
13985 return NULL_TREE;
13988 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13989 or if format doesn't contain % chars or is "%s". */
13990 if (! integer_zerop (flag))
13992 if (fmt_str == NULL)
13993 return NULL_TREE;
13994 if (strchr (fmt_str, target_percent) != NULL
13995 && strcmp (fmt_str, target_percent_s))
13996 return NULL_TREE;
13999 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
14000 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
14001 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
14002 if (!fn)
14003 return NULL_TREE;
14005 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
14008 /* Fold a call STMT to __{,v}snprintf_chk.  Return NULL_TREE if
14009 a normal call should be emitted rather than expanding the function
14010 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14011 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14012 passed as second argument. */
14014 tree
14015 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14016 enum built_in_function fcode)
14018 tree dest, size, len, fn, fmt, flag;
14019 const char *fmt_str;
14021 /* Verify the required arguments in the original call. */
14022 if (gimple_call_num_args (stmt) < 5)
14023 return NULL_TREE;
14024 dest = gimple_call_arg (stmt, 0);
14025 if (!validate_arg (dest, POINTER_TYPE))
14026 return NULL_TREE;
14027 len = gimple_call_arg (stmt, 1);
14028 if (!validate_arg (len, INTEGER_TYPE))
14029 return NULL_TREE;
14030 flag = gimple_call_arg (stmt, 2);
14031 if (!validate_arg (flag, INTEGER_TYPE))
14032 return NULL_TREE;
14033 size = gimple_call_arg (stmt, 3);
14034 if (!validate_arg (size, INTEGER_TYPE))
14035 return NULL_TREE;
14036 fmt = gimple_call_arg (stmt, 4);
14037 if (!validate_arg (fmt, POINTER_TYPE))
14038 return NULL_TREE;
14040 if (! host_integerp (size, 1))
14041 return NULL_TREE;
14043 if (! integer_all_onesp (size))
14045 if (! host_integerp (len, 1))
14047 /* If LEN is not constant, try MAXLEN too.
14048 For MAXLEN only allow optimizing into non-_ocs function
14049 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
14050 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
14051 return NULL_TREE;
14053 else
14054 maxlen = len;
14056 if (tree_int_cst_lt (size, maxlen))
14057 return NULL_TREE;
14060 if (!init_target_chars ())
14061 return NULL_TREE;
14063 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
14064 or if format doesn't contain % chars or is "%s". */
14065 if (! integer_zerop (flag))
14067 fmt_str = c_getstr (fmt);
14068 if (fmt_str == NULL)
14069 return NULL_TREE;
14070 if (strchr (fmt_str, target_percent) != NULL
14071 && strcmp (fmt_str, target_percent_s))
14072 return NULL_TREE;
14075 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
14076 available. */
14077 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
14078 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
14079 if (!fn)
14080 return NULL_TREE;
14082 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
14085 /* Builtins with folding operations that operate on "..." arguments
14086 need special handling; we need to store the arguments in a convenient
14087 data structure before attempting any folding. Fortunately there are
14088 only a few builtins that fall into this category. FNDECL is the
14089 function, STMT is the GIMPLE_CALL statement for the call, and IGNORE is
14090 true if the result of the function call is ignored. */
14092 static tree
14093 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14094 bool ignore ATTRIBUTE_UNUSED)
14096 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14097 tree ret = NULL_TREE;
14099 switch (fcode)
14101 case BUILT_IN_SPRINTF_CHK:
14102 case BUILT_IN_VSPRINTF_CHK:
14103 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14104 break;
14106 case BUILT_IN_SNPRINTF_CHK:
14107 case BUILT_IN_VSNPRINTF_CHK:
14108 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14110 default:
14111 break;
14113 if (ret)
14115 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14116 TREE_NO_WARNING (ret) = 1;
14117 return ret;
14119 return NULL_TREE;
14122 /* A wrapper function for builtin folding that prevents warnings for
14123 "statement without effect" and the like, caused by removing the
14124 call node earlier than the warning is generated. */
14126 tree
14127 fold_call_stmt (gimple stmt, bool ignore)
14129 tree ret = NULL_TREE;
14130 tree fndecl = gimple_call_fndecl (stmt);
14131 location_t loc = gimple_location (stmt);
14132 if (fndecl
14133 && TREE_CODE (fndecl) == FUNCTION_DECL
14134 && DECL_BUILT_IN (fndecl)
14135 && !gimple_call_va_arg_pack_p (stmt))
14137 int nargs = gimple_call_num_args (stmt);
14139 if (avoid_folding_inline_builtin (fndecl))
14140 return NULL_TREE;
14141 /* FIXME: Don't use a list in this interface. */
14142 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14144 tree arglist = NULL_TREE;
14145 int i;
14146 for (i = nargs - 1; i >= 0; i--)
14147 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
14148 return targetm.fold_builtin (fndecl, arglist, ignore);
14150 else
14152 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
14154 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
14155 int i;
14156 for (i = 0; i < nargs; i++)
14157 args[i] = gimple_call_arg (stmt, i);
14158 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
14160 if (!ret)
14161 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
14162 if (ret)
14164 /* Propagate location information from original call to
14165 expansion of builtin. Otherwise things like
14166 maybe_emit_chk_warning, that operate on the expansion
14167 of a builtin, will use the wrong location information. */
14168 if (gimple_has_location (stmt))
14170 tree realret = ret;
14171 if (TREE_CODE (ret) == NOP_EXPR)
14172 realret = TREE_OPERAND (ret, 0);
14173 if (CAN_HAVE_LOCATION_P (realret)
14174 && !EXPR_HAS_LOCATION (realret))
14175 SET_EXPR_LOCATION (realret, loc);
14176 return realret;
14178 return ret;
14182 return NULL_TREE;