gcc/builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
56 #endif
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names[(int) END_BUILTINS] =
64 {
65 #include "builtins.def"
66 };
67 #undef DEF_BUILTIN
69 /* Set up an array of _DECL trees; make sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when such an implicit use is invalid (for instance, when
74 the runtime is not required to implement the function call in all cases). */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
85 #endif
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
132 static rtx expand_builtin_bzero (tree);
133 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_alloca (tree, rtx);
139 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static rtx expand_builtin_fputs (tree, rtx, bool);
142 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
143 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_expect (tree, tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (tree);
151 static tree fold_builtin_inf (tree, int);
152 static tree fold_builtin_nan (tree, tree, int);
153 static tree rewrite_call_expr (tree, int, tree, int, ...);
154 static bool validate_arg (const_tree, enum tree_code code);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (tree, tree);
157 static bool readonly_data_expr (tree);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_sqrt (tree, tree);
161 static tree fold_builtin_cbrt (tree, tree);
162 static tree fold_builtin_pow (tree, tree, tree, tree);
163 static tree fold_builtin_powi (tree, tree, tree, tree);
164 static tree fold_builtin_cos (tree, tree, tree);
165 static tree fold_builtin_cosh (tree, tree, tree);
166 static tree fold_builtin_tan (tree, tree);
167 static tree fold_builtin_trunc (tree, tree);
168 static tree fold_builtin_floor (tree, tree);
169 static tree fold_builtin_ceil (tree, tree);
170 static tree fold_builtin_round (tree, tree);
171 static tree fold_builtin_int_roundingfn (tree, tree);
172 static tree fold_builtin_bitop (tree, tree);
173 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
174 static tree fold_builtin_strchr (tree, tree, tree);
175 static tree fold_builtin_memchr (tree, tree, tree, tree);
176 static tree fold_builtin_memcmp (tree, tree, tree);
177 static tree fold_builtin_strcmp (tree, tree);
178 static tree fold_builtin_strncmp (tree, tree, tree);
179 static tree fold_builtin_signbit (tree, tree);
180 static tree fold_builtin_copysign (tree, tree, tree, tree);
181 static tree fold_builtin_isascii (tree);
182 static tree fold_builtin_toascii (tree);
183 static tree fold_builtin_isdigit (tree);
184 static tree fold_builtin_fabs (tree, tree);
185 static tree fold_builtin_abs (tree, tree);
186 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
187 enum tree_code);
188 static tree fold_builtin_n (tree, tree *, int, bool);
189 static tree fold_builtin_0 (tree, bool);
190 static tree fold_builtin_1 (tree, tree, bool);
191 static tree fold_builtin_2 (tree, tree, tree, bool);
192 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
193 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
194 static tree fold_builtin_varargs (tree, tree, bool);
196 static tree fold_builtin_strpbrk (tree, tree, tree);
197 static tree fold_builtin_strstr (tree, tree, tree);
198 static tree fold_builtin_strrchr (tree, tree, tree);
199 static tree fold_builtin_strcat (tree, tree);
200 static tree fold_builtin_strncat (tree, tree, tree);
201 static tree fold_builtin_strspn (tree, tree);
202 static tree fold_builtin_strcspn (tree, tree);
203 static tree fold_builtin_sprintf (tree, tree, tree, int);
205 static rtx expand_builtin_object_size (tree);
206 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
207 enum built_in_function);
208 static void maybe_emit_chk_warning (tree, enum built_in_function);
209 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
210 static void maybe_emit_free_warning (tree);
211 static tree fold_builtin_object_size (tree, tree);
212 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
213 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
214 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
215 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
216 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
217 enum built_in_function);
218 static bool init_target_chars (void);
220 static unsigned HOST_WIDE_INT target_newline;
221 static unsigned HOST_WIDE_INT target_percent;
222 static unsigned HOST_WIDE_INT target_c;
223 static unsigned HOST_WIDE_INT target_s;
224 static char target_percent_c[3];
225 static char target_percent_s[3];
226 static char target_percent_s_newline[4];
227 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
228 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
229 static tree do_mpfr_arg2 (tree, tree, tree,
230 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
231 static tree do_mpfr_arg3 (tree, tree, tree, tree,
232 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
233 static tree do_mpfr_sincos (tree, tree, tree);
234 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
235 static tree do_mpfr_bessel_n (tree, tree, tree,
236 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
237 const REAL_VALUE_TYPE *, bool);
238 static tree do_mpfr_remquo (tree, tree, tree);
239 static tree do_mpfr_lgamma_r (tree, tree, tree);
240 #endif
242 /* Return true if NODE should be considered for inline expansion regardless
243 of the optimization level. This means whenever a function is invoked with
244 its "internal" name, which normally contains the prefix "__builtin". */
246 static bool called_as_built_in (tree node)
248 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
249 if (strncmp (name, "__builtin_", 10) == 0)
250 return true;
251 if (strncmp (name, "__sync_", 7) == 0)
252 return true;
253 return false;
256 /* Return the alignment in bits of EXP, a pointer valued expression.
257 But don't return more than MAX_ALIGN no matter what.
258 The alignment returned is, by default, the alignment of the thing that
259 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
261 Otherwise, look at the expression to see if we can do better, i.e., if the
262 expression is actually pointing at an object whose alignment is tighter. */
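/* For illustration (a rough sketch, not an exhaustive description): given

       static int x __attribute__ ((aligned (16)));

   a call like get_pointer_alignment (&x, BIGGEST_ALIGNMENT) follows the
   ADDR_EXPR case below, finds DECL_ALIGN (x) == 128, and returns 128 bits,
   whereas a plain "int *p" with no further structure only yields
   TYPE_ALIGN (int) from the pointed-to type.  */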
265 get_pointer_alignment (tree exp, unsigned int max_align)
267 unsigned int align, inner;
269 /* We rely on TER to compute accurate alignment information. */
270 if (!(optimize && flag_tree_ter))
271 return 0;
273 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
274 return 0;
276 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
277 align = MIN (align, max_align);
279 while (1)
281 switch (TREE_CODE (exp))
283 CASE_CONVERT:
284 exp = TREE_OPERAND (exp, 0);
285 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
286 return align;
288 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
289 align = MIN (inner, max_align);
290 break;
292 case POINTER_PLUS_EXPR:
293 /* If sum of pointer + int, restrict our maximum alignment to that
294 imposed by the integer. If not, we can't do any better than
295 ALIGN. */
296 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
297 return align;
299 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
300 & (max_align / BITS_PER_UNIT - 1))
301 != 0)
302 max_align >>= 1;
304 exp = TREE_OPERAND (exp, 0);
305 break;
307 case ADDR_EXPR:
308 /* See what we are pointing at and look at its alignment. */
309 exp = TREE_OPERAND (exp, 0);
310 inner = max_align;
311 if (handled_component_p (exp))
313 HOST_WIDE_INT bitsize, bitpos;
314 tree offset;
315 enum machine_mode mode;
316 int unsignedp, volatilep;
318 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
319 &mode, &unsignedp, &volatilep, true);
320 if (bitpos)
321 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
322 if (offset && TREE_CODE (offset) == PLUS_EXPR
323 && host_integerp (TREE_OPERAND (offset, 1), 1))
325 /* Any overflow in calculating offset_bits won't change
326 the alignment. */
327 unsigned offset_bits
328 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
329 * BITS_PER_UNIT);
331 if (offset_bits)
332 inner = MIN (inner, (offset_bits & -offset_bits));
333 offset = TREE_OPERAND (offset, 0);
335 if (offset && TREE_CODE (offset) == MULT_EXPR
336 && host_integerp (TREE_OPERAND (offset, 1), 1))
338 /* Any overflow in calculating offset_factor won't change
339 the alignment. */
340 unsigned offset_factor
341 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
342 * BITS_PER_UNIT);
344 if (offset_factor)
345 inner = MIN (inner, (offset_factor & -offset_factor));
347 else if (offset)
348 inner = MIN (inner, BITS_PER_UNIT);
350 if (DECL_P (exp))
351 align = MIN (inner, DECL_ALIGN (exp));
352 #ifdef CONSTANT_ALIGNMENT
353 else if (CONSTANT_CLASS_P (exp))
354 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
355 #endif
356 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
357 || TREE_CODE (exp) == INDIRECT_REF)
358 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
359 else
360 align = MIN (align, inner);
361 return MIN (align, max_align);
363 default:
364 return align;
369 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
370 way, because it could contain a zero byte in the middle.
371 TREE_STRING_LENGTH is the size of the character array, not the string.
373 ONLY_VALUE should be nonzero if the result is not going to be emitted
374 into the instruction stream and zero if it is going to be expanded.
375 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
376 is returned, otherwise NULL, since
377 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
378 evaluate the side-effects.
380 The value returned is of type `ssizetype'.
382 Unfortunately, string_constant can't access the values of const char
383 arrays with initializers, so neither can we do so here. */
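/* A couple of concrete cases, as a sketch: for an argument equivalent to
   "hello" + 2 with ONLY_VALUE zero, the result reduces to ssize_int (3),
   since the offset is a known constant and strlen is run on the tail of
   the literal; for a string such as "foo\0bar" with a non-constant offset
   the result is NULL_TREE, because the embedded zero byte makes the length
   depend on where the offset lands.  */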
385 tree
386 c_strlen (tree src, int only_value)
388 tree offset_node;
389 HOST_WIDE_INT offset;
390 int max;
391 const char *ptr;
393 STRIP_NOPS (src);
394 if (TREE_CODE (src) == COND_EXPR
395 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
397 tree len1, len2;
399 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
400 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
401 if (tree_int_cst_equal (len1, len2))
402 return len1;
405 if (TREE_CODE (src) == COMPOUND_EXPR
406 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
407 return c_strlen (TREE_OPERAND (src, 1), only_value);
409 src = string_constant (src, &offset_node);
410 if (src == 0)
411 return NULL_TREE;
413 max = TREE_STRING_LENGTH (src) - 1;
414 ptr = TREE_STRING_POINTER (src);
416 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
418 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
419 compute the offset to the following null if we don't know where to
420 start searching for it. */
421 int i;
423 for (i = 0; i < max; i++)
424 if (ptr[i] == 0)
425 return NULL_TREE;
427 /* We don't know the starting offset, but we do know that the string
428 has no internal zero bytes. We can assume that the offset falls
429 within the bounds of the string; otherwise, the programmer deserves
430 what he gets. Subtract the offset from the length of the string,
431 and return that. This would perhaps not be valid if we were dealing
432 with named arrays in addition to literal string constants. */
434 return size_diffop (size_int (max), offset_node);
437 /* We have a known offset into the string. Start searching there for
438 a null character if we can represent it as a single HOST_WIDE_INT. */
439 if (offset_node == 0)
440 offset = 0;
441 else if (! host_integerp (offset_node, 0))
442 offset = -1;
443 else
444 offset = tree_low_cst (offset_node, 0);
446 /* If the offset is known to be out of bounds, warn, and call strlen at
447 runtime. */
448 if (offset < 0 || offset > max)
450 /* Suppress multiple warnings for propagated constant strings. */
451 if (! TREE_NO_WARNING (src))
453 warning (0, "offset outside bounds of constant string");
454 TREE_NO_WARNING (src) = 1;
456 return NULL_TREE;
459 /* Use strlen to search for the first zero byte. Since any strings
460 constructed with build_string will have nulls appended, we win even
461 if we get handed something like (char[4])"abcd".
463 Since OFFSET is our starting index into the string, no further
464 calculation is needed. */
465 return ssize_int (strlen (ptr + offset));
468 /* Return a char pointer for a C string if it is a string constant
469 or sum of string constant and integer constant. */
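/* Sketch of the behaviour: for an argument equivalent to "hello" + 2 this
   returns a host pointer to "llo"; if the offset is not a host integer
   constant, or lies outside the string, the function gives up and
   returns 0.  */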
471 static const char *
472 c_getstr (tree src)
474 tree offset_node;
476 src = string_constant (src, &offset_node);
477 if (src == 0)
478 return 0;
480 if (offset_node == 0)
481 return TREE_STRING_POINTER (src);
482 else if (!host_integerp (offset_node, 1)
483 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
484 return 0;
486 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
489 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
490 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
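/* As a rough example: on a little-endian target with 8-bit units,
   c_readstr ("abcd", SImode) packs the lowest-addressed byte into the low
   bits and yields the constant 0x64636261, while a big-endian target gets
   0x61626364 instead.  */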
492 static rtx
493 c_readstr (const char *str, enum machine_mode mode)
495 HOST_WIDE_INT c[2];
496 HOST_WIDE_INT ch;
497 unsigned int i, j;
499 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
501 c[0] = 0;
502 c[1] = 0;
503 ch = 1;
504 for (i = 0; i < GET_MODE_SIZE (mode); i++)
506 j = i;
507 if (WORDS_BIG_ENDIAN)
508 j = GET_MODE_SIZE (mode) - i - 1;
509 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
510 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
511 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
512 j *= BITS_PER_UNIT;
513 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
515 if (ch)
516 ch = (unsigned char) str[i];
517 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
519 return immed_double_const (c[0], c[1], mode);
522 /* Cast a target constant CST to the target CHAR type and, if that value fits
523 into the host char type, return zero and put the value into the variable
524 pointed to by P. */
526 static int
527 target_char_cast (tree cst, char *p)
529 unsigned HOST_WIDE_INT val, hostval;
531 if (!host_integerp (cst, 1)
532 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
533 return 1;
535 val = tree_low_cst (cst, 1);
536 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
537 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
539 hostval = val;
540 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
541 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
543 if (val != hostval)
544 return 1;
546 *p = hostval;
547 return 0;
550 /* Similar to save_expr, but assumes that arbitrary code is not executed
551 in between the multiple evaluations. In particular, we assume that a
552 non-addressable local variable will not be modified. */
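/* In other words: a PARM_DECL, or a non-static and non-addressable VAR_DECL,
   is returned unchanged, since nothing emitted in between can modify it;
   anything else falls back to save_expr, which typically wraps the
   expression in a SAVE_EXPR so it is evaluated only once.  */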
554 static tree
555 builtin_save_expr (tree exp)
557 if (TREE_ADDRESSABLE (exp) == 0
558 && (TREE_CODE (exp) == PARM_DECL
559 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
560 return exp;
562 return save_expr (exp);
565 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
566 times to get the address of either a higher stack frame, or a return
567 address located within it (depending on FNDECL_CODE). */
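/* This is the expander behind source-level calls such as

       void *ra = __builtin_return_address (0);
       void *fp = __builtin_frame_address (1);

   where the argument must be a constant frame count; COUNT below is that
   constant and FNDECL_CODE distinguishes the two builtins.  */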
569 static rtx
570 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
572 int i;
574 #ifdef INITIAL_FRAME_ADDRESS_RTX
575 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
576 #else
577 rtx tem;
579 /* For a zero count with __builtin_return_address, we don't care what
580 frame address we return, because target-specific definitions will
581 override us. Therefore frame pointer elimination is OK, and using
582 the soft frame pointer is OK.
584 For a nonzero count, or a zero count with __builtin_frame_address,
585 we require a stable offset from the current frame pointer to the
586 previous one, so we must use the hard frame pointer, and
587 we must disable frame pointer elimination. */
588 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
589 tem = frame_pointer_rtx;
590 else
592 tem = hard_frame_pointer_rtx;
594 /* Tell reload not to eliminate the frame pointer. */
595 crtl->accesses_prior_frames = 1;
597 #endif
599 /* Some machines need special handling before we can access
600 arbitrary frames. For example, on the SPARC, we must first flush
601 all register windows to the stack. */
602 #ifdef SETUP_FRAME_ADDRESSES
603 if (count > 0)
604 SETUP_FRAME_ADDRESSES ();
605 #endif
607 /* On the SPARC, the return address is not in the frame, it is in a
608 register. There is no way to access it off of the current frame
609 pointer, but it can be accessed off the previous frame pointer by
610 reading the value from the register window save area. */
611 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
612 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
613 count--;
614 #endif
616 /* Scan back COUNT frames to the specified frame. */
617 for (i = 0; i < count; i++)
619 /* Assume the dynamic chain pointer is in the word that the
620 frame address points to, unless otherwise specified. */
621 #ifdef DYNAMIC_CHAIN_ADDRESS
622 tem = DYNAMIC_CHAIN_ADDRESS (tem);
623 #endif
624 tem = memory_address (Pmode, tem);
625 tem = gen_frame_mem (Pmode, tem);
626 tem = copy_to_reg (tem);
629 /* For __builtin_frame_address, return what we've got. But, on
630 the SPARC for example, we may have to add a bias. */
631 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
632 #ifdef FRAME_ADDR_RTX
633 return FRAME_ADDR_RTX (tem);
634 #else
635 return tem;
636 #endif
638 /* For __builtin_return_address, get the return address from that frame. */
639 #ifdef RETURN_ADDR_RTX
640 tem = RETURN_ADDR_RTX (count, tem);
641 #else
642 tem = memory_address (Pmode,
643 plus_constant (tem, GET_MODE_SIZE (Pmode)));
644 tem = gen_frame_mem (Pmode, tem);
645 #endif
646 return tem;
649 /* Alias set used for setjmp buffer. */
650 static alias_set_type setjmp_alias_set = -1;
652 /* Construct the leading half of a __builtin_setjmp call. Control will
653 return to RECEIVER_LABEL. This is also called directly by the SJLJ
654 exception handling code. */
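/* The buffer layout used below, as a sketch: word 0 receives
   targetm.builtin_setjmp_frame_value () (normally a frame pointer value),
   word 1 receives the address of RECEIVER_LABEL, and everything from
   offset 2 * GET_MODE_SIZE (Pmode) onwards is handed to emit_stack_save
   as the machine-dependent stack save area.  */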
656 void
657 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
659 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
660 rtx stack_save;
661 rtx mem;
663 if (setjmp_alias_set == -1)
664 setjmp_alias_set = new_alias_set ();
666 buf_addr = convert_memory_address (Pmode, buf_addr);
668 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
670 /* We store the frame pointer and the address of receiver_label in
671 the buffer and use the rest of it for the stack save area, which
672 is machine-dependent. */
674 mem = gen_rtx_MEM (Pmode, buf_addr);
675 set_mem_alias_set (mem, setjmp_alias_set);
676 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
678 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
679 set_mem_alias_set (mem, setjmp_alias_set);
681 emit_move_insn (validize_mem (mem),
682 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
684 stack_save = gen_rtx_MEM (sa_mode,
685 plus_constant (buf_addr,
686 2 * GET_MODE_SIZE (Pmode)));
687 set_mem_alias_set (stack_save, setjmp_alias_set);
688 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
690 /* If there is further processing to do, do it. */
691 #ifdef HAVE_builtin_setjmp_setup
692 if (HAVE_builtin_setjmp_setup)
693 emit_insn (gen_builtin_setjmp_setup (buf_addr));
694 #endif
696 /* Tell optimize_save_area_alloca that extra work is going to
697 need to go on during alloca. */
698 cfun->calls_setjmp = 1;
700 /* We have a nonlocal label. */
701 cfun->has_nonlocal_label = 1;
704 /* Construct the trailing part of a __builtin_setjmp call. This is
705 also called directly by the SJLJ exception handling code. */
707 void
708 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
710 /* Clobber the FP when we get here, so we have to make sure it's
711 marked as used by this function. */
712 emit_use (hard_frame_pointer_rtx);
714 /* Mark the static chain as clobbered here so life information
715 doesn't get messed up for it. */
716 emit_clobber (static_chain_rtx);
718 /* Now put in the code to restore the frame pointer, and argument
719 pointer, if needed. */
720 #ifdef HAVE_nonlocal_goto
721 if (! HAVE_nonlocal_goto)
722 #endif
724 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
725 /* This might change the hard frame pointer in ways that aren't
726 apparent to early optimization passes, so force a clobber. */
727 emit_clobber (hard_frame_pointer_rtx);
730 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
731 if (fixed_regs[ARG_POINTER_REGNUM])
733 #ifdef ELIMINABLE_REGS
734 size_t i;
735 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
737 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
738 if (elim_regs[i].from == ARG_POINTER_REGNUM
739 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
740 break;
742 if (i == ARRAY_SIZE (elim_regs))
743 #endif
745 /* Now restore our arg pointer from the address at which it
746 was saved in our stack frame. */
747 emit_move_insn (crtl->args.internal_arg_pointer,
748 copy_to_reg (get_arg_pointer_save_area ()));
751 #endif
753 #ifdef HAVE_builtin_setjmp_receiver
754 if (HAVE_builtin_setjmp_receiver)
755 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
756 else
757 #endif
758 #ifdef HAVE_nonlocal_goto_receiver
759 if (HAVE_nonlocal_goto_receiver)
760 emit_insn (gen_nonlocal_goto_receiver ());
761 else
762 #endif
763 { /* Nothing */ }
765 /* We must not allow the code we just generated to be reordered by
766 scheduling. Specifically, the update of the frame pointer must
767 happen immediately, not later. */
768 emit_insn (gen_blockage ());
771 /* __builtin_longjmp is passed a pointer to an array of five words (not
772 all will be used on all machines). It operates similarly to the C
773 library function of the same name, but is more efficient. Much of
774 the code below is copied from the handling of non-local gotos. */
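/* Typical source-level usage, for orientation:

       void *buf[5];
       if (__builtin_setjmp (buf) == 0)
         ... normal path, may eventually do __builtin_longjmp (buf, 1); ...
       else
         ... control lands here after the longjmp ...

   The second argument of __builtin_longjmp must be the constant 1; the
   gcc_assert below enforces that.  */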
776 static void
777 expand_builtin_longjmp (rtx buf_addr, rtx value)
779 rtx fp, lab, stack, insn, last;
780 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
782 /* DRAP is needed for stack realign if longjmp is expanded to current
783 function */
784 if (SUPPORTS_STACK_ALIGNMENT)
785 crtl->need_drap = true;
787 if (setjmp_alias_set == -1)
788 setjmp_alias_set = new_alias_set ();
790 buf_addr = convert_memory_address (Pmode, buf_addr);
792 buf_addr = force_reg (Pmode, buf_addr);
794 /* We used to store value in static_chain_rtx, but that fails if pointers
795 are smaller than integers. We instead require that the user must pass
796 a second argument of 1, because that is what builtin_setjmp will
797 return. This also makes EH slightly more efficient, since we are no
798 longer copying around a value that we don't care about. */
799 gcc_assert (value == const1_rtx);
801 last = get_last_insn ();
802 #ifdef HAVE_builtin_longjmp
803 if (HAVE_builtin_longjmp)
804 emit_insn (gen_builtin_longjmp (buf_addr));
805 else
806 #endif
808 fp = gen_rtx_MEM (Pmode, buf_addr);
809 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
810 GET_MODE_SIZE (Pmode)));
812 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
813 2 * GET_MODE_SIZE (Pmode)));
814 set_mem_alias_set (fp, setjmp_alias_set);
815 set_mem_alias_set (lab, setjmp_alias_set);
816 set_mem_alias_set (stack, setjmp_alias_set);
818 /* Pick up FP, label, and SP from the block and jump. This code is
819 from expand_goto in stmt.c; see there for detailed comments. */
820 #ifdef HAVE_nonlocal_goto
821 if (HAVE_nonlocal_goto)
822 /* We have to pass a value to the nonlocal_goto pattern that will
823 get copied into the static_chain pointer, but it does not matter
824 what that value is, because builtin_setjmp does not use it. */
825 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
826 else
827 #endif
829 lab = copy_to_reg (lab);
831 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
832 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
834 emit_move_insn (hard_frame_pointer_rtx, fp);
835 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
837 emit_use (hard_frame_pointer_rtx);
838 emit_use (stack_pointer_rtx);
839 emit_indirect_jump (lab);
843 /* Search backwards and mark the jump insn as a non-local goto.
844 Note that this precludes the use of __builtin_longjmp to a
845 __builtin_setjmp target in the same function. However, we've
846 already cautioned the user that these functions are for
847 internal exception handling use only. */
848 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
850 gcc_assert (insn != last);
852 if (JUMP_P (insn))
854 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
855 break;
857 else if (CALL_P (insn))
858 break;
862 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
863 and the address of the save area. */
865 static rtx
866 expand_builtin_nonlocal_goto (tree exp)
868 tree t_label, t_save_area;
869 rtx r_label, r_save_area, r_fp, r_sp, insn;
871 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
872 return NULL_RTX;
874 t_label = CALL_EXPR_ARG (exp, 0);
875 t_save_area = CALL_EXPR_ARG (exp, 1);
877 r_label = expand_normal (t_label);
878 r_label = convert_memory_address (Pmode, r_label);
879 r_save_area = expand_normal (t_save_area);
880 r_save_area = convert_memory_address (Pmode, r_save_area);
881 /* Copy the address of the save location to a register just in case it was based
882 on the frame pointer. */
883 r_save_area = copy_to_reg (r_save_area);
884 r_fp = gen_rtx_MEM (Pmode, r_save_area);
885 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
886 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
888 crtl->has_nonlocal_goto = 1;
890 #ifdef HAVE_nonlocal_goto
891 /* ??? We no longer need to pass the static chain value, afaik. */
892 if (HAVE_nonlocal_goto)
893 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
894 else
895 #endif
897 r_label = copy_to_reg (r_label);
899 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
900 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
902 /* Restore frame pointer for containing function.
903 This sets the actual hard register used for the frame pointer
904 to the location of the function's incoming static chain info.
905 The non-local goto handler will then adjust it to contain the
906 proper value and reload the argument pointer, if needed. */
907 emit_move_insn (hard_frame_pointer_rtx, r_fp);
908 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
910 /* USE of hard_frame_pointer_rtx added for consistency;
911 not clear if really needed. */
912 emit_use (hard_frame_pointer_rtx);
913 emit_use (stack_pointer_rtx);
915 /* If the architecture is using a GP register, we must
916 conservatively assume that the target function makes use of it.
917 The prologue of functions with nonlocal gotos must therefore
918 initialize the GP register to the appropriate value, and we
919 must then make sure that this value is live at the point
920 of the jump. (Note that this doesn't necessarily apply
921 to targets with a nonlocal_goto pattern; they are free
922 to implement it in their own way. Note also that this is
923 a no-op if the GP register is a global invariant.) */
924 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
925 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
926 emit_use (pic_offset_table_rtx);
928 emit_indirect_jump (r_label);
931 /* Search backwards to the jump insn and mark it as a
932 non-local goto. */
933 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
935 if (JUMP_P (insn))
937 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
938 break;
940 else if (CALL_P (insn))
941 break;
944 return const0_rtx;
947 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
948 (not all will be used on all machines) that was passed to __builtin_setjmp.
949 It updates the stack pointer in that block to correspond to the current
950 stack pointer. */
952 static void
953 expand_builtin_update_setjmp_buf (rtx buf_addr)
955 enum machine_mode sa_mode = Pmode;
956 rtx stack_save;
959 #ifdef HAVE_save_stack_nonlocal
960 if (HAVE_save_stack_nonlocal)
961 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
962 #endif
963 #ifdef STACK_SAVEAREA_MODE
964 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
965 #endif
967 stack_save
968 = gen_rtx_MEM (sa_mode,
969 memory_address
970 (sa_mode,
971 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
973 #ifdef HAVE_setjmp
974 if (HAVE_setjmp)
975 emit_insn (gen_setjmp ());
976 #endif
978 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
981 /* Expand a call to __builtin_prefetch. For a target that does not support
982 data prefetch, evaluate the memory address argument in case it has side
983 effects. */
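/* The source form being expanded is e.g.

       __builtin_prefetch (&a[i + 16], 0, 3);

   with an address, an optional read/write flag (0 or 1) and an optional
   locality hint (0 through 3), as validated below.  */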
985 static void
986 expand_builtin_prefetch (tree exp)
988 tree arg0, arg1, arg2;
989 int nargs;
990 rtx op0, op1, op2;
992 if (!validate_arglist (exp, POINTER_TYPE, 0))
993 return;
995 arg0 = CALL_EXPR_ARG (exp, 0);
997 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
998 zero (read) and argument 2 (locality) defaults to 3 (high degree of
999 locality). */
1000 nargs = call_expr_nargs (exp);
1001 if (nargs > 1)
1002 arg1 = CALL_EXPR_ARG (exp, 1);
1003 else
1004 arg1 = integer_zero_node;
1005 if (nargs > 2)
1006 arg2 = CALL_EXPR_ARG (exp, 2);
1007 else
1008 arg2 = build_int_cst (NULL_TREE, 3);
1010 /* Argument 0 is an address. */
1011 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1013 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1014 if (TREE_CODE (arg1) != INTEGER_CST)
1016 error ("second argument to %<__builtin_prefetch%> must be a constant");
1017 arg1 = integer_zero_node;
1019 op1 = expand_normal (arg1);
1020 /* Argument 1 must be either zero or one. */
1021 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1023 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1024 " using zero");
1025 op1 = const0_rtx;
1028 /* Argument 2 (locality) must be a compile-time constant int. */
1029 if (TREE_CODE (arg2) != INTEGER_CST)
1031 error ("third argument to %<__builtin_prefetch%> must be a constant");
1032 arg2 = integer_zero_node;
1034 op2 = expand_normal (arg2);
1035 /* Argument 2 must be 0, 1, 2, or 3. */
1036 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1038 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1039 op2 = const0_rtx;
1042 #ifdef HAVE_prefetch
1043 if (HAVE_prefetch)
1045 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1046 (op0,
1047 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1048 || (GET_MODE (op0) != Pmode))
1050 op0 = convert_memory_address (Pmode, op0);
1051 op0 = force_reg (Pmode, op0);
1053 emit_insn (gen_prefetch (op0, op1, op2));
1055 #endif
1057 /* Don't do anything with direct references to volatile memory, but
1058 generate code to handle other side effects. */
1059 if (!MEM_P (op0) && side_effects_p (op0))
1060 emit_insn (op0);
1063 /* Get a MEM rtx for expression EXP which is the address of an operand
1064 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1065 the maximum length of the block of memory that might be accessed or
1066 NULL if unknown. */
1068 static rtx
1069 get_memory_rtx (tree exp, tree len)
1071 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1072 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1074 /* Get an expression we can use to find the attributes to assign to MEM.
1075 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1076 we can. First remove any nops. */
1077 while (CONVERT_EXPR_P (exp)
1078 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1079 exp = TREE_OPERAND (exp, 0);
1081 if (TREE_CODE (exp) == ADDR_EXPR)
1082 exp = TREE_OPERAND (exp, 0);
1083 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1084 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1085 else
1086 exp = NULL;
1088 /* Honor attributes derived from exp, except for the alias set
1089 (as builtin stringops may alias with anything) and the size
1090 (as stringops may access multiple array elements). */
1091 if (exp)
1093 set_mem_attributes (mem, exp, 0);
1095 /* Allow the string and memory builtins to overflow from one
1096 field into another, see http://gcc.gnu.org/PR23561.
1097 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1098 memory accessed by the string or memory builtin will fit
1099 within the field. */
1100 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1102 tree mem_expr = MEM_EXPR (mem);
1103 HOST_WIDE_INT offset = -1, length = -1;
1104 tree inner = exp;
1106 while (TREE_CODE (inner) == ARRAY_REF
1107 || CONVERT_EXPR_P (inner)
1108 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1109 || TREE_CODE (inner) == SAVE_EXPR)
1110 inner = TREE_OPERAND (inner, 0);
1112 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1114 if (MEM_OFFSET (mem)
1115 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1116 offset = INTVAL (MEM_OFFSET (mem));
1118 if (offset >= 0 && len && host_integerp (len, 0))
1119 length = tree_low_cst (len, 0);
1121 while (TREE_CODE (inner) == COMPONENT_REF)
1123 tree field = TREE_OPERAND (inner, 1);
1124 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1125 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1127 /* Bitfields are generally not byte-addressable. */
1128 gcc_assert (!DECL_BIT_FIELD (field)
1129 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1130 % BITS_PER_UNIT) == 0
1131 && host_integerp (DECL_SIZE (field), 0)
1132 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1133 % BITS_PER_UNIT) == 0));
1135 /* If we can prove that the memory starting at XEXP (mem, 0) and
1136 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1137 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1138 fields without DECL_SIZE_UNIT like flexible array members. */
1139 if (length >= 0
1140 && DECL_SIZE_UNIT (field)
1141 && host_integerp (DECL_SIZE_UNIT (field), 0))
1143 HOST_WIDE_INT size
1144 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1145 if (offset <= size
1146 && length <= size
1147 && offset + length <= size)
1148 break;
1151 if (offset >= 0
1152 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1153 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1154 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1155 / BITS_PER_UNIT;
1156 else
1158 offset = -1;
1159 length = -1;
1162 mem_expr = TREE_OPERAND (mem_expr, 0);
1163 inner = TREE_OPERAND (inner, 0);
1166 if (mem_expr == NULL)
1167 offset = -1;
1168 if (mem_expr != MEM_EXPR (mem))
1170 set_mem_expr (mem, mem_expr);
1171 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1174 set_mem_alias_set (mem, 0);
1175 set_mem_size (mem, NULL_RTX);
1178 return mem;
1181 /* Built-in functions to perform an untyped call and return. */
1183 /* For each register that may be used for calling a function, this
1184 gives a mode used to copy the register's value. VOIDmode indicates
1185 the register is not used for calling a function. If the machine
1186 has register windows, this gives only the outbound registers.
1187 INCOMING_REGNO gives the corresponding inbound register. */
1188 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1190 /* For each register that may be used for returning values, this gives
1191 a mode used to copy the register's value. VOIDmode indicates the
1192 register is not used for returning values. If the machine has
1193 register windows, this gives only the outbound registers.
1194 INCOMING_REGNO gives the corresponding inbound register. */
1195 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1197 /* For each register that may be used for calling a function, this
1198 gives the offset of that register into the block returned by
1199 __builtin_apply_args. 0 indicates that the register is not
1200 used for calling a function. */
1201 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1203 /* Return the size required for the block returned by __builtin_apply_args,
1204 and initialize apply_args_mode. */
1206 static int
1207 apply_args_size (void)
1209 static int size = -1;
1210 int align;
1211 unsigned int regno;
1212 enum machine_mode mode;
1214 /* The values computed by this function never change. */
1215 if (size < 0)
1217 /* The first value is the incoming arg-pointer. */
1218 size = GET_MODE_SIZE (Pmode);
1220 /* The second value is the structure value address unless this is
1221 passed as an "invisible" first argument. */
1222 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1223 size += GET_MODE_SIZE (Pmode);
1225 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1226 if (FUNCTION_ARG_REGNO_P (regno))
1228 mode = reg_raw_mode[regno];
1230 gcc_assert (mode != VOIDmode);
1232 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1233 if (size % align != 0)
1234 size = CEIL (size, align) * align;
1235 apply_args_reg_offset[regno] = size;
1236 size += GET_MODE_SIZE (mode);
1237 apply_args_mode[regno] = mode;
1239 else
1241 apply_args_mode[regno] = VOIDmode;
1242 apply_args_reg_offset[regno] = 0;
1245 return size;
1248 /* Return the size required for the block returned by __builtin_apply,
1249 and initialize apply_result_mode. */
1251 static int
1252 apply_result_size (void)
1254 static int size = -1;
1255 int align, regno;
1256 enum machine_mode mode;
1258 /* The values computed by this function never change. */
1259 if (size < 0)
1261 size = 0;
1263 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1264 if (FUNCTION_VALUE_REGNO_P (regno))
1266 mode = reg_raw_mode[regno];
1268 gcc_assert (mode != VOIDmode);
1270 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1271 if (size % align != 0)
1272 size = CEIL (size, align) * align;
1273 size += GET_MODE_SIZE (mode);
1274 apply_result_mode[regno] = mode;
1276 else
1277 apply_result_mode[regno] = VOIDmode;
1279 /* Allow targets that use untyped_call and untyped_return to override
1280 the size so that machine-specific information can be stored here. */
1281 #ifdef APPLY_RESULT_SIZE
1282 size = APPLY_RESULT_SIZE;
1283 #endif
1285 return size;
1288 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1289 /* Create a vector describing the result block RESULT. If SAVEP is true,
1290 the result block is used to save the values; otherwise it is used to
1291 restore the values. */
1293 static rtx
1294 result_vector (int savep, rtx result)
1296 int regno, size, align, nelts;
1297 enum machine_mode mode;
1298 rtx reg, mem;
1299 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1301 size = nelts = 0;
1302 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1303 if ((mode = apply_result_mode[regno]) != VOIDmode)
1305 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1306 if (size % align != 0)
1307 size = CEIL (size, align) * align;
1308 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1309 mem = adjust_address (result, mode, size);
1310 savevec[nelts++] = (savep
1311 ? gen_rtx_SET (VOIDmode, mem, reg)
1312 : gen_rtx_SET (VOIDmode, reg, mem));
1313 size += GET_MODE_SIZE (mode);
1315 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1317 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1319 /* Save the state required to perform an untyped call with the same
1320 arguments as were passed to the current function. */
1322 static rtx
1323 expand_builtin_apply_args_1 (void)
1325 rtx registers, tem;
1326 int size, align, regno;
1327 enum machine_mode mode;
1328 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1330 /* Create a block where the arg-pointer, structure value address,
1331 and argument registers can be saved. */
1332 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1334 /* Walk past the arg-pointer and structure value address. */
1335 size = GET_MODE_SIZE (Pmode);
1336 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1337 size += GET_MODE_SIZE (Pmode);
1339 /* Save each register used in calling a function to the block. */
1340 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1341 if ((mode = apply_args_mode[regno]) != VOIDmode)
1343 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1344 if (size % align != 0)
1345 size = CEIL (size, align) * align;
1347 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1349 emit_move_insn (adjust_address (registers, mode, size), tem);
1350 size += GET_MODE_SIZE (mode);
1353 /* Save the arg pointer to the block. */
1354 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1355 #ifdef STACK_GROWS_DOWNWARD
1356 /* We need the pointer as the caller actually passed them to us, not
1357 as we might have pretended they were passed. Make sure it's a valid
1358 operand, as emit_move_insn isn't expected to handle a PLUS. */
1359 tem
1360 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1361 NULL_RTX);
1362 #endif
1363 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1365 size = GET_MODE_SIZE (Pmode);
1367 /* Save the structure value address unless this is passed as an
1368 "invisible" first argument. */
1369 if (struct_incoming_value)
1371 emit_move_insn (adjust_address (registers, Pmode, size),
1372 copy_to_reg (struct_incoming_value));
1373 size += GET_MODE_SIZE (Pmode);
1376 /* Return the address of the block. */
1377 return copy_addr_to_reg (XEXP (registers, 0));
1380 /* __builtin_apply_args returns block of memory allocated on
1381 the stack into which is stored the arg pointer, structure
1382 value address, static chain, and all the registers that might
1383 possibly be used in performing a function call. The code is
1384 moved to the start of the function so the incoming values are
1385 saved. */
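/* These expanders implement the "constructing calls" builtins, roughly:

       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) fn, args, 64);
       __builtin_return (result);

   where fn is whatever function the call is forwarded to and the last
   argument of __builtin_apply is the size, in bytes, of the stack argument
   data to copy (64 here is just a placeholder).  */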
1387 static rtx
1388 expand_builtin_apply_args (void)
1390 /* Don't do __builtin_apply_args more than once in a function.
1391 Save the result of the first call and reuse it. */
1392 if (apply_args_value != 0)
1393 return apply_args_value;
1395 /* When this function is called, it means that registers must be
1396 saved on entry to this function. So we migrate the
1397 call to the first insn of this function. */
1398 rtx temp;
1399 rtx seq;
1401 start_sequence ();
1402 temp = expand_builtin_apply_args_1 ();
1403 seq = get_insns ();
1404 end_sequence ();
1406 apply_args_value = temp;
1408 /* Put the insns after the NOTE that starts the function.
1409 If this is inside a start_sequence, make the outer-level insn
1410 chain current, so the code is placed at the start of the
1411 function. */
1412 push_topmost_sequence ();
1413 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1414 pop_topmost_sequence ();
1415 return temp;
1419 /* Perform an untyped call and save the state required to perform an
1420 untyped return of whatever value was returned by the given function. */
1422 static rtx
1423 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1425 int size, align, regno;
1426 enum machine_mode mode;
1427 rtx incoming_args, result, reg, dest, src, call_insn;
1428 rtx old_stack_level = 0;
1429 rtx call_fusage = 0;
1430 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1432 arguments = convert_memory_address (Pmode, arguments);
1434 /* Create a block where the return registers can be saved. */
1435 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1437 /* Fetch the arg pointer from the ARGUMENTS block. */
1438 incoming_args = gen_reg_rtx (Pmode);
1439 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1440 #ifndef STACK_GROWS_DOWNWARD
1441 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1442 incoming_args, 0, OPTAB_LIB_WIDEN);
1443 #endif
1445 /* Push a new argument block and copy the arguments. Do not allow
1446 the (potential) memcpy call below to interfere with our stack
1447 manipulations. */
1448 do_pending_stack_adjust ();
1449 NO_DEFER_POP;
1451 /* Save the stack with nonlocal if available. */
1452 #ifdef HAVE_save_stack_nonlocal
1453 if (HAVE_save_stack_nonlocal)
1454 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1455 else
1456 #endif
1457 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1459 /* Allocate a block of memory onto the stack and copy the memory
1460 arguments to the outgoing arguments address. */
1461 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1463 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1464 may have already set current_function_calls_alloca to true.
1465 current_function_calls_alloca won't be set if argsize is zero,
1466 so we have to guarantee need_drap is true here. */
1467 if (SUPPORTS_STACK_ALIGNMENT)
1468 crtl->need_drap = true;
1470 dest = virtual_outgoing_args_rtx;
1471 #ifndef STACK_GROWS_DOWNWARD
1472 if (GET_CODE (argsize) == CONST_INT)
1473 dest = plus_constant (dest, -INTVAL (argsize));
1474 else
1475 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1476 #endif
1477 dest = gen_rtx_MEM (BLKmode, dest);
1478 set_mem_align (dest, PARM_BOUNDARY);
1479 src = gen_rtx_MEM (BLKmode, incoming_args);
1480 set_mem_align (src, PARM_BOUNDARY);
1481 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1483 /* Refer to the argument block. */
1484 apply_args_size ();
1485 arguments = gen_rtx_MEM (BLKmode, arguments);
1486 set_mem_align (arguments, PARM_BOUNDARY);
1488 /* Walk past the arg-pointer and structure value address. */
1489 size = GET_MODE_SIZE (Pmode);
1490 if (struct_value)
1491 size += GET_MODE_SIZE (Pmode);
1493 /* Restore each of the registers previously saved. Make USE insns
1494 for each of these registers for use in making the call. */
1495 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1496 if ((mode = apply_args_mode[regno]) != VOIDmode)
1498 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1499 if (size % align != 0)
1500 size = CEIL (size, align) * align;
1501 reg = gen_rtx_REG (mode, regno);
1502 emit_move_insn (reg, adjust_address (arguments, mode, size));
1503 use_reg (&call_fusage, reg);
1504 size += GET_MODE_SIZE (mode);
1507 /* Restore the structure value address unless this is passed as an
1508 "invisible" first argument. */
1509 size = GET_MODE_SIZE (Pmode);
1510 if (struct_value)
1512 rtx value = gen_reg_rtx (Pmode);
1513 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1514 emit_move_insn (struct_value, value);
1515 if (REG_P (struct_value))
1516 use_reg (&call_fusage, struct_value);
1517 size += GET_MODE_SIZE (Pmode);
1520 /* All arguments and registers used for the call are set up by now! */
1521 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1523 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1524 and we don't want to load it into a register as an optimization,
1525 because prepare_call_address already did it if it should be done. */
1526 if (GET_CODE (function) != SYMBOL_REF)
1527 function = memory_address (FUNCTION_MODE, function);
1529 /* Generate the actual call instruction and save the return value. */
1530 #ifdef HAVE_untyped_call
1531 if (HAVE_untyped_call)
1532 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1533 result, result_vector (1, result)));
1534 else
1535 #endif
1536 #ifdef HAVE_call_value
1537 if (HAVE_call_value)
1539 rtx valreg = 0;
1541 /* Locate the unique return register. It is not possible to
1542 express a call that sets more than one return register using
1543 call_value; use untyped_call for that. In fact, untyped_call
1544 only needs to save the return registers in the given block. */
1545 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1546 if ((mode = apply_result_mode[regno]) != VOIDmode)
1548 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1550 valreg = gen_rtx_REG (mode, regno);
1553 emit_call_insn (GEN_CALL_VALUE (valreg,
1554 gen_rtx_MEM (FUNCTION_MODE, function),
1555 const0_rtx, NULL_RTX, const0_rtx));
1557 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1559 else
1560 #endif
1561 gcc_unreachable ();
1563 /* Find the CALL insn we just emitted, and attach the register usage
1564 information. */
1565 call_insn = last_call_insn ();
1566 add_function_usage_to (call_insn, call_fusage);
1568 /* Restore the stack. */
1569 #ifdef HAVE_save_stack_nonlocal
1570 if (HAVE_save_stack_nonlocal)
1571 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1572 else
1573 #endif
1574 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1576 OK_DEFER_POP;
1578 /* Return the address of the result block. */
1579 result = copy_addr_to_reg (XEXP (result, 0));
1580 return convert_memory_address (ptr_mode, result);
1583 /* Perform an untyped return. */
1585 static void
1586 expand_builtin_return (rtx result)
1588 int size, align, regno;
1589 enum machine_mode mode;
1590 rtx reg;
1591 rtx call_fusage = 0;
1593 result = convert_memory_address (Pmode, result);
1595 apply_result_size ();
1596 result = gen_rtx_MEM (BLKmode, result);
1598 #ifdef HAVE_untyped_return
1599 if (HAVE_untyped_return)
1601 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1602 emit_barrier ();
1603 return;
1605 #endif
1607 /* Restore the return value and note that each value is used. */
1608 size = 0;
1609 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1610 if ((mode = apply_result_mode[regno]) != VOIDmode)
1612 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1613 if (size % align != 0)
1614 size = CEIL (size, align) * align;
1615 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1616 emit_move_insn (reg, adjust_address (result, mode, size));
1618 push_to_sequence (call_fusage);
1619 emit_use (reg);
1620 call_fusage = get_insns ();
1621 end_sequence ();
1622 size += GET_MODE_SIZE (mode);
1625 /* Put the USE insns before the return. */
1626 emit_insn (call_fusage);
1628 /* Return whatever values was restored by jumping directly to the end
1629 of the function. */
1630 expand_naked_return ();
1633 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1635 static enum type_class
1636 type_to_class (tree type)
1638 switch (TREE_CODE (type))
1640 case VOID_TYPE: return void_type_class;
1641 case INTEGER_TYPE: return integer_type_class;
1642 case ENUMERAL_TYPE: return enumeral_type_class;
1643 case BOOLEAN_TYPE: return boolean_type_class;
1644 case POINTER_TYPE: return pointer_type_class;
1645 case REFERENCE_TYPE: return reference_type_class;
1646 case OFFSET_TYPE: return offset_type_class;
1647 case REAL_TYPE: return real_type_class;
1648 case COMPLEX_TYPE: return complex_type_class;
1649 case FUNCTION_TYPE: return function_type_class;
1650 case METHOD_TYPE: return method_type_class;
1651 case RECORD_TYPE: return record_type_class;
1652 case UNION_TYPE:
1653 case QUAL_UNION_TYPE: return union_type_class;
1654 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1655 ? string_type_class : array_type_class);
1656 case LANG_TYPE: return lang_type_class;
1657 default: return no_type_class;
1661 /* Expand a call EXP to __builtin_classify_type. */
1663 static rtx
1664 expand_builtin_classify_type (tree exp)
1666 if (call_expr_nargs (exp))
1667 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1668 return GEN_INT (no_type_class);
1671 /* This helper macro, meant to be used in mathfn_built_in below,
1672 determines which among a set of three builtin math functions is
1673 appropriate for a given type mode. The `F' and `L' cases are
1674 automatically generated from the `double' case. */
1675 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1676 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1677 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1678 fcodel = BUILT_IN_MATHFN##L ; break;
1679 /* Similar to above, but appends _R after any F/L suffix. */
1680 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1681 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1682 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1683 fcodel = BUILT_IN_MATHFN##L_R ; break;
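/* Illustrative note (editor's sketch, not part of the original source):
   CASE_MATHFN (BUILT_IN_SIN) expands to roughly

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF; fcodel = BUILT_IN_SINL;
       break;

   so one macro invocation covers the double, float and long double variants
   of a math builtin and records their three function codes.  */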
1685 /* Return the mathematical function equivalent to FN but operating directly
1686 on TYPE, if available. If IMPLICIT is true find the function in
1687 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1688 can't do the conversion, return zero. */
1690 static tree
1691 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1693 tree const *const fn_arr
1694 = implicit ? implicit_built_in_decls : built_in_decls;
1695 enum built_in_function fcode, fcodef, fcodel;
1697 switch (fn)
1699 CASE_MATHFN (BUILT_IN_ACOS)
1700 CASE_MATHFN (BUILT_IN_ACOSH)
1701 CASE_MATHFN (BUILT_IN_ASIN)
1702 CASE_MATHFN (BUILT_IN_ASINH)
1703 CASE_MATHFN (BUILT_IN_ATAN)
1704 CASE_MATHFN (BUILT_IN_ATAN2)
1705 CASE_MATHFN (BUILT_IN_ATANH)
1706 CASE_MATHFN (BUILT_IN_CBRT)
1707 CASE_MATHFN (BUILT_IN_CEIL)
1708 CASE_MATHFN (BUILT_IN_CEXPI)
1709 CASE_MATHFN (BUILT_IN_COPYSIGN)
1710 CASE_MATHFN (BUILT_IN_COS)
1711 CASE_MATHFN (BUILT_IN_COSH)
1712 CASE_MATHFN (BUILT_IN_DREM)
1713 CASE_MATHFN (BUILT_IN_ERF)
1714 CASE_MATHFN (BUILT_IN_ERFC)
1715 CASE_MATHFN (BUILT_IN_EXP)
1716 CASE_MATHFN (BUILT_IN_EXP10)
1717 CASE_MATHFN (BUILT_IN_EXP2)
1718 CASE_MATHFN (BUILT_IN_EXPM1)
1719 CASE_MATHFN (BUILT_IN_FABS)
1720 CASE_MATHFN (BUILT_IN_FDIM)
1721 CASE_MATHFN (BUILT_IN_FLOOR)
1722 CASE_MATHFN (BUILT_IN_FMA)
1723 CASE_MATHFN (BUILT_IN_FMAX)
1724 CASE_MATHFN (BUILT_IN_FMIN)
1725 CASE_MATHFN (BUILT_IN_FMOD)
1726 CASE_MATHFN (BUILT_IN_FREXP)
1727 CASE_MATHFN (BUILT_IN_GAMMA)
1728 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1729 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1730 CASE_MATHFN (BUILT_IN_HYPOT)
1731 CASE_MATHFN (BUILT_IN_ILOGB)
1732 CASE_MATHFN (BUILT_IN_INF)
1733 CASE_MATHFN (BUILT_IN_ISINF)
1734 CASE_MATHFN (BUILT_IN_J0)
1735 CASE_MATHFN (BUILT_IN_J1)
1736 CASE_MATHFN (BUILT_IN_JN)
1737 CASE_MATHFN (BUILT_IN_LCEIL)
1738 CASE_MATHFN (BUILT_IN_LDEXP)
1739 CASE_MATHFN (BUILT_IN_LFLOOR)
1740 CASE_MATHFN (BUILT_IN_LGAMMA)
1741 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1742 CASE_MATHFN (BUILT_IN_LLCEIL)
1743 CASE_MATHFN (BUILT_IN_LLFLOOR)
1744 CASE_MATHFN (BUILT_IN_LLRINT)
1745 CASE_MATHFN (BUILT_IN_LLROUND)
1746 CASE_MATHFN (BUILT_IN_LOG)
1747 CASE_MATHFN (BUILT_IN_LOG10)
1748 CASE_MATHFN (BUILT_IN_LOG1P)
1749 CASE_MATHFN (BUILT_IN_LOG2)
1750 CASE_MATHFN (BUILT_IN_LOGB)
1751 CASE_MATHFN (BUILT_IN_LRINT)
1752 CASE_MATHFN (BUILT_IN_LROUND)
1753 CASE_MATHFN (BUILT_IN_MODF)
1754 CASE_MATHFN (BUILT_IN_NAN)
1755 CASE_MATHFN (BUILT_IN_NANS)
1756 CASE_MATHFN (BUILT_IN_NEARBYINT)
1757 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1758 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1759 CASE_MATHFN (BUILT_IN_POW)
1760 CASE_MATHFN (BUILT_IN_POWI)
1761 CASE_MATHFN (BUILT_IN_POW10)
1762 CASE_MATHFN (BUILT_IN_REMAINDER)
1763 CASE_MATHFN (BUILT_IN_REMQUO)
1764 CASE_MATHFN (BUILT_IN_RINT)
1765 CASE_MATHFN (BUILT_IN_ROUND)
1766 CASE_MATHFN (BUILT_IN_SCALB)
1767 CASE_MATHFN (BUILT_IN_SCALBLN)
1768 CASE_MATHFN (BUILT_IN_SCALBN)
1769 CASE_MATHFN (BUILT_IN_SIGNBIT)
1770 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1771 CASE_MATHFN (BUILT_IN_SIN)
1772 CASE_MATHFN (BUILT_IN_SINCOS)
1773 CASE_MATHFN (BUILT_IN_SINH)
1774 CASE_MATHFN (BUILT_IN_SQRT)
1775 CASE_MATHFN (BUILT_IN_TAN)
1776 CASE_MATHFN (BUILT_IN_TANH)
1777 CASE_MATHFN (BUILT_IN_TGAMMA)
1778 CASE_MATHFN (BUILT_IN_TRUNC)
1779 CASE_MATHFN (BUILT_IN_Y0)
1780 CASE_MATHFN (BUILT_IN_Y1)
1781 CASE_MATHFN (BUILT_IN_YN)
1783 default:
1784 return NULL_TREE;
1787 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1788 return fn_arr[fcode];
1789 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1790 return fn_arr[fcodef];
1791 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1792 return fn_arr[fcodel];
1793 else
1794 return NULL_TREE;
1797 /* Like mathfn_built_in_1(), but always use the implicit array. */
1799 tree
1800 mathfn_built_in (tree type, enum built_in_function fn)
1802 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
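/* Usage sketch (editor's illustration of an assumed typical caller, not taken
   from this file): code that wants the float variant of a math builtin can do

     tree fndecl = mathfn_built_in (float_type_node, BUILT_IN_SIN);

   which returns the implicit decl for sinf when the target runtime provides
   it, or NULL_TREE, in which case the caller keeps the original call.  */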
1805 /* If errno must be maintained, expand the RTL to check if the result,
1806 TARGET, of a built-in function call, EXP, is NaN, and if so set
1807 errno to EDOM. */
1809 static void
1810 expand_errno_check (tree exp, rtx target)
1812 rtx lab = gen_label_rtx ();
1814 /* Test the result; if it is NaN, set errno=EDOM because
1815 the argument was not in the domain. */
1816 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1817 0, lab);
1819 #ifdef TARGET_EDOM
1820 /* If this built-in doesn't throw an exception, set errno directly. */
1821 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1823 #ifdef GEN_ERRNO_RTX
1824 rtx errno_rtx = GEN_ERRNO_RTX;
1825 #else
1826 rtx errno_rtx
1827 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1828 #endif
1829 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1830 emit_label (lab);
1831 return;
1833 #endif
1835 /* Make sure the library call isn't expanded as a tail call. */
1836 CALL_EXPR_TAILCALL (exp) = 0;
1838 /* We can't set errno=EDOM directly; let the library call do it.
1839 Pop the arguments right away in case the call gets deleted. */
1840 NO_DEFER_POP;
1841 expand_call (exp, target, 0);
1842 OK_DEFER_POP;
1843 emit_label (lab);
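/* Behavioral sketch (editor's illustration): the RTL emitted above acts like

     if (result != result)      -- true only when the result is NaN
       errno = EDOM;            -- or re-issue the library call

   the self-comparison is the usual NaN test, and the jump to LAB skips the
   errno update on the ordinary, non-NaN path.  */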
1846 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1847 Return NULL_RTX if a normal call should be emitted rather than expanding
1848 the function in-line. EXP is the expression that is a call to the builtin
1849 function; if convenient, the result should be placed in TARGET.
1850 SUBTARGET may be used as the target for computing one of EXP's operands. */
1852 static rtx
1853 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1855 optab builtin_optab;
1856 rtx op0, insns, before_call;
1857 tree fndecl = get_callee_fndecl (exp);
1858 enum machine_mode mode;
1859 bool errno_set = false;
1860 tree arg;
1862 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1863 return NULL_RTX;
1865 arg = CALL_EXPR_ARG (exp, 0);
1867 switch (DECL_FUNCTION_CODE (fndecl))
1869 CASE_FLT_FN (BUILT_IN_SQRT):
1870 errno_set = ! tree_expr_nonnegative_p (arg);
1871 builtin_optab = sqrt_optab;
1872 break;
1873 CASE_FLT_FN (BUILT_IN_EXP):
1874 errno_set = true; builtin_optab = exp_optab; break;
1875 CASE_FLT_FN (BUILT_IN_EXP10):
1876 CASE_FLT_FN (BUILT_IN_POW10):
1877 errno_set = true; builtin_optab = exp10_optab; break;
1878 CASE_FLT_FN (BUILT_IN_EXP2):
1879 errno_set = true; builtin_optab = exp2_optab; break;
1880 CASE_FLT_FN (BUILT_IN_EXPM1):
1881 errno_set = true; builtin_optab = expm1_optab; break;
1882 CASE_FLT_FN (BUILT_IN_LOGB):
1883 errno_set = true; builtin_optab = logb_optab; break;
1884 CASE_FLT_FN (BUILT_IN_LOG):
1885 errno_set = true; builtin_optab = log_optab; break;
1886 CASE_FLT_FN (BUILT_IN_LOG10):
1887 errno_set = true; builtin_optab = log10_optab; break;
1888 CASE_FLT_FN (BUILT_IN_LOG2):
1889 errno_set = true; builtin_optab = log2_optab; break;
1890 CASE_FLT_FN (BUILT_IN_LOG1P):
1891 errno_set = true; builtin_optab = log1p_optab; break;
1892 CASE_FLT_FN (BUILT_IN_ASIN):
1893 builtin_optab = asin_optab; break;
1894 CASE_FLT_FN (BUILT_IN_ACOS):
1895 builtin_optab = acos_optab; break;
1896 CASE_FLT_FN (BUILT_IN_TAN):
1897 builtin_optab = tan_optab; break;
1898 CASE_FLT_FN (BUILT_IN_ATAN):
1899 builtin_optab = atan_optab; break;
1900 CASE_FLT_FN (BUILT_IN_FLOOR):
1901 builtin_optab = floor_optab; break;
1902 CASE_FLT_FN (BUILT_IN_CEIL):
1903 builtin_optab = ceil_optab; break;
1904 CASE_FLT_FN (BUILT_IN_TRUNC):
1905 builtin_optab = btrunc_optab; break;
1906 CASE_FLT_FN (BUILT_IN_ROUND):
1907 builtin_optab = round_optab; break;
1908 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1909 builtin_optab = nearbyint_optab;
1910 if (flag_trapping_math)
1911 break;
1912 /* Else fall through and expand as rint. */
1913 CASE_FLT_FN (BUILT_IN_RINT):
1914 builtin_optab = rint_optab; break;
1915 default:
1916 gcc_unreachable ();
1919 /* Make a suitable register to place result in. */
1920 mode = TYPE_MODE (TREE_TYPE (exp));
1922 if (! flag_errno_math || ! HONOR_NANS (mode))
1923 errno_set = false;
1925 /* Before working hard, check whether the instruction is available. */
1926 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1928 target = gen_reg_rtx (mode);
1930 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1931 need to expand the argument again. This way, we will not perform
1932 side-effects more than once. */
1933 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1935 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1937 start_sequence ();
1939 /* Compute into TARGET.
1940 Set TARGET to wherever the result comes back. */
1941 target = expand_unop (mode, builtin_optab, op0, target, 0);
1943 if (target != 0)
1945 if (errno_set)
1946 expand_errno_check (exp, target);
1948 /* Output the entire sequence. */
1949 insns = get_insns ();
1950 end_sequence ();
1951 emit_insn (insns);
1952 return target;
1955 /* If we were unable to expand via the builtin, stop the sequence
1956 (without outputting the insns) and call the library function
1957 with the stabilized argument list. */
1958 end_sequence ();
1961 before_call = get_last_insn ();
1963 return expand_call (exp, target, target == const0_rtx);
1966 /* Expand a call to the builtin binary math functions (pow and atan2).
1967 Return NULL_RTX if a normal call should be emitted rather than expanding the
1968 function in-line. EXP is the expression that is a call to the builtin
1969 function; if convenient, the result should be placed in TARGET.
1970 SUBTARGET may be used as the target for computing one of EXP's
1971 operands. */
1973 static rtx
1974 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1976 optab builtin_optab;
1977 rtx op0, op1, insns;
1978 int op1_type = REAL_TYPE;
1979 tree fndecl = get_callee_fndecl (exp);
1980 tree arg0, arg1;
1981 enum machine_mode mode;
1982 bool errno_set = true;
1984 switch (DECL_FUNCTION_CODE (fndecl))
1986 CASE_FLT_FN (BUILT_IN_SCALBN):
1987 CASE_FLT_FN (BUILT_IN_SCALBLN):
1988 CASE_FLT_FN (BUILT_IN_LDEXP):
1989 op1_type = INTEGER_TYPE;
1990 default:
1991 break;
1994 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
1995 return NULL_RTX;
1997 arg0 = CALL_EXPR_ARG (exp, 0);
1998 arg1 = CALL_EXPR_ARG (exp, 1);
2000 switch (DECL_FUNCTION_CODE (fndecl))
2002 CASE_FLT_FN (BUILT_IN_POW):
2003 builtin_optab = pow_optab; break;
2004 CASE_FLT_FN (BUILT_IN_ATAN2):
2005 builtin_optab = atan2_optab; break;
2006 CASE_FLT_FN (BUILT_IN_SCALB):
2007 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2008 return 0;
2009 builtin_optab = scalb_optab; break;
2010 CASE_FLT_FN (BUILT_IN_SCALBN):
2011 CASE_FLT_FN (BUILT_IN_SCALBLN):
2012 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2013 return 0;
2014 /* Fall through... */
2015 CASE_FLT_FN (BUILT_IN_LDEXP):
2016 builtin_optab = ldexp_optab; break;
2017 CASE_FLT_FN (BUILT_IN_FMOD):
2018 builtin_optab = fmod_optab; break;
2019 CASE_FLT_FN (BUILT_IN_REMAINDER):
2020 CASE_FLT_FN (BUILT_IN_DREM):
2021 builtin_optab = remainder_optab; break;
2022 default:
2023 gcc_unreachable ();
2026 /* Make a suitable register to place result in. */
2027 mode = TYPE_MODE (TREE_TYPE (exp));
2029 /* Before working hard, check whether the instruction is available. */
2030 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2031 return NULL_RTX;
2033 target = gen_reg_rtx (mode);
2035 if (! flag_errno_math || ! HONOR_NANS (mode))
2036 errno_set = false;
2038 /* Always stabilize the argument list. */
2039 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2040 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2042 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2043 op1 = expand_normal (arg1);
2045 start_sequence ();
2047 /* Compute into TARGET.
2048 Set TARGET to wherever the result comes back. */
2049 target = expand_binop (mode, builtin_optab, op0, op1,
2050 target, 0, OPTAB_DIRECT);
2052 /* If we were unable to expand via the builtin, stop the sequence
2053 (without outputting the insns) and call the library function
2054 with the stabilized argument list. */
2055 if (target == 0)
2057 end_sequence ();
2058 return expand_call (exp, target, target == const0_rtx);
2061 if (errno_set)
2062 expand_errno_check (exp, target);
2064 /* Output the entire sequence. */
2065 insns = get_insns ();
2066 end_sequence ();
2067 emit_insn (insns);
2069 return target;
2072 /* Expand a call to the builtin sin and cos math functions.
2073 Return NULL_RTX if a normal call should be emitted rather than expanding the
2074 function in-line. EXP is the expression that is a call to the builtin
2075 function; if convenient, the result should be placed in TARGET.
2076 SUBTARGET may be used as the target for computing one of EXP's
2077 operands. */
2079 static rtx
2080 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2082 optab builtin_optab;
2083 rtx op0, insns;
2084 tree fndecl = get_callee_fndecl (exp);
2085 enum machine_mode mode;
2086 tree arg;
2088 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2089 return NULL_RTX;
2091 arg = CALL_EXPR_ARG (exp, 0);
2093 switch (DECL_FUNCTION_CODE (fndecl))
2095 CASE_FLT_FN (BUILT_IN_SIN):
2096 CASE_FLT_FN (BUILT_IN_COS):
2097 builtin_optab = sincos_optab; break;
2098 default:
2099 gcc_unreachable ();
2102 /* Make a suitable register to place result in. */
2103 mode = TYPE_MODE (TREE_TYPE (exp));
2105 /* Check whether the sincos insn is available; otherwise fall back
2106 to the sin or cos insn. */
2107 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2108 switch (DECL_FUNCTION_CODE (fndecl))
2110 CASE_FLT_FN (BUILT_IN_SIN):
2111 builtin_optab = sin_optab; break;
2112 CASE_FLT_FN (BUILT_IN_COS):
2113 builtin_optab = cos_optab; break;
2114 default:
2115 gcc_unreachable ();
2118 /* Before working hard, check whether the instruction is available. */
2119 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2121 target = gen_reg_rtx (mode);
2123 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2124 need to expand the argument again. This way, we will not perform
2125 side-effects more than once. */
2126 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2128 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2130 start_sequence ();
2132 /* Compute into TARGET.
2133 Set TARGET to wherever the result comes back. */
2134 if (builtin_optab == sincos_optab)
2136 int result;
2138 switch (DECL_FUNCTION_CODE (fndecl))
2140 CASE_FLT_FN (BUILT_IN_SIN):
2141 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2142 break;
2143 CASE_FLT_FN (BUILT_IN_COS):
2144 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2145 break;
2146 default:
2147 gcc_unreachable ();
2149 gcc_assert (result);
2151 else
2153 target = expand_unop (mode, builtin_optab, op0, target, 0);
2156 if (target != 0)
2158 /* Output the entire sequence. */
2159 insns = get_insns ();
2160 end_sequence ();
2161 emit_insn (insns);
2162 return target;
2165 /* If we were unable to expand via the builtin, stop the sequence
2166 (without outputting the insns) and call the library function
2167 with the stabilized argument list. */
2168 end_sequence ();
2171 target = expand_call (exp, target, target == const0_rtx);
2173 return target;
2176 /* Expand a call to one of the builtin math functions that operate on
2177 a floating point argument and output an integer result (ilogb, isinf,
2178 isnan, etc).
2179 Return 0 if a normal call should be emitted rather than expanding the
2180 function in-line. EXP is the expression that is a call to the builtin
2181 function; if convenient, the result should be placed in TARGET.
2182 SUBTARGET may be used as the target for computing one of EXP's operands. */
2184 static rtx
2185 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2187 optab builtin_optab = 0;
2188 enum insn_code icode = CODE_FOR_nothing;
2189 rtx op0;
2190 tree fndecl = get_callee_fndecl (exp);
2191 enum machine_mode mode;
2192 bool errno_set = false;
2193 tree arg;
2195 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2196 return NULL_RTX;
2198 arg = CALL_EXPR_ARG (exp, 0);
2200 switch (DECL_FUNCTION_CODE (fndecl))
2202 CASE_FLT_FN (BUILT_IN_ILOGB):
2203 errno_set = true; builtin_optab = ilogb_optab; break;
2204 CASE_FLT_FN (BUILT_IN_ISINF):
2205 builtin_optab = isinf_optab; break;
2206 case BUILT_IN_ISNORMAL:
2207 case BUILT_IN_ISFINITE:
2208 CASE_FLT_FN (BUILT_IN_FINITE):
2209 /* These builtins have no optabs (yet). */
2210 break;
2211 default:
2212 gcc_unreachable ();
2215 /* There's no easy way to detect the case we need to set EDOM. */
2216 if (flag_errno_math && errno_set)
2217 return NULL_RTX;
2219 /* Optab mode depends on the mode of the input argument. */
2220 mode = TYPE_MODE (TREE_TYPE (arg));
2222 if (builtin_optab)
2223 icode = optab_handler (builtin_optab, mode)->insn_code;
2225 /* Before working hard, check whether the instruction is available. */
2226 if (icode != CODE_FOR_nothing)
2228 /* Make a suitable register to place result in. */
2229 if (!target
2230 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2231 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2233 gcc_assert (insn_data[icode].operand[0].predicate
2234 (target, GET_MODE (target)));
2236 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2237 need to expand the argument again. This way, we will not perform
2238 side-effects more than once. */
2239 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2241 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2243 if (mode != GET_MODE (op0))
2244 op0 = convert_to_mode (mode, op0, 0);
2246 /* Compute into TARGET.
2247 Set TARGET to wherever the result comes back. */
2248 emit_unop_insn (icode, target, op0, UNKNOWN);
2249 return target;
2252 /* If there is no optab, try generic code. */
2253 switch (DECL_FUNCTION_CODE (fndecl))
2255 tree result;
2257 CASE_FLT_FN (BUILT_IN_ISINF):
2259 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2260 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2261 tree const type = TREE_TYPE (arg);
2262 REAL_VALUE_TYPE r;
2263 char buf[128];
2265 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2266 real_from_string (&r, buf);
2267 result = build_call_expr (isgr_fn, 2,
2268 fold_build1 (ABS_EXPR, type, arg),
2269 build_real (type, r));
2270 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2272 CASE_FLT_FN (BUILT_IN_FINITE):
2273 case BUILT_IN_ISFINITE:
2275 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2276 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2277 tree const type = TREE_TYPE (arg);
2278 REAL_VALUE_TYPE r;
2279 char buf[128];
2281 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2282 real_from_string (&r, buf);
2283 result = build_call_expr (isle_fn, 2,
2284 fold_build1 (ABS_EXPR, type, arg),
2285 build_real (type, r));
2286 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2288 case BUILT_IN_ISNORMAL:
2290 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2291 islessequal(fabs(x),DBL_MAX). */
2292 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2293 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2294 tree const type = TREE_TYPE (arg);
2295 REAL_VALUE_TYPE rmax, rmin;
2296 char buf[128];
2298 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2299 real_from_string (&rmax, buf);
2300 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2301 real_from_string (&rmin, buf);
2302 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2303 result = build_call_expr (isle_fn, 2, arg,
2304 build_real (type, rmax));
2305 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2306 build_call_expr (isge_fn, 2, arg,
2307 build_real (type, rmin)));
2308 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2310 default:
2311 break;
2314 target = expand_call (exp, target, target == const0_rtx);
2316 return target;
2319 /* Expand a call to the builtin sincos math function.
2320 Return NULL_RTX if a normal call should be emitted rather than expanding the
2321 function in-line. EXP is the expression that is a call to the builtin
2322 function. */
2324 static rtx
2325 expand_builtin_sincos (tree exp)
2327 rtx op0, op1, op2, target1, target2;
2328 enum machine_mode mode;
2329 tree arg, sinp, cosp;
2330 int result;
2332 if (!validate_arglist (exp, REAL_TYPE,
2333 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2334 return NULL_RTX;
2336 arg = CALL_EXPR_ARG (exp, 0);
2337 sinp = CALL_EXPR_ARG (exp, 1);
2338 cosp = CALL_EXPR_ARG (exp, 2);
2340 /* Make a suitable register to place result in. */
2341 mode = TYPE_MODE (TREE_TYPE (arg));
2343 /* Check if sincos insn is available, otherwise emit the call. */
2344 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2345 return NULL_RTX;
2347 target1 = gen_reg_rtx (mode);
2348 target2 = gen_reg_rtx (mode);
2350 op0 = expand_normal (arg);
2351 op1 = expand_normal (build_fold_indirect_ref (sinp));
2352 op2 = expand_normal (build_fold_indirect_ref (cosp));
2354 /* Compute into target1 and target2.
2355 Set TARGET to wherever the result comes back. */
2356 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2357 gcc_assert (result);
2359 /* Move target1 and target2 to the memory locations indicated
2360 by op1 and op2. */
2361 emit_move_insn (op1, target1);
2362 emit_move_insn (op2, target2);
2364 return const0_rtx;
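/* Illustrative example (editor's note): on a target with a sincos<mode>2
   pattern, a call such as

     sincos (x, &s, &c);

   is expanded above into a single instruction computing both values into
   fresh registers, followed by two stores through the given pointers; if no
   such pattern exists, NULL_RTX is returned and the libm routine is used.  */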
2367 /* Expand a call to the internal cexpi builtin to the sincos math function.
2368 EXP is the expression that is a call to the builtin function; if convenient,
2369 the result should be placed in TARGET. SUBTARGET may be used as the target
2370 for computing one of EXP's operands. */
2372 static rtx
2373 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2375 tree fndecl = get_callee_fndecl (exp);
2376 tree arg, type;
2377 enum machine_mode mode;
2378 rtx op0, op1, op2;
2380 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2381 return NULL_RTX;
2383 arg = CALL_EXPR_ARG (exp, 0);
2384 type = TREE_TYPE (arg);
2385 mode = TYPE_MODE (TREE_TYPE (arg));
2387 /* Try expanding via a sincos optab, fall back to emitting a libcall
2388 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2389 is only generated from sincos or cexp, or when either of them is available. */
2390 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2392 op1 = gen_reg_rtx (mode);
2393 op2 = gen_reg_rtx (mode);
2395 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2397 /* Compute into op1 and op2. */
2398 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2400 else if (TARGET_HAS_SINCOS)
2402 tree call, fn = NULL_TREE;
2403 tree top1, top2;
2404 rtx op1a, op2a;
2406 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2407 fn = built_in_decls[BUILT_IN_SINCOSF];
2408 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2409 fn = built_in_decls[BUILT_IN_SINCOS];
2410 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2411 fn = built_in_decls[BUILT_IN_SINCOSL];
2412 else
2413 gcc_unreachable ();
2415 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2416 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2417 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2418 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2419 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2420 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2422 /* Make sure not to fold the sincos call again. */
2423 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2424 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2425 call, 3, arg, top1, top2));
2427 else
2429 tree call, fn = NULL_TREE, narg;
2430 tree ctype = build_complex_type (type);
2432 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2433 fn = built_in_decls[BUILT_IN_CEXPF];
2434 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2435 fn = built_in_decls[BUILT_IN_CEXP];
2436 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2437 fn = built_in_decls[BUILT_IN_CEXPL];
2438 else
2439 gcc_unreachable ();
2441 /* If we don't have a decl for cexp, create one. This is the
2442 friendliest fallback if the user calls __builtin_cexpi
2443 on a target without full C99 function support. */
2444 if (fn == NULL_TREE)
2446 tree fntype;
2447 const char *name = NULL;
2449 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2450 name = "cexpf";
2451 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2452 name = "cexp";
2453 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2454 name = "cexpl";
2456 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2457 fn = build_fn_decl (name, fntype);
2460 narg = fold_build2 (COMPLEX_EXPR, ctype,
2461 build_real (type, dconst0), arg);
2463 /* Make sure not to fold the cexp call again. */
2464 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2465 return expand_expr (build_call_nary (ctype, call, 1, narg),
2466 target, VOIDmode, EXPAND_NORMAL);
2469 /* Now build the proper return type. */
2470 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2471 make_tree (TREE_TYPE (arg), op2),
2472 make_tree (TREE_TYPE (arg), op1)),
2473 target, VOIDmode, EXPAND_NORMAL);
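/* Summary sketch (editor's illustration of the three paths above): for
   __builtin_cexpi (x) the expansion is, in effect,

     sincos optab present:   one insn computing sin (x) and cos (x) in registers
     TARGET_HAS_SINCOS:      sincos (x, &s, &c)
     otherwise:              cexp (0.0 + x*i)

   and in every case the value produced is cos (x) + i*sin (x).  */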
2476 /* Expand a call to one of the builtin rounding functions gcc defines
2477 as an extension (lfloor and lceil). As these are gcc extensions we
2478 do not need to worry about setting errno to EDOM.
2479 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2480 EXP is the expression that is a call to the builtin function;
2481 if convenient, the result should be placed in TARGET. */
2483 static rtx
2484 expand_builtin_int_roundingfn (tree exp, rtx target)
2486 convert_optab builtin_optab;
2487 rtx op0, insns, tmp;
2488 tree fndecl = get_callee_fndecl (exp);
2489 enum built_in_function fallback_fn;
2490 tree fallback_fndecl;
2491 enum machine_mode mode;
2492 tree arg;
2494 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2495 gcc_unreachable ();
2497 arg = CALL_EXPR_ARG (exp, 0);
2499 switch (DECL_FUNCTION_CODE (fndecl))
2501 CASE_FLT_FN (BUILT_IN_LCEIL):
2502 CASE_FLT_FN (BUILT_IN_LLCEIL):
2503 builtin_optab = lceil_optab;
2504 fallback_fn = BUILT_IN_CEIL;
2505 break;
2507 CASE_FLT_FN (BUILT_IN_LFLOOR):
2508 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2509 builtin_optab = lfloor_optab;
2510 fallback_fn = BUILT_IN_FLOOR;
2511 break;
2513 default:
2514 gcc_unreachable ();
2517 /* Make a suitable register to place result in. */
2518 mode = TYPE_MODE (TREE_TYPE (exp));
2520 target = gen_reg_rtx (mode);
2522 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2523 need to expand the argument again. This way, we will not perform
2524 side-effects more than once. */
2525 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2527 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2529 start_sequence ();
2531 /* Compute into TARGET. */
2532 if (expand_sfix_optab (target, op0, builtin_optab))
2534 /* Output the entire sequence. */
2535 insns = get_insns ();
2536 end_sequence ();
2537 emit_insn (insns);
2538 return target;
2541 /* If we were unable to expand via the builtin, stop the sequence
2542 (without outputting the insns). */
2543 end_sequence ();
2545 /* Fall back to the floating point rounding function. */
2546 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2548 /* For non-C99 targets we may end up without a fallback fndecl here
2549 if the user called __builtin_lfloor directly. In this case emit
2550 a call to the floor/ceil variants nevertheless. This should result
2551 in the best user experience for targets without full C99 support. */
2552 if (fallback_fndecl == NULL_TREE)
2554 tree fntype;
2555 const char *name = NULL;
2557 switch (DECL_FUNCTION_CODE (fndecl))
2559 case BUILT_IN_LCEIL:
2560 case BUILT_IN_LLCEIL:
2561 name = "ceil";
2562 break;
2563 case BUILT_IN_LCEILF:
2564 case BUILT_IN_LLCEILF:
2565 name = "ceilf";
2566 break;
2567 case BUILT_IN_LCEILL:
2568 case BUILT_IN_LLCEILL:
2569 name = "ceill";
2570 break;
2571 case BUILT_IN_LFLOOR:
2572 case BUILT_IN_LLFLOOR:
2573 name = "floor";
2574 break;
2575 case BUILT_IN_LFLOORF:
2576 case BUILT_IN_LLFLOORF:
2577 name = "floorf";
2578 break;
2579 case BUILT_IN_LFLOORL:
2580 case BUILT_IN_LLFLOORL:
2581 name = "floorl";
2582 break;
2583 default:
2584 gcc_unreachable ();
2587 fntype = build_function_type_list (TREE_TYPE (arg),
2588 TREE_TYPE (arg), NULL_TREE);
2589 fallback_fndecl = build_fn_decl (name, fntype);
2592 exp = build_call_expr (fallback_fndecl, 1, arg);
2594 tmp = expand_normal (exp);
2596 /* Truncate the result of floating point optab to integer
2597 via expand_fix (). */
2598 target = gen_reg_rtx (mode);
2599 expand_fix (target, tmp, 0);
2601 return target;
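/* Worked example (editor's illustration): on a target without an
   lfloor<fmode><imode>2 pattern, a call like

     long l = __builtin_lfloor (d);

   falls through to the code above, which rewrites it as a call to floor (d)
   (building a plain "floor" decl by hand on non-C99 targets) and then
   truncates the result with expand_fix, i.e. effectively l = (long) floor (d).  */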
2604 /* Expand a call to one of the builtin math functions doing integer
2605 conversion (lrint).
2606 Return 0 if a normal call should be emitted rather than expanding the
2607 function in-line. EXP is the expression that is a call to the builtin
2608 function; if convenient, the result should be placed in TARGET. */
2610 static rtx
2611 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2613 convert_optab builtin_optab;
2614 rtx op0, insns;
2615 tree fndecl = get_callee_fndecl (exp);
2616 tree arg;
2617 enum machine_mode mode;
2619 /* There's no easy way to detect the case we need to set EDOM. */
2620 if (flag_errno_math)
2621 return NULL_RTX;
2623 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2624 gcc_unreachable ();
2626 arg = CALL_EXPR_ARG (exp, 0);
2628 switch (DECL_FUNCTION_CODE (fndecl))
2630 CASE_FLT_FN (BUILT_IN_LRINT):
2631 CASE_FLT_FN (BUILT_IN_LLRINT):
2632 builtin_optab = lrint_optab; break;
2633 CASE_FLT_FN (BUILT_IN_LROUND):
2634 CASE_FLT_FN (BUILT_IN_LLROUND):
2635 builtin_optab = lround_optab; break;
2636 default:
2637 gcc_unreachable ();
2640 /* Make a suitable register to place result in. */
2641 mode = TYPE_MODE (TREE_TYPE (exp));
2643 target = gen_reg_rtx (mode);
2645 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2646 need to expand the argument again. This way, we will not perform
2647 side-effects more than once. */
2648 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2650 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2652 start_sequence ();
2654 if (expand_sfix_optab (target, op0, builtin_optab))
2656 /* Output the entire sequence. */
2657 insns = get_insns ();
2658 end_sequence ();
2659 emit_insn (insns);
2660 return target;
2663 /* If we were unable to expand via the builtin, stop the sequence
2664 (without outputting the insns) and call the library function
2665 with the stabilized argument list. */
2666 end_sequence ();
2668 target = expand_call (exp, target, target == const0_rtx);
2670 return target;
2673 /* To evaluate powi(x,n), the floating point value x raised to the
2674 constant integer exponent n, we use a hybrid algorithm that
2675 combines the "window method" with look-up tables. For an
2676 introduction to exponentiation algorithms and "addition chains",
2677 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2678 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2679 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2680 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2682 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2683 multiplications to inline before calling the system library's pow
2684 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2685 so this default never requires calling pow, powf or powl. */
2687 #ifndef POWI_MAX_MULTS
2688 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2689 #endif
2691 /* The size of the "optimal power tree" lookup table. All
2692 exponents less than this value are simply looked up in the
2693 powi_table below. This threshold is also used to size the
2694 cache of pseudo registers that hold intermediate results. */
2695 #define POWI_TABLE_SIZE 256
2697 /* The size, in bits of the window, used in the "window method"
2698 exponentiation algorithm. This is equivalent to a radix of
2699 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2700 #define POWI_WINDOW_SIZE 3
2702 /* The following table is an efficient representation of an
2703 "optimal power tree". For each value, i, the corresponding
2704 value, j, in the table states that an optimal evaluation
2705 sequence for calculating pow(x,i) can be found by evaluating
2706 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2707 100 integers is given in Knuth's "Seminumerical algorithms". */
2709 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2711 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2712 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2713 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2714 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2715 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2716 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2717 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2718 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2719 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2720 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2721 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2722 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2723 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2724 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2725 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2726 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2727 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2728 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2729 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2730 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2731 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2732 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2733 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2734 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2735 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2736 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2737 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2738 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2739 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2740 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2741 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2742 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
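/* Worked example (editor's illustration of the table above): for n = 5,
   powi_table[5] == 3, so

     x**5 = x**3 * x**2,   x**3 = x**2 * x,   x**2 = x * x

   which costs three multiplications in total once the shared x**2 is cached.
   Exponents of POWI_TABLE_SIZE or more are first reduced by the window
   method: while n is even the code squares (one multiplication per halving),
   and when n is odd it peels off the low POWI_WINDOW_SIZE bits and looks
   that small exponent up in the table.  */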
2746 /* Return the number of multiplications required to calculate
2747 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2748 subroutine of powi_cost. CACHE is an array indicating
2749 which exponents have already been calculated. */
2751 static int
2752 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2754 /* If we've already calculated this exponent, then this evaluation
2755 doesn't require any additional multiplications. */
2756 if (cache[n])
2757 return 0;
2759 cache[n] = true;
2760 return powi_lookup_cost (n - powi_table[n], cache)
2761 + powi_lookup_cost (powi_table[n], cache) + 1;
2764 /* Return the number of multiplications required to calculate
2765 powi(x,n) for an arbitrary x, given the exponent N. This
2766 function needs to be kept in sync with expand_powi below. */
2768 static int
2769 powi_cost (HOST_WIDE_INT n)
2771 bool cache[POWI_TABLE_SIZE];
2772 unsigned HOST_WIDE_INT digit;
2773 unsigned HOST_WIDE_INT val;
2774 int result;
2776 if (n == 0)
2777 return 0;
2779 /* Ignore the reciprocal when calculating the cost. */
2780 val = (n < 0) ? -n : n;
2782 /* Initialize the exponent cache. */
2783 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2784 cache[1] = true;
2786 result = 0;
2788 while (val >= POWI_TABLE_SIZE)
2790 if (val & 1)
2792 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2793 result += powi_lookup_cost (digit, cache)
2794 + POWI_WINDOW_SIZE + 1;
2795 val >>= POWI_WINDOW_SIZE;
2797 else
2799 val >>= 1;
2800 result++;
2804 return result + powi_lookup_cost (val, cache);
2807 /* Recursive subroutine of expand_powi. This function takes the array,
2808 CACHE, of already calculated exponents and an exponent N and returns
2809 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2811 static rtx
2812 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2814 unsigned HOST_WIDE_INT digit;
2815 rtx target, result;
2816 rtx op0, op1;
2818 if (n < POWI_TABLE_SIZE)
2820 if (cache[n])
2821 return cache[n];
2823 target = gen_reg_rtx (mode);
2824 cache[n] = target;
2826 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2827 op1 = expand_powi_1 (mode, powi_table[n], cache);
2829 else if (n & 1)
2831 target = gen_reg_rtx (mode);
2832 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2833 op0 = expand_powi_1 (mode, n - digit, cache);
2834 op1 = expand_powi_1 (mode, digit, cache);
2836 else
2838 target = gen_reg_rtx (mode);
2839 op0 = expand_powi_1 (mode, n >> 1, cache);
2840 op1 = op0;
2843 result = expand_mult (mode, op0, op1, target, 0);
2844 if (result != target)
2845 emit_move_insn (target, result);
2846 return target;
2849 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2850 floating point operand in mode MODE, and N is the exponent. This
2851 function needs to be kept in sync with powi_cost above. */
2853 static rtx
2854 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2856 unsigned HOST_WIDE_INT val;
2857 rtx cache[POWI_TABLE_SIZE];
2858 rtx result;
2860 if (n == 0)
2861 return CONST1_RTX (mode);
2863 val = (n < 0) ? -n : n;
2865 memset (cache, 0, sizeof (cache));
2866 cache[1] = x;
2868 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2870 /* If the original exponent was negative, reciprocate the result. */
2871 if (n < 0)
2872 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2873 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2875 return result;
2878 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2879 a normal call should be emitted rather than expanding the function
2880 in-line. EXP is the expression that is a call to the builtin
2881 function; if convenient, the result should be placed in TARGET. */
2883 static rtx
2884 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2886 tree arg0, arg1;
2887 tree fn, narg0;
2888 tree type = TREE_TYPE (exp);
2889 REAL_VALUE_TYPE cint, c, c2;
2890 HOST_WIDE_INT n;
2891 rtx op, op2;
2892 enum machine_mode mode = TYPE_MODE (type);
2894 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2895 return NULL_RTX;
2897 arg0 = CALL_EXPR_ARG (exp, 0);
2898 arg1 = CALL_EXPR_ARG (exp, 1);
2900 if (TREE_CODE (arg1) != REAL_CST
2901 || TREE_OVERFLOW (arg1))
2902 return expand_builtin_mathfn_2 (exp, target, subtarget);
2904 /* Handle constant exponents. */
2906 /* For integer valued exponents we can expand to an optimal multiplication
2907 sequence using expand_powi. */
2908 c = TREE_REAL_CST (arg1);
2909 n = real_to_integer (&c);
2910 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2911 if (real_identical (&c, &cint)
2912 && ((n >= -1 && n <= 2)
2913 || (flag_unsafe_math_optimizations
2914 && optimize_insn_for_speed_p ()
2915 && powi_cost (n) <= POWI_MAX_MULTS)))
2917 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2918 if (n != 1)
2920 op = force_reg (mode, op);
2921 op = expand_powi (op, mode, n);
2923 return op;
2926 narg0 = builtin_save_expr (arg0);
2928 /* If the exponent is not integer valued, check if it is half of an integer.
2929 In this case we can expand to sqrt (x) * x**(n/2). */
2930 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2931 if (fn != NULL_TREE)
2933 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2934 n = real_to_integer (&c2);
2935 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2936 if (real_identical (&c2, &cint)
2937 && ((flag_unsafe_math_optimizations
2938 && optimize_insn_for_speed_p ()
2939 && powi_cost (n/2) <= POWI_MAX_MULTS)
2940 || n == 1))
2942 tree call_expr = build_call_expr (fn, 1, narg0);
2943 /* Use expand_expr in case the newly built call expression
2944 was folded to a non-call. */
2945 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2946 if (n != 1)
2948 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2949 op2 = force_reg (mode, op2);
2950 op2 = expand_powi (op2, mode, abs (n / 2));
2951 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2952 0, OPTAB_LIB_WIDEN);
2953 /* If the original exponent was negative, reciprocate the
2954 result. */
2955 if (n < 0)
2956 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2957 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2959 return op;
2963 /* Try if the exponent is a third of an integer. In this case
2964 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2965 different from pow (x, 1./3.) due to rounding and behavior
2966 with negative x, we need to constrain this transformation to
2967 unsafe math and positive x or finite math. */
2968 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2969 if (fn != NULL_TREE
2970 && flag_unsafe_math_optimizations
2971 && (tree_expr_nonnegative_p (arg0)
2972 || !HONOR_NANS (mode)))
2974 REAL_VALUE_TYPE dconst3;
2975 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
2976 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2977 real_round (&c2, mode, &c2);
2978 n = real_to_integer (&c2);
2979 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2980 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
2981 real_convert (&c2, mode, &c2);
2982 if (real_identical (&c2, &c)
2983 && ((optimize_insn_for_speed_p ()
2984 && powi_cost (n/3) <= POWI_MAX_MULTS)
2985 || n == 1))
2987 tree call_expr = build_call_expr (fn, 1, narg0);
2988 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2989 if (abs (n) % 3 == 2)
2990 op = expand_simple_binop (mode, MULT, op, op, op,
2991 0, OPTAB_LIB_WIDEN);
2992 if (n != 1)
2994 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2995 op2 = force_reg (mode, op2);
2996 op2 = expand_powi (op2, mode, abs (n / 3));
2997 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2998 0, OPTAB_LIB_WIDEN);
2999 /* If the original exponent was negative, reciprocate the
3000 result. */
3001 if (n < 0)
3002 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3003 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3005 return op;
3009 /* Fall back to optab expansion. */
3010 return expand_builtin_mathfn_2 (exp, target, subtarget);
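/* Worked examples of the special cases handled above (editor's sketch):

     pow (x, 2.0)   ->  x * x             (integer exponent, expand_powi)
     pow (x, 0.5)   ->  sqrt (x)          (n/2 path with n == 1)
     pow (x, 2.5)   ->  sqrt (x) * x * x  (n/2 path, needs -funsafe-math-optimizations)
     pow (x, 1./3.) ->  cbrt (x)          (n/3 path, needs -funsafe-math-optimizations
                                           and x known non-negative or NaNs not honored)

   anything else falls back to the pow optab or a library call via
   expand_builtin_mathfn_2.  */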
3013 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3014 a normal call should be emitted rather than expanding the function
3015 in-line. EXP is the expression that is a call to the builtin
3016 function; if convenient, the result should be placed in TARGET. */
3018 static rtx
3019 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3021 tree arg0, arg1;
3022 rtx op0, op1;
3023 enum machine_mode mode;
3024 enum machine_mode mode2;
3026 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3027 return NULL_RTX;
3029 arg0 = CALL_EXPR_ARG (exp, 0);
3030 arg1 = CALL_EXPR_ARG (exp, 1);
3031 mode = TYPE_MODE (TREE_TYPE (exp));
3033 /* Handle constant power. */
3035 if (TREE_CODE (arg1) == INTEGER_CST
3036 && !TREE_OVERFLOW (arg1))
3038 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3040 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3041 Otherwise, check the number of multiplications required. */
3042 if ((TREE_INT_CST_HIGH (arg1) == 0
3043 || TREE_INT_CST_HIGH (arg1) == -1)
3044 && ((n >= -1 && n <= 2)
3045 || (optimize_insn_for_speed_p ()
3046 && powi_cost (n) <= POWI_MAX_MULTS)))
3048 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3049 op0 = force_reg (mode, op0);
3050 return expand_powi (op0, mode, n);
3054 /* Emit a libcall to libgcc. */
3056 /* Mode of the 2nd argument must match that of an int. */
3057 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3059 if (target == NULL_RTX)
3060 target = gen_reg_rtx (mode);
3062 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3063 if (GET_MODE (op0) != mode)
3064 op0 = convert_to_mode (mode, op0, 0);
3065 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3066 if (GET_MODE (op1) != mode2)
3067 op1 = convert_to_mode (mode2, op1, 0);
3069 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3070 target, LCT_CONST, mode, 2,
3071 op0, mode, op1, mode2);
3073 return target;
3076 /* Expand expression EXP, which is a call to the strlen builtin. Return
3077 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3078 try to get the result in TARGET, if convenient. */
3080 static rtx
3081 expand_builtin_strlen (tree exp, rtx target,
3082 enum machine_mode target_mode)
3084 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3085 return NULL_RTX;
3086 else
3088 rtx pat;
3089 tree len;
3090 tree src = CALL_EXPR_ARG (exp, 0);
3091 rtx result, src_reg, char_rtx, before_strlen;
3092 enum machine_mode insn_mode = target_mode, char_mode;
3093 enum insn_code icode = CODE_FOR_nothing;
3094 int align;
3096 /* If the length can be computed at compile-time, return it. */
3097 len = c_strlen (src, 0);
3098 if (len)
3099 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3101 /* If the length can be computed at compile-time and is constant
3102 integer, but there are side-effects in src, evaluate
3103 src for side-effects, then return len.
3104 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3105 can be optimized into: i++; x = 3; */
3106 len = c_strlen (src, 1);
3107 if (len && TREE_CODE (len) == INTEGER_CST)
3109 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3110 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3113 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3115 /* If SRC is not a pointer type, don't do this operation inline. */
3116 if (align == 0)
3117 return NULL_RTX;
3119 /* Bail out if we can't compute strlen in the right mode. */
3120 while (insn_mode != VOIDmode)
3122 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3123 if (icode != CODE_FOR_nothing)
3124 break;
3126 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3128 if (insn_mode == VOIDmode)
3129 return NULL_RTX;
3131 /* Make a place to write the result of the instruction. */
3132 result = target;
3133 if (! (result != 0
3134 && REG_P (result)
3135 && GET_MODE (result) == insn_mode
3136 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3137 result = gen_reg_rtx (insn_mode);
3139 /* Make a place to hold the source address. We will not expand
3140 the actual source until we are sure that the expansion will
3141 not fail -- there are trees that cannot be expanded twice. */
3142 src_reg = gen_reg_rtx (Pmode);
3144 /* Mark the beginning of the strlen sequence so we can emit the
3145 source operand later. */
3146 before_strlen = get_last_insn ();
3148 char_rtx = const0_rtx;
3149 char_mode = insn_data[(int) icode].operand[2].mode;
3150 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3151 char_mode))
3152 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3154 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3155 char_rtx, GEN_INT (align));
3156 if (! pat)
3157 return NULL_RTX;
3158 emit_insn (pat);
3160 /* Now that we are assured of success, expand the source. */
3161 start_sequence ();
3162 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3163 if (pat != src_reg)
3164 emit_move_insn (src_reg, pat);
3165 pat = get_insns ();
3166 end_sequence ();
3168 if (before_strlen)
3169 emit_insn_after (pat, before_strlen);
3170 else
3171 emit_insn_before (pat, get_insns ());
3173 /* Return the value in the proper mode for this function. */
3174 if (GET_MODE (result) == target_mode)
3175 target = result;
3176 else if (target != 0)
3177 convert_move (target, result, 0);
3178 else
3179 target = convert_to_mode (target_mode, result, 0);
3181 return target;
3185 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed; the
3186 caller should emit a normal call, otherwise try to get the result
3187 in TARGET, if convenient (and in mode MODE if that's convenient). */
3189 static rtx
3190 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3192 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3194 tree type = TREE_TYPE (exp);
3195 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3196 CALL_EXPR_ARG (exp, 1), type);
3197 if (result)
3198 return expand_expr (result, target, mode, EXPAND_NORMAL);
3200 return NULL_RTX;
3203 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed; the
3204 caller should emit a normal call, otherwise try to get the result
3205 in TARGET, if convenient (and in mode MODE if that's convenient). */
3207 static rtx
3208 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3210 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3212 tree type = TREE_TYPE (exp);
3213 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3214 CALL_EXPR_ARG (exp, 1), type);
3215 if (result)
3216 return expand_expr (result, target, mode, EXPAND_NORMAL);
3218 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3220 return NULL_RTX;
3223 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed; the
3224 caller should emit a normal call, otherwise try to get the result
3225 in TARGET, if convenient (and in mode MODE if that's convenient). */
3227 static rtx
3228 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3230 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3232 tree type = TREE_TYPE (exp);
3233 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3234 CALL_EXPR_ARG (exp, 1), type);
3235 if (result)
3236 return expand_expr (result, target, mode, EXPAND_NORMAL);
3238 return NULL_RTX;
3241 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed; the
3242 caller should emit a normal call, otherwise try to get the result
3243 in TARGET, if convenient (and in mode MODE if that's convenient). */
3245 static rtx
3246 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3248 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3250 tree type = TREE_TYPE (exp);
3251 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3252 CALL_EXPR_ARG (exp, 1), type);
3253 if (result)
3254 return expand_expr (result, target, mode, EXPAND_NORMAL);
3256 return NULL_RTX;
3259 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3260 bytes from constant string DATA + OFFSET and return it as target
3261 constant. */
3263 static rtx
3264 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3265 enum machine_mode mode)
3267 const char *str = (const char *) data;
3269 gcc_assert (offset >= 0
3270 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3271 <= strlen (str) + 1));
3273 return c_readstr (str + offset, mode);
3276 /* Expand a call EXP to the memcpy builtin.
3277 Return NULL_RTX if we failed; the caller should emit a normal call,
3278 otherwise try to get the result in TARGET, if convenient (and in
3279 mode MODE if that's convenient). */
3281 static rtx
3282 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3284 tree fndecl = get_callee_fndecl (exp);
3286 if (!validate_arglist (exp,
3287 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3288 return NULL_RTX;
3289 else
3291 tree dest = CALL_EXPR_ARG (exp, 0);
3292 tree src = CALL_EXPR_ARG (exp, 1);
3293 tree len = CALL_EXPR_ARG (exp, 2);
3294 const char *src_str;
3295 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3296 unsigned int dest_align
3297 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3298 rtx dest_mem, src_mem, dest_addr, len_rtx;
3299 tree result = fold_builtin_memory_op (dest, src, len,
3300 TREE_TYPE (TREE_TYPE (fndecl)),
3301 false, /*endp=*/0);
3302 HOST_WIDE_INT expected_size = -1;
3303 unsigned int expected_align = 0;
3304 tree_ann_common_t ann;
3306 if (result)
3308 while (TREE_CODE (result) == COMPOUND_EXPR)
3310 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3311 EXPAND_NORMAL);
3312 result = TREE_OPERAND (result, 1);
3314 return expand_expr (result, target, mode, EXPAND_NORMAL);
3317 /* If DEST is not a pointer type, call the normal function. */
3318 if (dest_align == 0)
3319 return NULL_RTX;
3321 /* If SRC is not a pointer type, don't do this
3322 operation in-line. */
3323 if (src_align == 0)
3324 return NULL_RTX;
3326 ann = tree_common_ann (exp);
3327 if (ann)
3328 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3330 if (expected_align < dest_align)
3331 expected_align = dest_align;
3332 dest_mem = get_memory_rtx (dest, len);
3333 set_mem_align (dest_mem, dest_align);
3334 len_rtx = expand_normal (len);
3335 src_str = c_getstr (src);
3337 /* If SRC is a string constant and block move would be done
3338 by pieces, we can avoid loading the string from memory
3339 and only store the computed constants. */
3340 if (src_str
3341 && GET_CODE (len_rtx) == CONST_INT
3342 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3343 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3344 CONST_CAST (char *, src_str),
3345 dest_align, false))
3347 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3348 builtin_memcpy_read_str,
3349 CONST_CAST (char *, src_str),
3350 dest_align, false, 0);
3351 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3352 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3353 return dest_mem;
3356 src_mem = get_memory_rtx (src, len);
3357 set_mem_align (src_mem, src_align);
3359 /* Copy word part most expediently. */
3360 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3361 CALL_EXPR_TAILCALL (exp)
3362 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3363 expected_align, expected_size);
3365 if (dest_addr == 0)
3367 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3368 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3370 return dest_addr;
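/* Illustrative example (editor's note, an assumed typical case): with a
   constant source string and a small constant length, e.g.

     memcpy (buf, "hi", 3);

   the store_by_pieces path above emits the string's bytes as immediate
   stores instead of loading the constant from memory or calling the
   library.  */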
3374 /* Expand a call EXP to the mempcpy builtin.
3375 Return NULL_RTX if we failed; the caller should emit a normal call,
3376 otherwise try to get the result in TARGET, if convenient (and in
3377 mode MODE if that's convenient). If ENDP is 0 return the
3378 destination pointer, if ENDP is 1 return the end pointer ala
3379 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3380 stpcpy. */
3382 static rtx
3383 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3385 if (!validate_arglist (exp,
3386 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3387 return NULL_RTX;
3388 else
3390 tree dest = CALL_EXPR_ARG (exp, 0);
3391 tree src = CALL_EXPR_ARG (exp, 1);
3392 tree len = CALL_EXPR_ARG (exp, 2);
3393 return expand_builtin_mempcpy_args (dest, src, len,
3394 TREE_TYPE (exp),
3395 target, mode, /*endp=*/ 1);
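/* Reference sketch (editor's illustration of the ENDP convention): for
   dest = d, src = s and len = 4 the helper below returns

     endp == 0:  d        (memcpy-style result)
     endp == 1:  d + 4    (mempcpy-style, one past the last byte written)
     endp == 2:  d + 3    (stpcpy-style, pointing at the last byte written)  */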
3399 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3400 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3401 so that this can also be called without constructing an actual CALL_EXPR.
3402 TYPE is the return type of the call. The other arguments and return value
3403 are the same as for expand_builtin_mempcpy. */
3405 static rtx
3406 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3407 rtx target, enum machine_mode mode, int endp)
3409 /* If return value is ignored, transform mempcpy into memcpy. */
3410 if (target == const0_rtx)
3412 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3414 if (!fn)
3415 return NULL_RTX;
3417 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3418 target, mode, EXPAND_NORMAL);
3420 else
3422 const char *src_str;
3423 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3424 unsigned int dest_align
3425 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3426 rtx dest_mem, src_mem, len_rtx;
3427 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3429 if (result)
3431 while (TREE_CODE (result) == COMPOUND_EXPR)
3433 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3434 EXPAND_NORMAL);
3435 result = TREE_OPERAND (result, 1);
3437 return expand_expr (result, target, mode, EXPAND_NORMAL);
3440 /* If either SRC or DEST is not a pointer type, don't do this
3441 operation in-line. */
3442 if (dest_align == 0 || src_align == 0)
3443 return NULL_RTX;
3445 /* If LEN is not constant, call the normal function. */
3446 if (! host_integerp (len, 1))
3447 return NULL_RTX;
3449 len_rtx = expand_normal (len);
3450 src_str = c_getstr (src);
3452 /* If SRC is a string constant and block move would be done
3453 by pieces, we can avoid loading the string from memory
3454 and only store the computed constants. */
3455 if (src_str
3456 && GET_CODE (len_rtx) == CONST_INT
3457 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3458 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3459 CONST_CAST (char *, src_str),
3460 dest_align, false))
3462 dest_mem = get_memory_rtx (dest, len);
3463 set_mem_align (dest_mem, dest_align);
3464 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3465 builtin_memcpy_read_str,
3466 CONST_CAST (char *, src_str),
3467 dest_align, false, endp);
3468 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3469 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3470 return dest_mem;
3473 if (GET_CODE (len_rtx) == CONST_INT
3474 && can_move_by_pieces (INTVAL (len_rtx),
3475 MIN (dest_align, src_align)))
3477 dest_mem = get_memory_rtx (dest, len);
3478 set_mem_align (dest_mem, dest_align);
3479 src_mem = get_memory_rtx (src, len);
3480 set_mem_align (src_mem, src_align);
3481 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3482 MIN (dest_align, src_align), endp);
3483 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3484 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3485 return dest_mem;
3488 return NULL_RTX;
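/* Illustrative note, not part of the original sources: mempcpy returns
   the end pointer, so when its result is unused the call degenerates to
   plain memcpy.  Roughly

       p = mempcpy (dst, src, n);         here p == dst + n
       (void) mempcpy (dst, src, n);  =>  memcpy (dst, src, n);

   The second form is what the TARGET == const0_rtx case above emits,
   provided the implicit memcpy declaration is available.  */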
3492 /* Expand expression EXP, which is a call to the memmove builtin. Return
3493 NULL_RTX if we failed; the caller should emit a normal call. */
3495 static rtx
3496 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3498 if (!validate_arglist (exp,
3499 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3500 return NULL_RTX;
3501 else
3503 tree dest = CALL_EXPR_ARG (exp, 0);
3504 tree src = CALL_EXPR_ARG (exp, 1);
3505 tree len = CALL_EXPR_ARG (exp, 2);
3506 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3507 target, mode, ignore);
3511 /* Helper function to do the actual work for expand_builtin_memmove. The
3512 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3513 so that this can also be called without constructing an actual CALL_EXPR.
3514 TYPE is the return type of the call. The other arguments and return value
3515 are the same as for expand_builtin_memmove. */
3517 static rtx
3518 expand_builtin_memmove_args (tree dest, tree src, tree len,
3519 tree type, rtx target, enum machine_mode mode,
3520 int ignore)
3522 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3524 if (result)
3526 STRIP_TYPE_NOPS (result);
3527 while (TREE_CODE (result) == COMPOUND_EXPR)
3529 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3530 EXPAND_NORMAL);
3531 result = TREE_OPERAND (result, 1);
3533 return expand_expr (result, target, mode, EXPAND_NORMAL);
3536 /* Otherwise, call the normal function. */
3537 return NULL_RTX;
3540 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3541 NULL_RTX if we failed; the caller should emit a normal call. */
3543 static rtx
3544 expand_builtin_bcopy (tree exp, int ignore)
3546 tree type = TREE_TYPE (exp);
3547 tree src, dest, size;
3549 if (!validate_arglist (exp,
3550 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3551 return NULL_RTX;
3553 src = CALL_EXPR_ARG (exp, 0);
3554 dest = CALL_EXPR_ARG (exp, 1);
3555 size = CALL_EXPR_ARG (exp, 2);
3557 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3558 This is done this way so that if it isn't expanded inline, we fall
3559 back to calling bcopy instead of memmove. */
3560 return expand_builtin_memmove_args (dest, src,
3561 fold_convert (sizetype, size),
3562 type, const0_rtx, VOIDmode,
3563 ignore);
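/* Illustrative note, not part of the original sources: the rewrite above
   only swaps the argument order and converts the size, i.e. roughly

       bcopy (src, dst, n);   becomes   memmove (dst, src, (size_t) n);

   and if the memmove expansion then fails, the original bcopy call is
   emitted unchanged, as the comment above explains.  */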
3566 #ifndef HAVE_movstr
3567 # define HAVE_movstr 0
3568 # define CODE_FOR_movstr CODE_FOR_nothing
3569 #endif
3571 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3572 we failed; the caller should emit a normal call, otherwise try to
3573 get the result in TARGET, if convenient. If ENDP is 0 return the
3574 destination pointer, if ENDP is 1 return the end pointer ala
3575 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3576 stpcpy. */
3578 static rtx
3579 expand_movstr (tree dest, tree src, rtx target, int endp)
3581 rtx end;
3582 rtx dest_mem;
3583 rtx src_mem;
3584 rtx insn;
3585 const struct insn_data * data;
3587 if (!HAVE_movstr)
3588 return NULL_RTX;
3590 dest_mem = get_memory_rtx (dest, NULL);
3591 src_mem = get_memory_rtx (src, NULL);
3592 if (!endp)
3594 target = force_reg (Pmode, XEXP (dest_mem, 0));
3595 dest_mem = replace_equiv_address (dest_mem, target);
3596 end = gen_reg_rtx (Pmode);
3598 else
3600 if (target == 0 || target == const0_rtx)
3602 end = gen_reg_rtx (Pmode);
3603 if (target == 0)
3604 target = end;
3606 else
3607 end = target;
3610 data = insn_data + CODE_FOR_movstr;
3612 if (data->operand[0].mode != VOIDmode)
3613 end = gen_lowpart (data->operand[0].mode, end);
3615 insn = data->genfun (end, dest_mem, src_mem);
3617 gcc_assert (insn);
3619 emit_insn (insn);
3621 /* movstr is supposed to set end to the address of the NUL
3622 terminator. If the caller requested a mempcpy-like return value,
3623 adjust it. */
3624 if (endp == 1 && target != const0_rtx)
3626 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3627 emit_move_insn (target, force_operand (tem, NULL_RTX));
3630 return target;
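/* Illustrative note, not part of the original sources: the ENDP values
   used here and by the mempcpy/stpcpy expanders mean, for a copy of N
   bytes into DST:

       endp == 0   return DST            (memcpy, strcpy)
       endp == 1   return DST + N        (mempcpy)
       endp == 2   return DST + N - 1    (stpcpy, pointing at the NUL)

   movstr itself leaves the address of the NUL in END, which is why the
   endp == 1 case above adds one to it.  */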
3633 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3634 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3635 try to get the result in TARGET, if convenient (and in mode MODE if that's
3636 convenient). */
3638 static rtx
3639 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3641 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3643 tree dest = CALL_EXPR_ARG (exp, 0);
3644 tree src = CALL_EXPR_ARG (exp, 1);
3645 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3647 return NULL_RTX;
3650 /* Helper function to do the actual work for expand_builtin_strcpy. The
3651 arguments to the builtin_strcpy call DEST and SRC are broken out
3652 so that this can also be called without constructing an actual CALL_EXPR.
3653 The other arguments and return value are the same as for
3654 expand_builtin_strcpy. */
3656 static rtx
3657 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3658 rtx target, enum machine_mode mode)
3660 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3661 if (result)
3662 return expand_expr (result, target, mode, EXPAND_NORMAL);
3663 return expand_movstr (dest, src, target, /*endp=*/0);
3667 /* Expand a call EXP to the stpcpy builtin.
3668 Return NULL_RTX if we failed; the caller should emit a normal call,
3669 otherwise try to get the result in TARGET, if convenient (and in
3670 mode MODE if that's convenient). */
3672 static rtx
3673 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3675 tree dst, src;
3677 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3678 return NULL_RTX;
3680 dst = CALL_EXPR_ARG (exp, 0);
3681 src = CALL_EXPR_ARG (exp, 1);
3683 /* If return value is ignored, transform stpcpy into strcpy. */
3684 if (target == const0_rtx)
3686 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3687 if (!fn)
3688 return NULL_RTX;
3690 return expand_expr (build_call_expr (fn, 2, dst, src),
3691 target, mode, EXPAND_NORMAL);
3693 else
3695 tree len, lenp1;
3696 rtx ret;
3698 /* Ensure we get an actual string whose length can be evaluated at
3699 compile-time, not an expression containing a string. This is
3700 because the latter will potentially produce pessimized code
3701 when used to produce the return value. */
3702 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3703 return expand_movstr (dst, src, target, /*endp=*/2);
3705 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3706 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3707 target, mode, /*endp=*/2);
3709 if (ret)
3710 return ret;
3712 if (TREE_CODE (len) == INTEGER_CST)
3714 rtx len_rtx = expand_normal (len);
3716 if (GET_CODE (len_rtx) == CONST_INT)
3718 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3719 dst, src, target, mode);
3721 if (ret)
3723 if (! target)
3725 if (mode != VOIDmode)
3726 target = gen_reg_rtx (mode);
3727 else
3728 target = gen_reg_rtx (GET_MODE (ret));
3730 if (GET_MODE (target) != GET_MODE (ret))
3731 ret = gen_lowpart (GET_MODE (target), ret);
3733 ret = plus_constant (ret, INTVAL (len_rtx));
3734 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3735 gcc_assert (ret);
3737 return target;
3742 return expand_movstr (dst, src, target, /*endp=*/2);
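/* Illustrative note, not part of the original sources: with a constant
   source string the mempcpy path above reduces stpcpy to a fixed-size
   copy plus pointer arithmetic.  Roughly

       stpcpy (dst, "hi");

   behaves like

       memcpy (dst, "hi", 3);      copy strlen + 1 == 3 bytes
       return dst + 2;             pointer to the copied NUL

   which is exactly the ENDP == 2 ("end pointer minus one") convention.  */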
3746 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3747 bytes from constant string DATA + OFFSET and return it as target
3748 constant. */
static rtx
3751 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3752 enum machine_mode mode)
3754 const char *str = (const char *) data;
3756 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3757 return const0_rtx;
3759 return c_readstr (str + offset, mode);
3762 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3763 NULL_RTX if we failed; the caller should emit a normal call. */
3765 static rtx
3766 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3768 tree fndecl = get_callee_fndecl (exp);
3770 if (validate_arglist (exp,
3771 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3773 tree dest = CALL_EXPR_ARG (exp, 0);
3774 tree src = CALL_EXPR_ARG (exp, 1);
3775 tree len = CALL_EXPR_ARG (exp, 2);
3776 tree slen = c_strlen (src, 1);
3777 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3779 if (result)
3781 while (TREE_CODE (result) == COMPOUND_EXPR)
3783 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3784 EXPAND_NORMAL);
3785 result = TREE_OPERAND (result, 1);
3787 return expand_expr (result, target, mode, EXPAND_NORMAL);
3790 /* We must be passed a constant len and src parameter. */
3791 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3792 return NULL_RTX;
3794 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3796 /* We're required to pad with trailing zeros if the requested
3797 len is greater than strlen(s2)+1. In that case try to
3798 use store_by_pieces; if it fails, punt. */
3799 if (tree_int_cst_lt (slen, len))
3801 unsigned int dest_align
3802 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3803 const char *p = c_getstr (src);
3804 rtx dest_mem;
3806 if (!p || dest_align == 0 || !host_integerp (len, 1)
3807 || !can_store_by_pieces (tree_low_cst (len, 1),
3808 builtin_strncpy_read_str,
3809 CONST_CAST (char *, p),
3810 dest_align, false))
3811 return NULL_RTX;
3813 dest_mem = get_memory_rtx (dest, len);
3814 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3815 builtin_strncpy_read_str,
3816 CONST_CAST (char *, p), dest_align, false, 0);
3817 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3818 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3819 return dest_mem;
3822 return NULL_RTX;
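/* Illustrative note, not part of the original sources: the inline case
   above only triggers when the requested length exceeds strlen (SRC) + 1,
   so the zero padding required by strncpy is simply part of the constant
   data written by store_by_pieces.  For example

       char buf[8];
       strncpy (buf, "ab", 8);

   stores the eight constant bytes 'a', 'b', 0, 0, 0, 0, 0, 0, because
   builtin_strncpy_read_str hands back const0_rtx for offsets past the end
   of the string.  */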
3825 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3826 bytes from constant string DATA + OFFSET and return it as target
3827 constant. */
static rtx
3830 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3831 enum machine_mode mode)
3833 const char *c = (const char *) data;
3834 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3836 memset (p, *c, GET_MODE_SIZE (mode));
3838 return c_readstr (p, mode);
3841 /* Callback routine for store_by_pieces. Return the RTL of a register
3842 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3843 char value given in the RTL register data. For example, if mode is
3844 4 bytes wide, return the RTL for 0x01010101*data. */
3846 static rtx
3847 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3848 enum machine_mode mode)
3850 rtx target, coeff;
3851 size_t size;
3852 char *p;
3854 size = GET_MODE_SIZE (mode);
3855 if (size == 1)
3856 return (rtx) data;
3858 p = XALLOCAVEC (char, size);
3859 memset (p, 1, size);
3860 coeff = c_readstr (p, mode);
3862 target = convert_to_mode (mode, (rtx) data, 1);
3863 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3864 return force_reg (mode, target);
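/* Illustrative note, not part of the original sources: the multiplication
   by a coefficient of repeated 0x01 bytes is the usual byte-replication
   trick.  For a four-byte mode and an unsigned char value C,

       (uint32_t) c * 0x01010101u

   yields a word whose four bytes all equal C, e.g. c == 0xab gives
   0xabababab; the c_readstr call on the buffer of 0x01 bytes above is
   what produces the 0x01010101 coefficient in the requested mode.  */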
3867 /* Expand expression EXP, which is a call to the memset builtin. Return
3868 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3869 try to get the result in TARGET, if convenient (and in mode MODE if that's
3870 convenient). */
3872 static rtx
3873 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3875 if (!validate_arglist (exp,
3876 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3877 return NULL_RTX;
3878 else
3880 tree dest = CALL_EXPR_ARG (exp, 0);
3881 tree val = CALL_EXPR_ARG (exp, 1);
3882 tree len = CALL_EXPR_ARG (exp, 2);
3883 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3887 /* Helper function to do the actual work for expand_builtin_memset. The
3888 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3889 so that this can also be called without constructing an actual CALL_EXPR.
3890 The other arguments and return value are the same as for
3891 expand_builtin_memset. */
3893 static rtx
3894 expand_builtin_memset_args (tree dest, tree val, tree len,
3895 rtx target, enum machine_mode mode, tree orig_exp)
3897 tree fndecl, fn;
3898 enum built_in_function fcode;
3899 char c;
3900 unsigned int dest_align;
3901 rtx dest_mem, dest_addr, len_rtx;
3902 HOST_WIDE_INT expected_size = -1;
3903 unsigned int expected_align = 0;
3904 tree_ann_common_t ann;
3906 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3908 /* If DEST is not a pointer type, don't do this operation in-line. */
3909 if (dest_align == 0)
3910 return NULL_RTX;
3912 ann = tree_common_ann (orig_exp);
3913 if (ann)
3914 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3916 if (expected_align < dest_align)
3917 expected_align = dest_align;
3919 /* If the LEN parameter is zero, return DEST. */
3920 if (integer_zerop (len))
3922 /* Evaluate and ignore VAL in case it has side-effects. */
3923 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3924 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3927 /* Stabilize the arguments in case we fail. */
3928 dest = builtin_save_expr (dest);
3929 val = builtin_save_expr (val);
3930 len = builtin_save_expr (len);
3932 len_rtx = expand_normal (len);
3933 dest_mem = get_memory_rtx (dest, len);
3935 if (TREE_CODE (val) != INTEGER_CST)
3937 rtx val_rtx;
3939 val_rtx = expand_normal (val);
3940 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3941 val_rtx, 0);
3943 /* Assume that we can memset by pieces if we can store
3944 the coefficients by pieces (in the required modes).
3945 We can't pass builtin_memset_gen_str as that emits RTL. */
3946 c = 1;
3947 if (host_integerp (len, 1)
3948 && can_store_by_pieces (tree_low_cst (len, 1),
3949 builtin_memset_read_str, &c, dest_align,
3950 true))
3952 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3953 val_rtx);
3954 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3955 builtin_memset_gen_str, val_rtx, dest_align,
3956 true, 0);
3958 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3959 dest_align, expected_align,
3960 expected_size))
3961 goto do_libcall;
3963 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3964 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3965 return dest_mem;
3968 if (target_char_cast (val, &c))
3969 goto do_libcall;
3971 if (c)
3973 if (host_integerp (len, 1)
3974 && can_store_by_pieces (tree_low_cst (len, 1),
3975 builtin_memset_read_str, &c, dest_align,
3976 true))
3977 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3978 builtin_memset_read_str, &c, dest_align, true, 0);
3979 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3980 dest_align, expected_align,
3981 expected_size))
3982 goto do_libcall;
3984 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3985 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3986 return dest_mem;
3989 set_mem_align (dest_mem, dest_align);
3990 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3991 CALL_EXPR_TAILCALL (orig_exp)
3992 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3993 expected_align, expected_size);
3995 if (dest_addr == 0)
3997 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3998 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4001 return dest_addr;
4003 do_libcall:
4004 fndecl = get_callee_fndecl (orig_exp);
4005 fcode = DECL_FUNCTION_CODE (fndecl);
4006 if (fcode == BUILT_IN_MEMSET)
4007 fn = build_call_expr (fndecl, 3, dest, val, len);
4008 else if (fcode == BUILT_IN_BZERO)
4009 fn = build_call_expr (fndecl, 2, dest, len);
4010 else
4011 gcc_unreachable ();
4012 if (TREE_CODE (fn) == CALL_EXPR)
4013 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4014 return expand_call (fn, target, target == const0_rtx);
4017 /* Expand expression EXP, which is a call to the bzero builtin. Return
4018 NULL_RTX if we failed; the caller should emit a normal call. */
4020 static rtx
4021 expand_builtin_bzero (tree exp)
4023 tree dest, size;
4025 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4026 return NULL_RTX;
4028 dest = CALL_EXPR_ARG (exp, 0);
4029 size = CALL_EXPR_ARG (exp, 1);
4031 /* New argument list transforming bzero(ptr x, int y) to
4032 memset(ptr x, int 0, size_t y). This is done this way
4033 so that if it isn't expanded inline, we fall back to
4034 calling bzero instead of memset. */
4036 return expand_builtin_memset_args (dest, integer_zero_node,
4037 fold_convert (sizetype, size),
4038 const0_rtx, VOIDmode, exp);
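/* Illustrative note, not part of the original sources: as with bcopy,
   the rewrite is purely an argument-list change, roughly

       bzero (p, n);   becomes   memset (p, 0, (size_t) n);

   with the original bzero CALL_EXPR passed along as ORIG_EXP so that the
   do_libcall fallback in expand_builtin_memset_args still calls bzero.  */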
4041 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed; the
4042 caller should emit a normal call, otherwise try to get the result
4043 in TARGET, if convenient (and in mode MODE if that's convenient). */
4045 static rtx
4046 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4048 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4049 INTEGER_TYPE, VOID_TYPE))
4051 tree type = TREE_TYPE (exp);
4052 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4053 CALL_EXPR_ARG (exp, 1),
4054 CALL_EXPR_ARG (exp, 2), type);
4055 if (result)
4056 return expand_expr (result, target, mode, EXPAND_NORMAL);
4058 return NULL_RTX;
4061 /* Expand expression EXP, which is a call to the memcmp built-in function.
4062 Return NULL_RTX if we failed and the
4063 caller should emit a normal call, otherwise try to get the result in
4064 TARGET, if convenient (and in mode MODE, if that's convenient). */
4066 static rtx
4067 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4069 if (!validate_arglist (exp,
4070 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4071 return NULL_RTX;
4072 else
4074 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4075 CALL_EXPR_ARG (exp, 1),
4076 CALL_EXPR_ARG (exp, 2));
4077 if (result)
4078 return expand_expr (result, target, mode, EXPAND_NORMAL);
4081 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4083 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4084 rtx result;
4085 rtx insn;
4086 tree arg1 = CALL_EXPR_ARG (exp, 0);
4087 tree arg2 = CALL_EXPR_ARG (exp, 1);
4088 tree len = CALL_EXPR_ARG (exp, 2);
4090 int arg1_align
4091 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4092 int arg2_align
4093 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4094 enum machine_mode insn_mode;
4096 #ifdef HAVE_cmpmemsi
4097 if (HAVE_cmpmemsi)
4098 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4099 else
4100 #endif
4101 #ifdef HAVE_cmpstrnsi
4102 if (HAVE_cmpstrnsi)
4103 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4104 else
4105 #endif
4106 return NULL_RTX;
4108 /* If we don't have POINTER_TYPE, call the function. */
4109 if (arg1_align == 0 || arg2_align == 0)
4110 return NULL_RTX;
4112 /* Make a place to write the result of the instruction. */
4113 result = target;
4114 if (! (result != 0
4115 && REG_P (result) && GET_MODE (result) == insn_mode
4116 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4117 result = gen_reg_rtx (insn_mode);
4119 arg1_rtx = get_memory_rtx (arg1, len);
4120 arg2_rtx = get_memory_rtx (arg2, len);
4121 arg3_rtx = expand_normal (len);
4123 /* Set MEM_SIZE as appropriate. */
4124 if (GET_CODE (arg3_rtx) == CONST_INT)
4126 set_mem_size (arg1_rtx, arg3_rtx);
4127 set_mem_size (arg2_rtx, arg3_rtx);
4130 #ifdef HAVE_cmpmemsi
4131 if (HAVE_cmpmemsi)
4132 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4133 GEN_INT (MIN (arg1_align, arg2_align)));
4134 else
4135 #endif
4136 #ifdef HAVE_cmpstrnsi
4137 if (HAVE_cmpstrnsi)
4138 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4139 GEN_INT (MIN (arg1_align, arg2_align)));
4140 else
4141 #endif
4142 gcc_unreachable ();
4144 if (insn)
4145 emit_insn (insn);
4146 else
4147 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4148 TYPE_MODE (integer_type_node), 3,
4149 XEXP (arg1_rtx, 0), Pmode,
4150 XEXP (arg2_rtx, 0), Pmode,
4151 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4152 TYPE_UNSIGNED (sizetype)),
4153 TYPE_MODE (sizetype));
4155 /* Return the value in the proper mode for this function. */
4156 mode = TYPE_MODE (TREE_TYPE (exp));
4157 if (GET_MODE (result) == mode)
4158 return result;
4159 else if (target != 0)
4161 convert_move (target, result, 0);
4162 return target;
4164 else
4165 return convert_to_mode (mode, result, 0);
4167 #endif
4169 return NULL_RTX;
4172 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4173 if we failed; the caller should emit a normal call, otherwise try to get
4174 the result in TARGET, if convenient. */
4176 static rtx
4177 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4179 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4180 return NULL_RTX;
4181 else
4183 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4184 CALL_EXPR_ARG (exp, 1));
4185 if (result)
4186 return expand_expr (result, target, mode, EXPAND_NORMAL);
4189 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4190 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4191 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4193 rtx arg1_rtx, arg2_rtx;
4194 rtx result, insn = NULL_RTX;
4195 tree fndecl, fn;
4196 tree arg1 = CALL_EXPR_ARG (exp, 0);
4197 tree arg2 = CALL_EXPR_ARG (exp, 1);
4199 int arg1_align
4200 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4201 int arg2_align
4202 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4204 /* If we don't have POINTER_TYPE, call the function. */
4205 if (arg1_align == 0 || arg2_align == 0)
4206 return NULL_RTX;
4208 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4209 arg1 = builtin_save_expr (arg1);
4210 arg2 = builtin_save_expr (arg2);
4212 arg1_rtx = get_memory_rtx (arg1, NULL);
4213 arg2_rtx = get_memory_rtx (arg2, NULL);
4215 #ifdef HAVE_cmpstrsi
4216 /* Try to call cmpstrsi. */
4217 if (HAVE_cmpstrsi)
4219 enum machine_mode insn_mode
4220 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4222 /* Make a place to write the result of the instruction. */
4223 result = target;
4224 if (! (result != 0
4225 && REG_P (result) && GET_MODE (result) == insn_mode
4226 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4227 result = gen_reg_rtx (insn_mode);
4229 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4230 GEN_INT (MIN (arg1_align, arg2_align)));
4232 #endif
4233 #ifdef HAVE_cmpstrnsi
4234 /* Try to determine at least one length and call cmpstrnsi. */
4235 if (!insn && HAVE_cmpstrnsi)
4237 tree len;
4238 rtx arg3_rtx;
4240 enum machine_mode insn_mode
4241 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4242 tree len1 = c_strlen (arg1, 1);
4243 tree len2 = c_strlen (arg2, 1);
4245 if (len1)
4246 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4247 if (len2)
4248 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4250 /* If we don't have a constant length for the first, use the length
4251 of the second, if we know it. We don't require a constant for
4252 this case; some cost analysis could be done if both are available
4253 but neither is constant. For now, assume they're equally cheap,
4254 unless one has side effects. If both strings have constant lengths,
4255 use the smaller. */
4257 if (!len1)
4258 len = len2;
4259 else if (!len2)
4260 len = len1;
4261 else if (TREE_SIDE_EFFECTS (len1))
4262 len = len2;
4263 else if (TREE_SIDE_EFFECTS (len2))
4264 len = len1;
4265 else if (TREE_CODE (len1) != INTEGER_CST)
4266 len = len2;
4267 else if (TREE_CODE (len2) != INTEGER_CST)
4268 len = len1;
4269 else if (tree_int_cst_lt (len1, len2))
4270 len = len1;
4271 else
4272 len = len2;
4274 /* If both arguments have side effects, we cannot optimize. */
4275 if (!len || TREE_SIDE_EFFECTS (len))
4276 goto do_libcall;
4278 arg3_rtx = expand_normal (len);
4280 /* Make a place to write the result of the instruction. */
4281 result = target;
4282 if (! (result != 0
4283 && REG_P (result) && GET_MODE (result) == insn_mode
4284 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4285 result = gen_reg_rtx (insn_mode);
4287 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4288 GEN_INT (MIN (arg1_align, arg2_align)));
4290 #endif
4292 if (insn)
4294 emit_insn (insn);
4296 /* Return the value in the proper mode for this function. */
4297 mode = TYPE_MODE (TREE_TYPE (exp));
4298 if (GET_MODE (result) == mode)
4299 return result;
4300 if (target == 0)
4301 return convert_to_mode (mode, result, 0);
4302 convert_move (target, result, 0);
4303 return target;
4306 /* Expand the library call ourselves using a stabilized argument
4307 list to avoid re-evaluating the function's arguments twice. */
4308 #ifdef HAVE_cmpstrnsi
4309 do_libcall:
4310 #endif
4311 fndecl = get_callee_fndecl (exp);
4312 fn = build_call_expr (fndecl, 2, arg1, arg2);
4313 if (TREE_CODE (fn) == CALL_EXPR)
4314 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4315 return expand_call (fn, target, target == const0_rtx);
4317 #endif
4318 return NULL_RTX;
4321 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4322 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4323 the result in TARGET, if convenient. */
4325 static rtx
4326 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4328 if (!validate_arglist (exp,
4329 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4330 return NULL_RTX;
4331 else
4333 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4334 CALL_EXPR_ARG (exp, 1),
4335 CALL_EXPR_ARG (exp, 2));
4336 if (result)
4337 return expand_expr (result, target, mode, EXPAND_NORMAL);
4340 /* If c_strlen can determine an expression for one of the string
4341 lengths, and it doesn't have side effects, then emit cmpstrnsi
4342 using length MIN(strlen(string)+1, arg3). */
4343 #ifdef HAVE_cmpstrnsi
4344 if (HAVE_cmpstrnsi)
4346 tree len, len1, len2;
4347 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4348 rtx result, insn;
4349 tree fndecl, fn;
4350 tree arg1 = CALL_EXPR_ARG (exp, 0);
4351 tree arg2 = CALL_EXPR_ARG (exp, 1);
4352 tree arg3 = CALL_EXPR_ARG (exp, 2);
4354 int arg1_align
4355 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4356 int arg2_align
4357 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4358 enum machine_mode insn_mode
4359 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4361 len1 = c_strlen (arg1, 1);
4362 len2 = c_strlen (arg2, 1);
4364 if (len1)
4365 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4366 if (len2)
4367 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4369 /* If we don't have a constant length for the first, use the length
4370 of the second, if we know it. We don't require a constant for
4371 this case; some cost analysis could be done if both are available
4372 but neither is constant. For now, assume they're equally cheap,
4373 unless one has side effects. If both strings have constant lengths,
4374 use the smaller. */
4376 if (!len1)
4377 len = len2;
4378 else if (!len2)
4379 len = len1;
4380 else if (TREE_SIDE_EFFECTS (len1))
4381 len = len2;
4382 else if (TREE_SIDE_EFFECTS (len2))
4383 len = len1;
4384 else if (TREE_CODE (len1) != INTEGER_CST)
4385 len = len2;
4386 else if (TREE_CODE (len2) != INTEGER_CST)
4387 len = len1;
4388 else if (tree_int_cst_lt (len1, len2))
4389 len = len1;
4390 else
4391 len = len2;
4393 /* If both arguments have side effects, we cannot optimize. */
4394 if (!len || TREE_SIDE_EFFECTS (len))
4395 return NULL_RTX;
4397 /* The actual new length parameter is MIN(len,arg3). */
4398 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4399 fold_convert (TREE_TYPE (len), arg3));
4401 /* If we don't have POINTER_TYPE, call the function. */
4402 if (arg1_align == 0 || arg2_align == 0)
4403 return NULL_RTX;
4405 /* Make a place to write the result of the instruction. */
4406 result = target;
4407 if (! (result != 0
4408 && REG_P (result) && GET_MODE (result) == insn_mode
4409 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4410 result = gen_reg_rtx (insn_mode);
4412 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4413 arg1 = builtin_save_expr (arg1);
4414 arg2 = builtin_save_expr (arg2);
4415 len = builtin_save_expr (len);
4417 arg1_rtx = get_memory_rtx (arg1, len);
4418 arg2_rtx = get_memory_rtx (arg2, len);
4419 arg3_rtx = expand_normal (len);
4420 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4421 GEN_INT (MIN (arg1_align, arg2_align)));
4422 if (insn)
4424 emit_insn (insn);
4426 /* Return the value in the proper mode for this function. */
4427 mode = TYPE_MODE (TREE_TYPE (exp));
4428 if (GET_MODE (result) == mode)
4429 return result;
4430 if (target == 0)
4431 return convert_to_mode (mode, result, 0);
4432 convert_move (target, result, 0);
4433 return target;
4436 /* Expand the library call ourselves using a stabilized argument
4437 list to avoid re-evaluating the function's arguments twice. */
4438 fndecl = get_callee_fndecl (exp);
4439 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4440 if (TREE_CODE (fn) == CALL_EXPR)
4441 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4442 return expand_call (fn, target, target == const0_rtx);
4444 #endif
4445 return NULL_RTX;
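/* Illustrative note, not part of the original sources: the length handed
   to cmpstrnsi above is MIN (strlen (s) + 1, n) for whichever argument
   has a known constant length.  For example

       strncmp (s, "foo", 100)

   compares at most MIN (4, 100) == 4 bytes, which is sufficient because
   the comparison cannot meaningfully continue past the NUL of the
   constant string.  */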
4448 /* Expand expression EXP, which is a call to the strcat builtin.
4449 Return NULL_RTX if we failed; the caller should emit a normal call,
4450 otherwise try to get the result in TARGET, if convenient. */
4452 static rtx
4453 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4455 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4456 return NULL_RTX;
4457 else
4459 tree dst = CALL_EXPR_ARG (exp, 0);
4460 tree src = CALL_EXPR_ARG (exp, 1);
4461 const char *p = c_getstr (src);
4463 /* If the string length is zero, return the dst parameter. */
4464 if (p && *p == '\0')
4465 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4467 if (optimize_insn_for_speed_p ())
4469 /* See if we can store by pieces into (dst + strlen(dst)). */
4470 tree newsrc, newdst,
4471 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4472 rtx insns;
4474 /* Stabilize the argument list. */
4475 newsrc = builtin_save_expr (src);
4476 dst = builtin_save_expr (dst);
4478 start_sequence ();
4480 /* Create strlen (dst). */
4481 newdst = build_call_expr (strlen_fn, 1, dst);
4482 /* Create (dst p+ strlen (dst)). */
4484 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4485 newdst = builtin_save_expr (newdst);
4487 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4489 end_sequence (); /* Stop sequence. */
4490 return NULL_RTX;
4493 /* Output the entire sequence. */
4494 insns = get_insns ();
4495 end_sequence ();
4496 emit_insn (insns);
4498 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4501 return NULL_RTX;
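/* Illustrative note, not part of the original sources: the
   store-by-pieces case above amounts to the classic rewrite

       strcat (dst, "suffix");   =>   strcpy (dst + strlen (dst), "suffix");

   built with POINTER_PLUS_EXPR so the strlen call is emitted once and the
   strcpy part can then be expanded inline; the whole generated sequence
   is thrown away if expand_builtin_strcpy_args cannot expand it.  */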
4505 /* Expand expression EXP, which is a call to the strncat builtin.
4506 Return NULL_RTX if we failed; the caller should emit a normal call,
4507 otherwise try to get the result in TARGET, if convenient. */
4509 static rtx
4510 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4512 if (validate_arglist (exp,
4513 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4515 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4516 CALL_EXPR_ARG (exp, 1),
4517 CALL_EXPR_ARG (exp, 2));
4518 if (result)
4519 return expand_expr (result, target, mode, EXPAND_NORMAL);
4521 return NULL_RTX;
4524 /* Expand expression EXP, which is a call to the strspn builtin.
4525 Return NULL_RTX if we failed; the caller should emit a normal call,
4526 otherwise try to get the result in TARGET, if convenient. */
4528 static rtx
4529 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4531 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4533 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4534 CALL_EXPR_ARG (exp, 1));
4535 if (result)
4536 return expand_expr (result, target, mode, EXPAND_NORMAL);
4538 return NULL_RTX;
4541 /* Expand expression EXP, which is a call to the strcspn builtin.
4542 Return NULL_RTX if we failed; the caller should emit a normal call,
4543 otherwise try to get the result in TARGET, if convenient. */
4545 static rtx
4546 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4548 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4550 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4551 CALL_EXPR_ARG (exp, 1));
4552 if (result)
4553 return expand_expr (result, target, mode, EXPAND_NORMAL);
4555 return NULL_RTX;
4558 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4559 if that's convenient. */
rtx
4562 expand_builtin_saveregs (void)
4564 rtx val, seq;
4566 /* Don't do __builtin_saveregs more than once in a function.
4567 Save the result of the first call and reuse it. */
4568 if (saveregs_value != 0)
4569 return saveregs_value;
4571 /* When this function is called, it means that registers must be
4572 saved on entry to this function. So we migrate the call to the
4573 first insn of this function. */
4575 start_sequence ();
4577 /* Do whatever the machine needs done in this case. */
4578 val = targetm.calls.expand_builtin_saveregs ();
4580 seq = get_insns ();
4581 end_sequence ();
4583 saveregs_value = val;
4585 /* Put the insns after the NOTE that starts the function. If this
4586 is inside a start_sequence, make the outer-level insn chain current, so
4587 the code is placed at the start of the function. */
4588 push_topmost_sequence ();
4589 emit_insn_after (seq, entry_of_function ());
4590 pop_topmost_sequence ();
4592 return val;
4595 /* __builtin_args_info (N) returns word N of the arg space info
4596 for the current function. The number and meanings of words
4597 are controlled by the definition of CUMULATIVE_ARGS. */
4599 static rtx
4600 expand_builtin_args_info (tree exp)
4602 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4603 int *word_ptr = (int *) &crtl->args.info;
4605 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4607 if (call_expr_nargs (exp) != 0)
4609 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4610 error ("argument of %<__builtin_args_info%> must be constant");
4611 else
4613 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4615 if (wordnum < 0 || wordnum >= nwords)
4616 error ("argument of %<__builtin_args_info%> out of range");
4617 else
4618 return GEN_INT (word_ptr[wordnum]);
4621 else
4622 error ("missing argument in %<__builtin_args_info%>");
4624 return const0_rtx;
4627 /* Expand a call to __builtin_next_arg. */
4629 static rtx
4630 expand_builtin_next_arg (void)
4632 /* Checking arguments is already done in fold_builtin_next_arg
4633 that must be called before this function. */
4634 return expand_binop (ptr_mode, add_optab,
4635 crtl->args.internal_arg_pointer,
4636 crtl->args.arg_offset_rtx,
4637 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4640 /* Make it easier for the backends by protecting the valist argument
4641 from multiple evaluations. */
4643 static tree
4644 stabilize_va_list (tree valist, int needs_lvalue)
4646 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4648 gcc_assert (vatype != NULL_TREE);
4650 if (TREE_CODE (vatype) == ARRAY_TYPE)
4652 if (TREE_SIDE_EFFECTS (valist))
4653 valist = save_expr (valist);
4655 /* For this case, the backends will be expecting a pointer to
4656 vatype, but it's possible we've actually been given an array
4657 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4658 So fix it. */
4659 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4661 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4662 valist = build_fold_addr_expr_with_type (valist, p1);
4665 else
4667 tree pt;
4669 if (! needs_lvalue)
4671 if (! TREE_SIDE_EFFECTS (valist))
4672 return valist;
4674 pt = build_pointer_type (vatype);
4675 valist = fold_build1 (ADDR_EXPR, pt, valist);
4676 TREE_SIDE_EFFECTS (valist) = 1;
4679 if (TREE_SIDE_EFFECTS (valist))
4680 valist = save_expr (valist);
4681 valist = build_fold_indirect_ref (valist);
4684 return valist;
4687 /* The "standard" definition of va_list is void*. */
4689 tree
4690 std_build_builtin_va_list (void)
4692 return ptr_type_node;
4695 /* The "standard" abi va_list is va_list_type_node. */
4697 tree
4698 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4700 return va_list_type_node;
4703 /* The "standard" type of va_list is va_list_type_node. */
4705 tree
4706 std_canonical_va_list_type (tree type)
4708 tree wtype, htype;
4710 if (INDIRECT_REF_P (type))
4711 type = TREE_TYPE (type);
4712 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4713 type = TREE_TYPE (type);
4714 wtype = va_list_type_node;
4715 htype = type;
4716 /* Treat structure va_list types. */
4717 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4718 htype = TREE_TYPE (htype);
4719 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4721 /* If va_list is an array type, the argument may have decayed
4722 to a pointer type, e.g. by being passed to another function.
4723 In that case, unwrap both types so that we can compare the
4724 underlying records. */
4725 if (TREE_CODE (htype) == ARRAY_TYPE
4726 || POINTER_TYPE_P (htype))
4728 wtype = TREE_TYPE (wtype);
4729 htype = TREE_TYPE (htype);
4732 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4733 return va_list_type_node;
4735 return NULL_TREE;
4738 /* The "standard" implementation of va_start: just assign `nextarg' to
4739 the variable. */
4741 void
4742 std_expand_builtin_va_start (tree valist, rtx nextarg)
4744 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4745 convert_move (va_r, nextarg, 0);
4748 /* Expand EXP, a call to __builtin_va_start. */
4750 static rtx
4751 expand_builtin_va_start (tree exp)
4753 rtx nextarg;
4754 tree valist;
4756 if (call_expr_nargs (exp) < 2)
4758 error ("too few arguments to function %<va_start%>");
4759 return const0_rtx;
4762 if (fold_builtin_next_arg (exp, true))
4763 return const0_rtx;
4765 nextarg = expand_builtin_next_arg ();
4766 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4768 if (targetm.expand_builtin_va_start)
4769 targetm.expand_builtin_va_start (valist, nextarg);
4770 else
4771 std_expand_builtin_va_start (valist, nextarg);
4773 return const0_rtx;
4776 /* The "standard" implementation of va_arg: read the value from the
4777 current (padded) address and increment by the (padded) size. */
4779 tree
4780 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4781 gimple_seq *post_p)
4783 tree addr, t, type_size, rounded_size, valist_tmp;
4784 unsigned HOST_WIDE_INT align, boundary;
4785 bool indirect;
4787 #ifdef ARGS_GROW_DOWNWARD
4788 /* All of the alignment and movement below is for args-grow-up machines.
4789 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4790 implement their own specialized gimplify_va_arg_expr routines. */
4791 gcc_unreachable ();
4792 #endif
4794 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4795 if (indirect)
4796 type = build_pointer_type (type);
4798 align = PARM_BOUNDARY / BITS_PER_UNIT;
4799 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4801 /* When we align a parameter on the stack for the caller, a parameter
4802 whose alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT is only
4803 aligned to MAX_SUPPORTED_STACK_ALIGNMENT. Apply the same cap here
4804 so the callee matches the caller. */
4805 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4806 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4808 boundary /= BITS_PER_UNIT;
4810 /* Hoist the valist value into a temporary for the moment. */
4811 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4813 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4814 requires greater alignment, we must perform dynamic alignment. */
4815 if (boundary > align
4816 && !integer_zerop (TYPE_SIZE (type)))
4818 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4819 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4820 valist_tmp, size_int (boundary - 1)));
4821 gimplify_and_add (t, pre_p);
4823 t = fold_convert (sizetype, valist_tmp);
4824 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4825 fold_convert (TREE_TYPE (valist),
4826 fold_build2 (BIT_AND_EXPR, sizetype, t,
4827 size_int (-boundary))));
4828 gimplify_and_add (t, pre_p);
4830 else
4831 boundary = align;
4833 /* If the actual alignment is less than the alignment of the type,
4834 adjust the type accordingly so that we don't assume strict alignment
4835 when dereferencing the pointer. */
4836 boundary *= BITS_PER_UNIT;
4837 if (boundary < TYPE_ALIGN (type))
4839 type = build_variant_type_copy (type);
4840 TYPE_ALIGN (type) = boundary;
4843 /* Compute the rounded size of the type. */
4844 type_size = size_in_bytes (type);
4845 rounded_size = round_up (type_size, align);
4847 /* Reduce rounded_size so it's sharable with the postqueue. */
4848 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4850 /* Get AP. */
4851 addr = valist_tmp;
4852 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4854 /* Small args are padded downward. */
4855 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4856 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4857 size_binop (MINUS_EXPR, rounded_size, type_size));
4858 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4861 /* Compute new value for AP. */
4862 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4863 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4864 gimplify_and_add (t, pre_p);
4866 addr = fold_convert (build_pointer_type (type), addr);
4868 if (indirect)
4869 addr = build_va_arg_indirect_ref (addr);
4871 return build_va_arg_indirect_ref (addr);
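/* Illustrative sketch, not part of the original sources: ignoring the
   PAD_VARARGS_DOWN adjustment, the gimplified form built above behaves
   roughly like the following for an args-grow-up target:

       void *addr = ap;
       if (boundary > align)
         addr = (void *) (((uintptr_t) addr + boundary - 1)
                          & ~((uintptr_t) boundary - 1));
       ap = (char *) addr + rounded_size;
       result = *(type *) addr;

   where rounded_size is sizeof (type) rounded up to the parameter
   alignment, and an extra indirection is applied first when the argument
   is passed by reference.  */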
4874 /* Build an indirect-ref expression over the given TREE, which represents a
4875 piece of a va_arg() expansion. */
4876 tree
4877 build_va_arg_indirect_ref (tree addr)
4879 addr = build_fold_indirect_ref (addr);
4881 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4882 mf_mark (addr);
4884 return addr;
4887 /* Return a dummy expression of type TYPE in order to keep going after an
4888 error. */
4890 static tree
4891 dummy_object (tree type)
4893 tree t = build_int_cst (build_pointer_type (type), 0);
4894 return build1 (INDIRECT_REF, type, t);
4897 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4898 builtin function, but a very special sort of operator. */
4900 enum gimplify_status
4901 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4903 tree promoted_type, have_va_type;
4904 tree valist = TREE_OPERAND (*expr_p, 0);
4905 tree type = TREE_TYPE (*expr_p);
4906 tree t;
4908 /* Verify that valist is of the proper type. */
4909 have_va_type = TREE_TYPE (valist);
4910 if (have_va_type == error_mark_node)
4911 return GS_ERROR;
4912 have_va_type = targetm.canonical_va_list_type (have_va_type);
4914 if (have_va_type == NULL_TREE)
4916 error ("first argument to %<va_arg%> not of type %<va_list%>");
4917 return GS_ERROR;
4920 /* Generate a diagnostic for requesting data of a type that cannot
4921 be passed through `...' due to type promotion at the call site. */
4922 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4923 != type)
4925 static bool gave_help;
4926 bool warned;
4928 /* Unfortunately, this is merely undefined, rather than a constraint
4929 violation, so we cannot make this an error. If this call is never
4930 executed, the program is still strictly conforming. */
4931 warned = warning (0, "%qT is promoted to %qT when passed through %<...%>",
4932 type, promoted_type);
4933 if (!gave_help && warned)
4935 gave_help = true;
4936 inform (input_location, "(so you should pass %qT not %qT to %<va_arg%>)",
4937 promoted_type, type);
4940 /* We can, however, treat "undefined" any way we please.
4941 Call abort to encourage the user to fix the program. */
4942 if (warned)
4943 inform (input_location, "if this code is reached, the program will abort");
4944 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4945 gimplify_and_add (t, pre_p);
4947 /* This is dead code, but go ahead and finish so that the
4948 mode of the result comes out right. */
4949 *expr_p = dummy_object (type);
4950 return GS_ALL_DONE;
4952 else
4954 /* Make it easier for the backends by protecting the valist argument
4955 from multiple evaluations. */
4956 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4958 /* For this case, the backends will be expecting a pointer to
4959 TREE_TYPE (abi), but it's possible we've
4960 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4961 So fix it. */
4962 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4964 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4965 valist = build_fold_addr_expr_with_type (valist, p1);
4968 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4970 else
4971 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4973 if (!targetm.gimplify_va_arg_expr)
4974 /* FIXME: Once most targets are converted we should merely
4975 assert this is non-null. */
4976 return GS_ALL_DONE;
4978 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4979 return GS_OK;
4983 /* Expand EXP, a call to __builtin_va_end. */
4985 static rtx
4986 expand_builtin_va_end (tree exp)
4988 tree valist = CALL_EXPR_ARG (exp, 0);
4990 /* Evaluate for side effects, if needed. I hate macros that don't
4991 do that. */
4992 if (TREE_SIDE_EFFECTS (valist))
4993 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4995 return const0_rtx;
4998 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4999 builtin rather than just as an assignment in stdarg.h because of the
5000 nastiness of array-type va_list types. */
5002 static rtx
5003 expand_builtin_va_copy (tree exp)
5005 tree dst, src, t;
5007 dst = CALL_EXPR_ARG (exp, 0);
5008 src = CALL_EXPR_ARG (exp, 1);
5010 dst = stabilize_va_list (dst, 1);
5011 src = stabilize_va_list (src, 0);
5013 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5015 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5017 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5018 TREE_SIDE_EFFECTS (t) = 1;
5019 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5021 else
5023 rtx dstb, srcb, size;
5025 /* Evaluate to pointers. */
5026 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5027 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5028 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5029 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5031 dstb = convert_memory_address (Pmode, dstb);
5032 srcb = convert_memory_address (Pmode, srcb);
5034 /* "Dereference" to BLKmode memories. */
5035 dstb = gen_rtx_MEM (BLKmode, dstb);
5036 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5037 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5038 srcb = gen_rtx_MEM (BLKmode, srcb);
5039 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5040 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5042 /* Copy. */
5043 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5046 return const0_rtx;
5049 /* Expand a call to one of the builtin functions __builtin_frame_address or
5050 __builtin_return_address. */
5052 static rtx
5053 expand_builtin_frame_address (tree fndecl, tree exp)
5055 /* The argument must be a nonnegative integer constant.
5056 It counts the number of frames to scan up the stack.
5057 The value is the return address saved in that frame. */
5058 if (call_expr_nargs (exp) == 0)
5059 /* Warning about missing arg was already issued. */
5060 return const0_rtx;
5061 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5063 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5064 error ("invalid argument to %<__builtin_frame_address%>");
5065 else
5066 error ("invalid argument to %<__builtin_return_address%>");
5067 return const0_rtx;
5069 else
5071 rtx tem
5072 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5073 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5075 /* Some ports cannot access arbitrary stack frames. */
5076 if (tem == NULL)
5078 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5079 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5080 else
5081 warning (0, "unsupported argument to %<__builtin_return_address%>");
5082 return const0_rtx;
5085 /* For __builtin_frame_address, return what we've got. */
5086 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5087 return tem;
5089 if (!REG_P (tem)
5090 && ! CONSTANT_P (tem))
5091 tem = copy_to_mode_reg (Pmode, tem);
5092 return tem;
5096 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5097 we failed and the caller should emit a normal call, otherwise try to get
5098 the result in TARGET, if convenient. */
5100 static rtx
5101 expand_builtin_alloca (tree exp, rtx target)
5103 rtx op0;
5104 rtx result;
5106 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5107 should always expand to function calls. These can be intercepted
5108 in libmudflap. */
5109 if (flag_mudflap)
5110 return NULL_RTX;
5112 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5113 return NULL_RTX;
5115 /* Compute the argument. */
5116 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5118 /* Allocate the desired space. */
5119 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5120 result = convert_memory_address (ptr_mode, result);
5122 return result;
5125 /* Expand EXP, a call to a bswap builtin. Place the result in TARGET if
5126 convenient; SUBTARGET may be used when computing the operand. */
5128 static rtx
5129 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5131 enum machine_mode mode;
5132 tree arg;
5133 rtx op0;
5135 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5136 return NULL_RTX;
5138 arg = CALL_EXPR_ARG (exp, 0);
5139 mode = TYPE_MODE (TREE_TYPE (arg));
5140 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5142 target = expand_unop (mode, bswap_optab, op0, target, 1);
5144 gcc_assert (target);
5146 return convert_to_mode (mode, target, 0);
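/* Illustrative note, not part of the original sources: the optab simply
   reverses the byte order of the operand in its own mode, e.g.

       __builtin_bswap32 (0x12345678) == 0x78563412

   so this expander is a thin wrapper that forwards to expand_unop and
   converts the result back to the argument's mode.  */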
5149 /* Expand a call to a unary builtin in EXP.
5150 Return NULL_RTX if a normal call should be emitted rather than expanding the
5151 function in-line. If convenient, the result should be placed in TARGET.
5152 SUBTARGET may be used as the target for computing one of EXP's operands. */
5154 static rtx
5155 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5156 rtx subtarget, optab op_optab)
5158 rtx op0;
5160 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5161 return NULL_RTX;
5163 /* Compute the argument. */
5164 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5165 VOIDmode, EXPAND_NORMAL);
5166 /* Compute op, into TARGET if possible.
5167 Set TARGET to wherever the result comes back. */
5168 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5169 op_optab, op0, target, 1);
5170 gcc_assert (target);
5172 return convert_to_mode (target_mode, target, 0);
5175 /* If the string passed to fputs is a constant and is one character
5176 long, we attempt to transform this call into __builtin_fputc(). */
5178 static rtx
5179 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5181 /* Verify the arguments in the original call. */
5182 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5184 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5185 CALL_EXPR_ARG (exp, 1),
5186 (target == const0_rtx),
5187 unlocked, NULL_TREE);
5188 if (result)
5189 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5191 return NULL_RTX;
5194 /* Expand a call to __builtin_expect. We just return our argument
5195 as the builtin_expect semantics should already have been handled by
5196 the tree branch prediction pass. */
5198 static rtx
5199 expand_builtin_expect (tree exp, rtx target)
5201 tree arg, c;
5203 if (call_expr_nargs (exp) < 2)
5204 return const0_rtx;
5205 arg = CALL_EXPR_ARG (exp, 0);
5206 c = CALL_EXPR_ARG (exp, 1);
5208 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5209 /* When guessing was done, the hints should be already stripped away. */
5210 gcc_assert (!flag_guess_branch_prob
5211 || optimize == 0 || errorcount || sorrycount);
5212 return target;
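/* Illustrative note, not part of the original sources: by the time RTL
   expansion runs, a use such as

       if (__builtin_expect (p == NULL, 0))
         handle_rare_case ();

   has already had its hint consumed by the tree-level branch predictor,
   so this expander just evaluates and returns the first argument.  */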
5215 void
5216 expand_builtin_trap (void)
5218 #ifdef HAVE_trap
5219 if (HAVE_trap)
5220 emit_insn (gen_trap ());
5221 else
5222 #endif
5223 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5224 emit_barrier ();
5227 /* Expand EXP, a call to fabs, fabsf or fabsl.
5228 Return NULL_RTX if a normal call should be emitted rather than expanding
5229 the function inline. If convenient, the result should be placed
5230 in TARGET. SUBTARGET may be used as the target for computing
5231 the operand. */
5233 static rtx
5234 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5236 enum machine_mode mode;
5237 tree arg;
5238 rtx op0;
5240 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5241 return NULL_RTX;
5243 arg = CALL_EXPR_ARG (exp, 0);
5244 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5245 mode = TYPE_MODE (TREE_TYPE (arg));
5246 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5247 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5250 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5251 Return NULL_RTX if a normal call should be emitted rather than expanding the
5252 function inline. If convenient, the result should be placed in TARGET.
5253 SUBTARGET may be used as the target for computing the operand. */
5255 static rtx
5256 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5258 rtx op0, op1;
5259 tree arg;
5261 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5262 return NULL_RTX;
5264 arg = CALL_EXPR_ARG (exp, 0);
5265 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5267 arg = CALL_EXPR_ARG (exp, 1);
5268 op1 = expand_normal (arg);
5270 return expand_copysign (op0, op1, target);
5273 /* Create a new constant string literal and return a char* pointer to it.
5274 The STRING_CST value is the LEN characters at STR. */
5275 tree
5276 build_string_literal (int len, const char *str)
5278 tree t, elem, index, type;
5280 t = build_string (len, str);
5281 elem = build_type_variant (char_type_node, 1, 0);
5282 index = build_index_type (size_int (len - 1));
5283 type = build_array_type (elem, index);
5284 TREE_TYPE (t) = type;
5285 TREE_CONSTANT (t) = 1;
5286 TREE_READONLY (t) = 1;
5287 TREE_STATIC (t) = 1;
5289 type = build_pointer_type (elem);
5290 t = build1 (ADDR_EXPR, type,
5291 build4 (ARRAY_REF, elem,
5292 t, integer_zero_node, NULL_TREE, NULL_TREE));
5293 return t;
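/* Illustrative example, not part of the GCC sources: a call such as

     build_string_literal (6, "hello")

   builds a tree equivalent to the C expression &"hello"[0], i.e. the address
   of a constant, NUL-terminated character array; the length of 6 counts the
   terminating NUL.  */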
5296 /* Expand EXP, a call to printf or printf_unlocked.
5297 Return NULL_RTX if a normal call should be emitted rather than transforming
5298 the function inline. If convenient, the result should be placed in
5299 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5300 call. */
5301 static rtx
5302 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5303 bool unlocked)
5305 /* If we're using an unlocked function, assume the other unlocked
5306 functions exist explicitly. */
5307 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5308 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5309 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5310 : implicit_built_in_decls[BUILT_IN_PUTS];
5311 const char *fmt_str;
5312 tree fn = 0;
5313 tree fmt, arg;
5314 int nargs = call_expr_nargs (exp);
5316 /* If the return value is used, don't do the transformation. */
5317 if (target != const0_rtx)
5318 return NULL_RTX;
5320 /* Verify the required arguments in the original call. */
5321 if (nargs == 0)
5322 return NULL_RTX;
5323 fmt = CALL_EXPR_ARG (exp, 0);
5324 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5325 return NULL_RTX;
5327 /* Check whether the format is a literal string constant. */
5328 fmt_str = c_getstr (fmt);
5329 if (fmt_str == NULL)
5330 return NULL_RTX;
5332 if (!init_target_chars ())
5333 return NULL_RTX;
5335 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5336 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5338 if ((nargs != 2)
5339 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5340 return NULL_RTX;
5341 if (fn_puts)
5342 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5344 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5345 else if (strcmp (fmt_str, target_percent_c) == 0)
5347 if ((nargs != 2)
5348 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5349 return NULL_RTX;
5350 if (fn_putchar)
5351 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5353 else
5355 /* We can't handle anything else with % args or %% ... yet. */
5356 if (strchr (fmt_str, target_percent))
5357 return NULL_RTX;
5359 if (nargs > 1)
5360 return NULL_RTX;
5362 /* If the format specifier was "", printf does nothing. */
5363 if (fmt_str[0] == '\0')
5364 return const0_rtx;
5365 /* If the format specifier has length of 1, call putchar. */
5366 if (fmt_str[1] == '\0')
5368 /* Given printf("c") (where c is any one character), convert
5369 "c"[0] to an int and pass that to the replacement
5370 function. */
5371 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5372 if (fn_putchar)
5373 fn = build_call_expr (fn_putchar, 1, arg);
5375 else
5377 /* If the format specifier was "string\n", call puts("string"). */
5378 size_t len = strlen (fmt_str);
5379 if ((unsigned char)fmt_str[len - 1] == target_newline)
5381 /* Create a NUL-terminated string that's one char shorter
5382 than the original, stripping off the trailing '\n'. */
5383 char *newstr = XALLOCAVEC (char, len);
5384 memcpy (newstr, fmt_str, len - 1);
5385 newstr[len - 1] = 0;
5386 arg = build_string_literal (len, newstr);
5387 if (fn_puts)
5388 fn = build_call_expr (fn_puts, 1, arg);
5390 else
5391 /* We'd like to arrange to call fputs(string,stdout) here,
5392 but we need stdout and don't have a way to get it yet. */
5393 return NULL_RTX;
5397 if (!fn)
5398 return NULL_RTX;
5399 if (TREE_CODE (fn) == CALL_EXPR)
5400 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5401 return expand_expr (fn, target, mode, EXPAND_NORMAL);
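/* Illustrative rewrites, not part of the GCC sources, performed by the
   function above when the printf result is ignored (s and c below are
   hypothetical):

     printf ("%s\n", s);    ==>   puts (s);
     printf ("%c", c);      ==>   putchar (c);
     printf ("hi\n");       ==>   puts ("hi");
     printf ("x");          ==>   putchar ('x');
     printf ("");           ==>   no code at all  */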
5404 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5405 Return NULL_RTX if a normal call should be emitted rather than transforming
5406 the function inline. If convenient, the result should be placed in
5407 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5408 call. */
5409 static rtx
5410 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5411 bool unlocked)
5413 /* If we're using an unlocked function, assume the other unlocked
5414 functions exist explicitly. */
5415 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5416 : implicit_built_in_decls[BUILT_IN_FPUTC];
5417 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5418 : implicit_built_in_decls[BUILT_IN_FPUTS];
5419 const char *fmt_str;
5420 tree fn = 0;
5421 tree fmt, fp, arg;
5422 int nargs = call_expr_nargs (exp);
5424 /* If the return value is used, don't do the transformation. */
5425 if (target != const0_rtx)
5426 return NULL_RTX;
5428 /* Verify the required arguments in the original call. */
5429 if (nargs < 2)
5430 return NULL_RTX;
5431 fp = CALL_EXPR_ARG (exp, 0);
5432 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5433 return NULL_RTX;
5434 fmt = CALL_EXPR_ARG (exp, 1);
5435 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5436 return NULL_RTX;
5438 /* Check whether the format is a literal string constant. */
5439 fmt_str = c_getstr (fmt);
5440 if (fmt_str == NULL)
5441 return NULL_RTX;
5443 if (!init_target_chars ())
5444 return NULL_RTX;
5446 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5447 if (strcmp (fmt_str, target_percent_s) == 0)
5449 if ((nargs != 3)
5450 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5451 return NULL_RTX;
5452 arg = CALL_EXPR_ARG (exp, 2);
5453 if (fn_fputs)
5454 fn = build_call_expr (fn_fputs, 2, arg, fp);
5456 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5457 else if (strcmp (fmt_str, target_percent_c) == 0)
5459 if ((nargs != 3)
5460 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5461 return NULL_RTX;
5462 arg = CALL_EXPR_ARG (exp, 2);
5463 if (fn_fputc)
5464 fn = build_call_expr (fn_fputc, 2, arg, fp);
5466 else
5468 /* We can't handle anything else with % args or %% ... yet. */
5469 if (strchr (fmt_str, target_percent))
5470 return NULL_RTX;
5472 if (nargs > 2)
5473 return NULL_RTX;
5475 /* If the format specifier was "", fprintf does nothing. */
5476 if (fmt_str[0] == '\0')
5478 /* Evaluate and ignore FILE* argument for side-effects. */
5479 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5480 return const0_rtx;
5483 /* When "string" doesn't contain %, replace all cases of
5484 fprintf(stream,string) with fputs(string,stream). The fputs
5485 builtin will take care of special cases like length == 1. */
5486 if (fn_fputs)
5487 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5490 if (!fn)
5491 return NULL_RTX;
5492 if (TREE_CODE (fn) == CALL_EXPR)
5493 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5494 return expand_expr (fn, target, mode, EXPAND_NORMAL);
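/* Illustrative rewrites, not part of the GCC sources, performed by the
   function above when the fprintf result is ignored (fp, s and c are
   hypothetical):

     fprintf (fp, "%s", s);   ==>   fputs (s, fp);
     fprintf (fp, "%c", c);   ==>   fputc (c, fp);
     fprintf (fp, "hi");      ==>   fputs ("hi", fp);
     fprintf (fp, "");        ==>   evaluate FP for side effects only  */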
5497 /* Expand a call EXP to sprintf. Return NULL_RTX if
5498 a normal call should be emitted rather than expanding the function
5499 inline. If convenient, the result should be placed in TARGET with
5500 mode MODE. */
5502 static rtx
5503 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5505 tree dest, fmt;
5506 const char *fmt_str;
5507 int nargs = call_expr_nargs (exp);
5509 /* Verify the required arguments in the original call. */
5510 if (nargs < 2)
5511 return NULL_RTX;
5512 dest = CALL_EXPR_ARG (exp, 0);
5513 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5514 return NULL_RTX;
5515 fmt = CALL_EXPR_ARG (exp, 1);
5516 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5517 return NULL_RTX;
5519 /* Check whether the format is a literal string constant. */
5520 fmt_str = c_getstr (fmt);
5521 if (fmt_str == NULL)
5522 return NULL_RTX;
5524 if (!init_target_chars ())
5525 return NULL_RTX;
5527 /* If the format doesn't contain % args or %%, use strcpy. */
5528 if (strchr (fmt_str, target_percent) == 0)
5530 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5531 tree exp;
5533 if ((nargs > 2) || ! fn)
5534 return NULL_RTX;
5535 expand_expr (build_call_expr (fn, 2, dest, fmt),
5536 const0_rtx, VOIDmode, EXPAND_NORMAL);
5537 if (target == const0_rtx)
5538 return const0_rtx;
5539 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5540 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5542 /* If the format is "%s", use strcpy if the result isn't used. */
5543 else if (strcmp (fmt_str, target_percent_s) == 0)
5545 tree fn, arg, len;
5546 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5548 if (! fn)
5549 return NULL_RTX;
5550 if (nargs != 3)
5551 return NULL_RTX;
5552 arg = CALL_EXPR_ARG (exp, 2);
5553 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5554 return NULL_RTX;
5556 if (target != const0_rtx)
5558 len = c_strlen (arg, 1);
5559 if (! len || TREE_CODE (len) != INTEGER_CST)
5560 return NULL_RTX;
5562 else
5563 len = NULL_TREE;
5565 expand_expr (build_call_expr (fn, 2, dest, arg),
5566 const0_rtx, VOIDmode, EXPAND_NORMAL);
5568 if (target == const0_rtx)
5569 return const0_rtx;
5570 return expand_expr (len, target, mode, EXPAND_NORMAL);
5573 return NULL_RTX;
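/* Illustrative rewrites, not part of the GCC sources (buf and s are
   hypothetical):

     sprintf (buf, "hello");   ==>   strcpy (buf, "hello");  result folds to 5
     sprintf (buf, "%s", s);   ==>   strcpy (buf, s);

   The second form is only used when the result is unused or the length of S
   is a compile-time constant, as checked above.  */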
5576 /* Expand a call to either the entry or exit function profiler. */
5578 static rtx
5579 expand_builtin_profile_func (bool exitp)
5581 rtx this_rtx, which;
5583 this_rtx = DECL_RTL (current_function_decl);
5584 gcc_assert (MEM_P (this_rtx));
5585 this_rtx = XEXP (this_rtx, 0);
5587 if (exitp)
5588 which = profile_function_exit_libfunc;
5589 else
5590 which = profile_function_entry_libfunc;
5592 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5593 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5595 Pmode);
5597 return const0_rtx;
5600 /* Expand a call to __builtin___clear_cache. */
5602 static rtx
5603 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5605 #ifndef HAVE_clear_cache
5606 #ifdef CLEAR_INSN_CACHE
5607 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5608 does something. Just do the default expansion to a call to
5609 __clear_cache(). */
5610 return NULL_RTX;
5611 #else
5612 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5613 does nothing. There is no need to call it. Do nothing. */
5614 return const0_rtx;
5615 #endif /* CLEAR_INSN_CACHE */
5616 #else
5617 /* We have a "clear_cache" insn, and it will handle everything. */
5618 tree begin, end;
5619 rtx begin_rtx, end_rtx;
5620 enum insn_code icode;
5622 /* We must not expand to a library call. If we did, any
5623 fallback library function in libgcc that might contain a call to
5624 __builtin___clear_cache() would recurse infinitely. */
5625 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5627 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5628 return const0_rtx;
5631 if (HAVE_clear_cache)
5633 icode = CODE_FOR_clear_cache;
5635 begin = CALL_EXPR_ARG (exp, 0);
5636 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5637 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5638 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5639 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5641 end = CALL_EXPR_ARG (exp, 1);
5642 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5643 end_rtx = convert_memory_address (Pmode, end_rtx);
5644 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5645 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5647 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5649 return const0_rtx;
5650 #endif /* HAVE_clear_cache */
5653 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5655 static rtx
5656 round_trampoline_addr (rtx tramp)
5658 rtx temp, addend, mask;
5660 /* If we don't need too much alignment, we'll have been guaranteed
5661 proper alignment by get_trampoline_type. */
5662 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5663 return tramp;
5665 /* Round address up to desired boundary. */
5666 temp = gen_reg_rtx (Pmode);
5667 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5668 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5670 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5671 temp, 0, OPTAB_LIB_WIDEN);
5672 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5673 temp, 0, OPTAB_LIB_WIDEN);
5675 return tramp;
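/* Illustrative arithmetic, not part of the GCC sources: assuming a
   hypothetical TRAMPOLINE_ALIGNMENT of 64 bits, ADDEND is 7 and MASK is -8,
   so the two binops above compute

     tramp = (tramp + 7) & -8;

   rounding the address up to the next 8-byte boundary.  */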
5678 static rtx
5679 expand_builtin_init_trampoline (tree exp)
5681 tree t_tramp, t_func, t_chain;
5682 rtx r_tramp, r_func, r_chain;
5683 #ifdef TRAMPOLINE_TEMPLATE
5684 rtx blktramp;
5685 #endif
5687 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5688 POINTER_TYPE, VOID_TYPE))
5689 return NULL_RTX;
5691 t_tramp = CALL_EXPR_ARG (exp, 0);
5692 t_func = CALL_EXPR_ARG (exp, 1);
5693 t_chain = CALL_EXPR_ARG (exp, 2);
5695 r_tramp = expand_normal (t_tramp);
5696 r_func = expand_normal (t_func);
5697 r_chain = expand_normal (t_chain);
5699 /* Generate insns to initialize the trampoline. */
5700 r_tramp = round_trampoline_addr (r_tramp);
5701 #ifdef TRAMPOLINE_TEMPLATE
5702 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5703 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5704 emit_block_move (blktramp, assemble_trampoline_template (),
5705 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5706 #endif
5707 trampolines_created = 1;
5708 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5710 return const0_rtx;
5713 static rtx
5714 expand_builtin_adjust_trampoline (tree exp)
5716 rtx tramp;
5718 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5719 return NULL_RTX;
5721 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5722 tramp = round_trampoline_addr (tramp);
5723 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5724 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5725 #endif
5727 return tramp;
5730 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5731 function. The function first checks whether the back end provides
5732 an insn to implement signbit for the respective mode. If not, it
5733 checks whether the floating point format of the value is such that
5734 the sign bit can be extracted. If that is not the case, the
5735 function returns NULL_RTX to indicate that a normal call should be
5736 emitted rather than expanding the function in-line. EXP is the
5737 expression that is a call to the builtin function; if convenient,
5738 the result should be placed in TARGET. */
5739 static rtx
5740 expand_builtin_signbit (tree exp, rtx target)
5742 const struct real_format *fmt;
5743 enum machine_mode fmode, imode, rmode;
5744 HOST_WIDE_INT hi, lo;
5745 tree arg;
5746 int word, bitpos;
5747 enum insn_code icode;
5748 rtx temp;
5750 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5751 return NULL_RTX;
5753 arg = CALL_EXPR_ARG (exp, 0);
5754 fmode = TYPE_MODE (TREE_TYPE (arg));
5755 rmode = TYPE_MODE (TREE_TYPE (exp));
5756 fmt = REAL_MODE_FORMAT (fmode);
5758 arg = builtin_save_expr (arg);
5760 /* Expand the argument, yielding an RTX expression. */
5761 temp = expand_normal (arg);
5763 /* Check if the back end provides an insn that handles signbit for the
5764 argument's mode. */
5765 icode = signbit_optab->handlers [(int) fmode].insn_code;
5766 if (icode != CODE_FOR_nothing)
5768 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5769 emit_unop_insn (icode, target, temp, UNKNOWN);
5770 return target;
5773 /* For floating point formats without a sign bit, implement signbit
5774 as "ARG < 0.0". */
5775 bitpos = fmt->signbit_ro;
5776 if (bitpos < 0)
5778 /* But we can't do this if the format supports signed zero. */
5779 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5780 return NULL_RTX;
5782 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5783 build_real (TREE_TYPE (arg), dconst0));
5784 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5787 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5789 imode = int_mode_for_mode (fmode);
5790 if (imode == BLKmode)
5791 return NULL_RTX;
5792 temp = gen_lowpart (imode, temp);
5794 else
5796 imode = word_mode;
5797 /* Handle targets with different FP word orders. */
5798 if (FLOAT_WORDS_BIG_ENDIAN)
5799 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5800 else
5801 word = bitpos / BITS_PER_WORD;
5802 temp = operand_subword_force (temp, word, fmode);
5803 bitpos = bitpos % BITS_PER_WORD;
5806 /* Force the intermediate word_mode (or narrower) result into a
5807 register. This avoids attempting to create paradoxical SUBREGs
5808 of floating point modes below. */
5809 temp = force_reg (imode, temp);
5811 /* If the bitpos is within the "result mode" lowpart, the operation
5812 can be implemented with a single bitwise AND. Otherwise, we need
5813 a right shift and an AND. */
5815 if (bitpos < GET_MODE_BITSIZE (rmode))
5817 if (bitpos < HOST_BITS_PER_WIDE_INT)
5819 hi = 0;
5820 lo = (HOST_WIDE_INT) 1 << bitpos;
5822 else
5824 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5825 lo = 0;
5828 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5829 temp = gen_lowpart (rmode, temp);
5830 temp = expand_binop (rmode, and_optab, temp,
5831 immed_double_const (lo, hi, rmode),
5832 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5834 else
5836 /* Perform a logical right shift to place the signbit in the least
5837 significant bit, then truncate the result to the desired mode
5838 and mask just this bit. */
5839 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5840 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5841 temp = gen_lowpart (rmode, temp);
5842 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5843 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5846 return temp;
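/* Illustrative example, not part of the GCC sources: for IEEE single
   precision with an int result on a typical target, BITPOS is 31 and the
   fallback path above reduces signbit (x) to roughly

     bits & 0x80000000

   where BITS stands for the 32-bit integer image of X; when the sign bit
   does not fit in the lowpart of the result mode (e.g. double on a 64-bit
   word target, BITPOS 63), the shift path is used instead:

     (bits >> 63) & 1  */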
5849 /* Expand fork or exec calls. TARGET is the desired target of the
5850 call. EXP is the call. FN is the
5851 identifier of the actual function. IGNORE is nonzero if the
5852 value is to be ignored. */
5854 static rtx
5855 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5857 tree id, decl;
5858 tree call;
5860 /* If we are not profiling, just call the function. */
5861 if (!profile_arc_flag)
5862 return NULL_RTX;
5864 /* Otherwise call the wrapper. This should be equivalent for the rest of
5865 the compiler, so the code does not diverge, and the wrapper may run the
5866 code necessary for keeping the profiling sane. */
5868 switch (DECL_FUNCTION_CODE (fn))
5870 case BUILT_IN_FORK:
5871 id = get_identifier ("__gcov_fork");
5872 break;
5874 case BUILT_IN_EXECL:
5875 id = get_identifier ("__gcov_execl");
5876 break;
5878 case BUILT_IN_EXECV:
5879 id = get_identifier ("__gcov_execv");
5880 break;
5882 case BUILT_IN_EXECLP:
5883 id = get_identifier ("__gcov_execlp");
5884 break;
5886 case BUILT_IN_EXECLE:
5887 id = get_identifier ("__gcov_execle");
5888 break;
5890 case BUILT_IN_EXECVP:
5891 id = get_identifier ("__gcov_execvp");
5892 break;
5894 case BUILT_IN_EXECVE:
5895 id = get_identifier ("__gcov_execve");
5896 break;
5898 default:
5899 gcc_unreachable ();
5902 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5903 DECL_EXTERNAL (decl) = 1;
5904 TREE_PUBLIC (decl) = 1;
5905 DECL_ARTIFICIAL (decl) = 1;
5906 TREE_NOTHROW (decl) = 1;
5907 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5908 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5909 call = rewrite_call_expr (exp, 0, decl, 0);
5910 return expand_call (call, target, ignore);
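/* Illustrative example, not part of the GCC sources: when compiling with
   -fprofile-arcs, a plain call such as

     pid_t pid = fork ();

   is rewritten above into a call to __gcov_fork (), the libgcov wrapper that
   keeps the profiling counters sane across the fork; without profiling the
   call is left alone.  */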
5915 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5916 the pointer in these functions is void*, the tree optimizers may remove
5917 casts. The mode computed in expand_builtin isn't reliable either, due
5918 to __sync_bool_compare_and_swap.
5920 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5921 group of builtins. This gives us log2 of the mode size. */
5923 static inline enum machine_mode
5924 get_builtin_sync_mode (int fcode_diff)
5926 /* The size is not negotiable, so ask not to get BLKmode in return
5927 if the target indicates that a smaller size would be better. */
5928 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
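/* Illustrative example, not part of the GCC sources: for
   __sync_fetch_and_add_4, FCODE_DIFF is 2, so the requested size is
   BITS_PER_UNIT << 2 == 32 bits, which maps to SImode on a typical target
   with 8-bit units.  */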
5931 /* Expand the memory expression LOC and return the appropriate memory operand
5932 for the builtin_sync operations. */
5934 static rtx
5935 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5937 rtx addr, mem;
5939 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5941 /* Note that we explicitly do not want any alias information for this
5942 memory, so that we kill all other live memories. Otherwise we don't
5943 satisfy the full barrier semantics of the intrinsic. */
5944 mem = validize_mem (gen_rtx_MEM (mode, addr));
5946 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5947 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5948 MEM_VOLATILE_P (mem) = 1;
5950 return mem;
5953 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5954 EXP is the CALL_EXPR. CODE is the rtx code
5955 that corresponds to the arithmetic or logical operation from the name;
5956 an exception here is that NOT actually means NAND. TARGET is an optional
5957 place for us to store the results; AFTER is true if this is the
5958 xxx_and_fetch form, i.e. the value after the operation is returned. IGNORE is true if we don't actually care about
5959 the result of the operation at all. */
5961 static rtx
5962 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5963 enum rtx_code code, bool after,
5964 rtx target, bool ignore)
5966 rtx val, mem;
5967 enum machine_mode old_mode;
5969 /* Expand the operands. */
5970 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5972 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5973 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5974 of CONST_INTs, where we know the old_mode only from the call argument. */
5975 old_mode = GET_MODE (val);
5976 if (old_mode == VOIDmode)
5977 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5978 val = convert_modes (mode, old_mode, val, 1);
5980 if (ignore)
5981 return expand_sync_operation (mem, val, code);
5982 else
5983 return expand_sync_fetch_operation (mem, val, code, after, target);
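/* Illustrative semantics, not part of the GCC sources (v and x are
   hypothetical):

     old_value = __sync_fetch_and_add (&v, x);    (AFTER is false)
     new_value = __sync_add_and_fetch (&v, x);    (AFTER is true)

   Both perform the same atomic addition; they differ only in whether the
   value before or after the operation is returned.  */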
5986 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5987 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5988 true if this is the boolean form. TARGET is a place for us to store the
5989 results; this is NOT optional if IS_BOOL is true. */
5991 static rtx
5992 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5993 bool is_bool, rtx target)
5995 rtx old_val, new_val, mem;
5996 enum machine_mode old_mode;
5998 /* Expand the operands. */
5999 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6002 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6003 mode, EXPAND_NORMAL);
6004 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6005 of CONST_INTs, where we know the old_mode only from the call argument. */
6006 old_mode = GET_MODE (old_val);
6007 if (old_mode == VOIDmode)
6008 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6009 old_val = convert_modes (mode, old_mode, old_val, 1);
6011 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6012 mode, EXPAND_NORMAL);
6013 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6014 of CONST_INTs, where we know the old_mode only from the call argument. */
6015 old_mode = GET_MODE (new_val);
6016 if (old_mode == VOIDmode)
6017 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6018 new_val = convert_modes (mode, old_mode, new_val, 1);
6020 if (is_bool)
6021 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6022 else
6023 return expand_val_compare_and_swap (mem, old_val, new_val, target);
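/* Illustrative semantics, not part of the GCC sources (v, expected and
   desired are hypothetical):

     old = __sync_val_compare_and_swap (&v, expected, desired);
     ok  = __sync_bool_compare_and_swap (&v, expected, desired);

   Both atomically store DESIRED into V only if V equals EXPECTED; the val
   form returns the previous contents of V, while the bool form returns
   nonzero iff the store actually happened.  */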
6026 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6027 general form is actually an atomic exchange, and some targets only
6028 support a reduced form with the second argument being a constant 1.
6029 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6030 the results. */
6032 static rtx
6033 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6034 rtx target)
6036 rtx val, mem;
6037 enum machine_mode old_mode;
6039 /* Expand the operands. */
6040 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6041 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6042 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6043 of CONST_INTs, where we know the old_mode only from the call argument. */
6044 old_mode = GET_MODE (val);
6045 if (old_mode == VOIDmode)
6046 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6047 val = convert_modes (mode, old_mode, val, 1);
6049 return expand_sync_lock_test_and_set (mem, val, target);
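/* Illustrative usage, not part of the GCC sources (lock is hypothetical):

     while (__sync_lock_test_and_set (&lock, 1))
       ;   (spin until the previous value was 0)

   The builtin atomically stores the new value and returns the old one; as
   noted above, some targets only support storing the constant 1.  */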
6052 /* Expand the __sync_synchronize intrinsic. */
6054 static void
6055 expand_builtin_synchronize (void)
6057 tree x;
6059 #ifdef HAVE_memory_barrier
6060 if (HAVE_memory_barrier)
6062 emit_insn (gen_memory_barrier ());
6063 return;
6065 #endif
6067 if (synchronize_libfunc != NULL_RTX)
6069 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6070 return;
6073 /* If no explicit memory barrier instruction is available, create an
6074 empty asm stmt with a memory clobber. */
6075 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6076 tree_cons (NULL, build_string (6, "memory"), NULL));
6077 ASM_VOLATILE_P (x) = 1;
6078 expand_asm_expr (x);
6081 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6083 static void
6084 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6086 enum insn_code icode;
6087 rtx mem, insn;
6088 rtx val = const0_rtx;
6090 /* Expand the operands. */
6091 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6093 /* If there is an explicit operation in the md file, use it. */
6094 icode = sync_lock_release[mode];
6095 if (icode != CODE_FOR_nothing)
6097 if (!insn_data[icode].operand[1].predicate (val, mode))
6098 val = force_reg (mode, val);
6100 insn = GEN_FCN (icode) (mem, val);
6101 if (insn)
6103 emit_insn (insn);
6104 return;
6108 /* Otherwise we can implement this operation by emitting a barrier
6109 followed by a store of zero. */
6110 expand_builtin_synchronize ();
6111 emit_move_insn (mem, val);
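/* Illustrative fallback, not part of the GCC sources: without a dedicated
   sync_lock_release pattern, __sync_lock_release (&lock) ends up as roughly

     __sync_synchronize ();
     lock = 0;

   matching the barrier-plus-store sequence emitted just above (lock is a
   hypothetical variable).  */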
6114 /* Expand an expression EXP that calls a built-in function,
6115 with result going to TARGET if that's convenient
6116 (and in mode MODE if that's convenient).
6117 SUBTARGET may be used as the target for computing one of EXP's operands.
6118 IGNORE is nonzero if the value is to be ignored. */
6121 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6122 int ignore)
6124 tree fndecl = get_callee_fndecl (exp);
6125 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6126 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6128 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6129 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6131 /* When not optimizing, generate calls to library functions for a certain
6132 set of builtins. */
6133 if (!optimize
6134 && !called_as_built_in (fndecl)
6135 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6136 && fcode != BUILT_IN_ALLOCA
6137 && fcode != BUILT_IN_FREE)
6138 return expand_call (exp, target, ignore);
6140 /* The built-in function expanders test for target == const0_rtx
6141 to determine whether the function's result will be ignored. */
6142 if (ignore)
6143 target = const0_rtx;
6145 /* If the result of a pure or const built-in function is ignored, and
6146 none of its arguments are volatile, we can avoid expanding the
6147 built-in call and just evaluate the arguments for side-effects. */
6148 if (target == const0_rtx
6149 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6151 bool volatilep = false;
6152 tree arg;
6153 call_expr_arg_iterator iter;
6155 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6156 if (TREE_THIS_VOLATILE (arg))
6158 volatilep = true;
6159 break;
6162 if (! volatilep)
6164 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6165 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6166 return const0_rtx;
6170 switch (fcode)
6172 CASE_FLT_FN (BUILT_IN_FABS):
6173 target = expand_builtin_fabs (exp, target, subtarget);
6174 if (target)
6175 return target;
6176 break;
6178 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6179 target = expand_builtin_copysign (exp, target, subtarget);
6180 if (target)
6181 return target;
6182 break;
6184 /* Just do a normal library call if we were unable to fold
6185 the values. */
6186 CASE_FLT_FN (BUILT_IN_CABS):
6187 break;
6189 CASE_FLT_FN (BUILT_IN_EXP):
6190 CASE_FLT_FN (BUILT_IN_EXP10):
6191 CASE_FLT_FN (BUILT_IN_POW10):
6192 CASE_FLT_FN (BUILT_IN_EXP2):
6193 CASE_FLT_FN (BUILT_IN_EXPM1):
6194 CASE_FLT_FN (BUILT_IN_LOGB):
6195 CASE_FLT_FN (BUILT_IN_LOG):
6196 CASE_FLT_FN (BUILT_IN_LOG10):
6197 CASE_FLT_FN (BUILT_IN_LOG2):
6198 CASE_FLT_FN (BUILT_IN_LOG1P):
6199 CASE_FLT_FN (BUILT_IN_TAN):
6200 CASE_FLT_FN (BUILT_IN_ASIN):
6201 CASE_FLT_FN (BUILT_IN_ACOS):
6202 CASE_FLT_FN (BUILT_IN_ATAN):
6203 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6204 because of possible accuracy problems. */
6205 if (! flag_unsafe_math_optimizations)
6206 break;
6207 CASE_FLT_FN (BUILT_IN_SQRT):
6208 CASE_FLT_FN (BUILT_IN_FLOOR):
6209 CASE_FLT_FN (BUILT_IN_CEIL):
6210 CASE_FLT_FN (BUILT_IN_TRUNC):
6211 CASE_FLT_FN (BUILT_IN_ROUND):
6212 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6213 CASE_FLT_FN (BUILT_IN_RINT):
6214 target = expand_builtin_mathfn (exp, target, subtarget);
6215 if (target)
6216 return target;
6217 break;
6219 CASE_FLT_FN (BUILT_IN_ILOGB):
6220 if (! flag_unsafe_math_optimizations)
6221 break;
6222 CASE_FLT_FN (BUILT_IN_ISINF):
6223 CASE_FLT_FN (BUILT_IN_FINITE):
6224 case BUILT_IN_ISFINITE:
6225 case BUILT_IN_ISNORMAL:
6226 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6227 if (target)
6228 return target;
6229 break;
6231 CASE_FLT_FN (BUILT_IN_LCEIL):
6232 CASE_FLT_FN (BUILT_IN_LLCEIL):
6233 CASE_FLT_FN (BUILT_IN_LFLOOR):
6234 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6235 target = expand_builtin_int_roundingfn (exp, target);
6236 if (target)
6237 return target;
6238 break;
6240 CASE_FLT_FN (BUILT_IN_LRINT):
6241 CASE_FLT_FN (BUILT_IN_LLRINT):
6242 CASE_FLT_FN (BUILT_IN_LROUND):
6243 CASE_FLT_FN (BUILT_IN_LLROUND):
6244 target = expand_builtin_int_roundingfn_2 (exp, target);
6245 if (target)
6246 return target;
6247 break;
6249 CASE_FLT_FN (BUILT_IN_POW):
6250 target = expand_builtin_pow (exp, target, subtarget);
6251 if (target)
6252 return target;
6253 break;
6255 CASE_FLT_FN (BUILT_IN_POWI):
6256 target = expand_builtin_powi (exp, target, subtarget);
6257 if (target)
6258 return target;
6259 break;
6261 CASE_FLT_FN (BUILT_IN_ATAN2):
6262 CASE_FLT_FN (BUILT_IN_LDEXP):
6263 CASE_FLT_FN (BUILT_IN_SCALB):
6264 CASE_FLT_FN (BUILT_IN_SCALBN):
6265 CASE_FLT_FN (BUILT_IN_SCALBLN):
6266 if (! flag_unsafe_math_optimizations)
6267 break;
6269 CASE_FLT_FN (BUILT_IN_FMOD):
6270 CASE_FLT_FN (BUILT_IN_REMAINDER):
6271 CASE_FLT_FN (BUILT_IN_DREM):
6272 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6273 if (target)
6274 return target;
6275 break;
6277 CASE_FLT_FN (BUILT_IN_CEXPI):
6278 target = expand_builtin_cexpi (exp, target, subtarget);
6279 gcc_assert (target);
6280 return target;
6282 CASE_FLT_FN (BUILT_IN_SIN):
6283 CASE_FLT_FN (BUILT_IN_COS):
6284 if (! flag_unsafe_math_optimizations)
6285 break;
6286 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6287 if (target)
6288 return target;
6289 break;
6291 CASE_FLT_FN (BUILT_IN_SINCOS):
6292 if (! flag_unsafe_math_optimizations)
6293 break;
6294 target = expand_builtin_sincos (exp);
6295 if (target)
6296 return target;
6297 break;
6299 case BUILT_IN_APPLY_ARGS:
6300 return expand_builtin_apply_args ();
6302 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6303 FUNCTION with a copy of the parameters described by
6304 ARGUMENTS, and ARGSIZE. It returns a block of memory
6305 allocated on the stack into which is stored all the registers
6306 that might possibly be used for returning the result of a
6307 function. ARGUMENTS is the value returned by
6308 __builtin_apply_args. ARGSIZE is the number of bytes of
6309 arguments that must be copied. ??? How should this value be
6310 computed? We'll also need a safe worst case value for varargs
6311 functions. */
6312 case BUILT_IN_APPLY:
6313 if (!validate_arglist (exp, POINTER_TYPE,
6314 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6315 && !validate_arglist (exp, REFERENCE_TYPE,
6316 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6317 return const0_rtx;
6318 else
6320 rtx ops[3];
6322 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6323 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6324 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6326 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6329 /* __builtin_return (RESULT) causes the function to return the
6330 value described by RESULT. RESULT is address of the block of
6331 memory returned by __builtin_apply. */
6332 case BUILT_IN_RETURN:
6333 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6334 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6335 return const0_rtx;
6337 case BUILT_IN_SAVEREGS:
6338 return expand_builtin_saveregs ();
6340 case BUILT_IN_ARGS_INFO:
6341 return expand_builtin_args_info (exp);
6343 case BUILT_IN_VA_ARG_PACK:
6344 /* All valid uses of __builtin_va_arg_pack () are removed during
6345 inlining. */
6346 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6347 return const0_rtx;
6349 case BUILT_IN_VA_ARG_PACK_LEN:
6350 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6351 inlining. */
6352 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6353 return const0_rtx;
6355 /* Return the address of the first anonymous stack arg. */
6356 case BUILT_IN_NEXT_ARG:
6357 if (fold_builtin_next_arg (exp, false))
6358 return const0_rtx;
6359 return expand_builtin_next_arg ();
6361 case BUILT_IN_CLEAR_CACHE:
6362 target = expand_builtin___clear_cache (exp);
6363 if (target)
6364 return target;
6365 break;
6367 case BUILT_IN_CLASSIFY_TYPE:
6368 return expand_builtin_classify_type (exp);
6370 case BUILT_IN_CONSTANT_P:
6371 return const0_rtx;
6373 case BUILT_IN_FRAME_ADDRESS:
6374 case BUILT_IN_RETURN_ADDRESS:
6375 return expand_builtin_frame_address (fndecl, exp);
6377 /* Returns the address of the area where the structure is returned.
6378 0 otherwise. */
6379 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6380 if (call_expr_nargs (exp) != 0
6381 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6382 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6383 return const0_rtx;
6384 else
6385 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6387 case BUILT_IN_ALLOCA:
6388 target = expand_builtin_alloca (exp, target);
6389 if (target)
6390 return target;
6391 break;
6393 case BUILT_IN_STACK_SAVE:
6394 return expand_stack_save ();
6396 case BUILT_IN_STACK_RESTORE:
6397 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6398 return const0_rtx;
6400 case BUILT_IN_BSWAP32:
6401 case BUILT_IN_BSWAP64:
6402 target = expand_builtin_bswap (exp, target, subtarget);
6404 if (target)
6405 return target;
6406 break;
6408 CASE_INT_FN (BUILT_IN_FFS):
6409 case BUILT_IN_FFSIMAX:
6410 target = expand_builtin_unop (target_mode, exp, target,
6411 subtarget, ffs_optab);
6412 if (target)
6413 return target;
6414 break;
6416 CASE_INT_FN (BUILT_IN_CLZ):
6417 case BUILT_IN_CLZIMAX:
6418 target = expand_builtin_unop (target_mode, exp, target,
6419 subtarget, clz_optab);
6420 if (target)
6421 return target;
6422 break;
6424 CASE_INT_FN (BUILT_IN_CTZ):
6425 case BUILT_IN_CTZIMAX:
6426 target = expand_builtin_unop (target_mode, exp, target,
6427 subtarget, ctz_optab);
6428 if (target)
6429 return target;
6430 break;
6432 CASE_INT_FN (BUILT_IN_POPCOUNT):
6433 case BUILT_IN_POPCOUNTIMAX:
6434 target = expand_builtin_unop (target_mode, exp, target,
6435 subtarget, popcount_optab);
6436 if (target)
6437 return target;
6438 break;
6440 CASE_INT_FN (BUILT_IN_PARITY):
6441 case BUILT_IN_PARITYIMAX:
6442 target = expand_builtin_unop (target_mode, exp, target,
6443 subtarget, parity_optab);
6444 if (target)
6445 return target;
6446 break;
6448 case BUILT_IN_STRLEN:
6449 target = expand_builtin_strlen (exp, target, target_mode);
6450 if (target)
6451 return target;
6452 break;
6454 case BUILT_IN_STRCPY:
6455 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6456 if (target)
6457 return target;
6458 break;
6460 case BUILT_IN_STRNCPY:
6461 target = expand_builtin_strncpy (exp, target, mode);
6462 if (target)
6463 return target;
6464 break;
6466 case BUILT_IN_STPCPY:
6467 target = expand_builtin_stpcpy (exp, target, mode);
6468 if (target)
6469 return target;
6470 break;
6472 case BUILT_IN_STRCAT:
6473 target = expand_builtin_strcat (fndecl, exp, target, mode);
6474 if (target)
6475 return target;
6476 break;
6478 case BUILT_IN_STRNCAT:
6479 target = expand_builtin_strncat (exp, target, mode);
6480 if (target)
6481 return target;
6482 break;
6484 case BUILT_IN_STRSPN:
6485 target = expand_builtin_strspn (exp, target, mode);
6486 if (target)
6487 return target;
6488 break;
6490 case BUILT_IN_STRCSPN:
6491 target = expand_builtin_strcspn (exp, target, mode);
6492 if (target)
6493 return target;
6494 break;
6496 case BUILT_IN_STRSTR:
6497 target = expand_builtin_strstr (exp, target, mode);
6498 if (target)
6499 return target;
6500 break;
6502 case BUILT_IN_STRPBRK:
6503 target = expand_builtin_strpbrk (exp, target, mode);
6504 if (target)
6505 return target;
6506 break;
6508 case BUILT_IN_INDEX:
6509 case BUILT_IN_STRCHR:
6510 target = expand_builtin_strchr (exp, target, mode);
6511 if (target)
6512 return target;
6513 break;
6515 case BUILT_IN_RINDEX:
6516 case BUILT_IN_STRRCHR:
6517 target = expand_builtin_strrchr (exp, target, mode);
6518 if (target)
6519 return target;
6520 break;
6522 case BUILT_IN_MEMCPY:
6523 target = expand_builtin_memcpy (exp, target, mode);
6524 if (target)
6525 return target;
6526 break;
6528 case BUILT_IN_MEMPCPY:
6529 target = expand_builtin_mempcpy (exp, target, mode);
6530 if (target)
6531 return target;
6532 break;
6534 case BUILT_IN_MEMMOVE:
6535 target = expand_builtin_memmove (exp, target, mode, ignore);
6536 if (target)
6537 return target;
6538 break;
6540 case BUILT_IN_BCOPY:
6541 target = expand_builtin_bcopy (exp, ignore);
6542 if (target)
6543 return target;
6544 break;
6546 case BUILT_IN_MEMSET:
6547 target = expand_builtin_memset (exp, target, mode);
6548 if (target)
6549 return target;
6550 break;
6552 case BUILT_IN_BZERO:
6553 target = expand_builtin_bzero (exp);
6554 if (target)
6555 return target;
6556 break;
6558 case BUILT_IN_STRCMP:
6559 target = expand_builtin_strcmp (exp, target, mode);
6560 if (target)
6561 return target;
6562 break;
6564 case BUILT_IN_STRNCMP:
6565 target = expand_builtin_strncmp (exp, target, mode);
6566 if (target)
6567 return target;
6568 break;
6570 case BUILT_IN_MEMCHR:
6571 target = expand_builtin_memchr (exp, target, mode);
6572 if (target)
6573 return target;
6574 break;
6576 case BUILT_IN_BCMP:
6577 case BUILT_IN_MEMCMP:
6578 target = expand_builtin_memcmp (exp, target, mode);
6579 if (target)
6580 return target;
6581 break;
6583 case BUILT_IN_SETJMP:
6584 /* This should have been lowered to the builtins below. */
6585 gcc_unreachable ();
6587 case BUILT_IN_SETJMP_SETUP:
6588 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6589 and the receiver label. */
6590 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6592 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6593 VOIDmode, EXPAND_NORMAL);
6594 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6595 rtx label_r = label_rtx (label);
6597 /* This is copied from the handling of non-local gotos. */
6598 expand_builtin_setjmp_setup (buf_addr, label_r);
6599 nonlocal_goto_handler_labels
6600 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6601 nonlocal_goto_handler_labels);
6602 /* ??? Do not let expand_label treat us as such since we would
6603 not want to be both on the list of non-local labels and on
6604 the list of forced labels. */
6605 FORCED_LABEL (label) = 0;
6606 return const0_rtx;
6608 break;
6610 case BUILT_IN_SETJMP_DISPATCHER:
6611 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6612 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6614 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6615 rtx label_r = label_rtx (label);
6617 /* Remove the dispatcher label from the list of non-local labels
6618 since the receiver labels have been added to it above. */
6619 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6620 return const0_rtx;
6622 break;
6624 case BUILT_IN_SETJMP_RECEIVER:
6625 /* __builtin_setjmp_receiver is passed the receiver label. */
6626 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6628 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6629 rtx label_r = label_rtx (label);
6631 expand_builtin_setjmp_receiver (label_r);
6632 return const0_rtx;
6634 break;
6636 /* __builtin_longjmp is passed a pointer to an array of five words.
6637 It's similar to the C library longjmp function but works with
6638 __builtin_setjmp above. */
6639 case BUILT_IN_LONGJMP:
6640 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6642 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6643 VOIDmode, EXPAND_NORMAL);
6644 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6646 if (value != const1_rtx)
6648 error ("%<__builtin_longjmp%> second argument must be 1");
6649 return const0_rtx;
6652 expand_builtin_longjmp (buf_addr, value);
6653 return const0_rtx;
6655 break;
6657 case BUILT_IN_NONLOCAL_GOTO:
6658 target = expand_builtin_nonlocal_goto (exp);
6659 if (target)
6660 return target;
6661 break;
6663 /* This updates the setjmp buffer that is its argument with the value
6664 of the current stack pointer. */
6665 case BUILT_IN_UPDATE_SETJMP_BUF:
6666 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6668 rtx buf_addr
6669 = expand_normal (CALL_EXPR_ARG (exp, 0));
6671 expand_builtin_update_setjmp_buf (buf_addr);
6672 return const0_rtx;
6674 break;
6676 case BUILT_IN_TRAP:
6677 expand_builtin_trap ();
6678 return const0_rtx;
6680 case BUILT_IN_PRINTF:
6681 target = expand_builtin_printf (exp, target, mode, false);
6682 if (target)
6683 return target;
6684 break;
6686 case BUILT_IN_PRINTF_UNLOCKED:
6687 target = expand_builtin_printf (exp, target, mode, true);
6688 if (target)
6689 return target;
6690 break;
6692 case BUILT_IN_FPUTS:
6693 target = expand_builtin_fputs (exp, target, false);
6694 if (target)
6695 return target;
6696 break;
6697 case BUILT_IN_FPUTS_UNLOCKED:
6698 target = expand_builtin_fputs (exp, target, true);
6699 if (target)
6700 return target;
6701 break;
6703 case BUILT_IN_FPRINTF:
6704 target = expand_builtin_fprintf (exp, target, mode, false);
6705 if (target)
6706 return target;
6707 break;
6709 case BUILT_IN_FPRINTF_UNLOCKED:
6710 target = expand_builtin_fprintf (exp, target, mode, true);
6711 if (target)
6712 return target;
6713 break;
6715 case BUILT_IN_SPRINTF:
6716 target = expand_builtin_sprintf (exp, target, mode);
6717 if (target)
6718 return target;
6719 break;
6721 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6722 case BUILT_IN_SIGNBITD32:
6723 case BUILT_IN_SIGNBITD64:
6724 case BUILT_IN_SIGNBITD128:
6725 target = expand_builtin_signbit (exp, target);
6726 if (target)
6727 return target;
6728 break;
6730 /* Various hooks for the DWARF 2 __throw routine. */
6731 case BUILT_IN_UNWIND_INIT:
6732 expand_builtin_unwind_init ();
6733 return const0_rtx;
6734 case BUILT_IN_DWARF_CFA:
6735 return virtual_cfa_rtx;
6736 #ifdef DWARF2_UNWIND_INFO
6737 case BUILT_IN_DWARF_SP_COLUMN:
6738 return expand_builtin_dwarf_sp_column ();
6739 case BUILT_IN_INIT_DWARF_REG_SIZES:
6740 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6741 return const0_rtx;
6742 #endif
6743 case BUILT_IN_FROB_RETURN_ADDR:
6744 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6745 case BUILT_IN_EXTRACT_RETURN_ADDR:
6746 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6747 case BUILT_IN_EH_RETURN:
6748 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6749 CALL_EXPR_ARG (exp, 1));
6750 return const0_rtx;
6751 #ifdef EH_RETURN_DATA_REGNO
6752 case BUILT_IN_EH_RETURN_DATA_REGNO:
6753 return expand_builtin_eh_return_data_regno (exp);
6754 #endif
6755 case BUILT_IN_EXTEND_POINTER:
6756 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6758 case BUILT_IN_VA_START:
6759 return expand_builtin_va_start (exp);
6760 case BUILT_IN_VA_END:
6761 return expand_builtin_va_end (exp);
6762 case BUILT_IN_VA_COPY:
6763 return expand_builtin_va_copy (exp);
6764 case BUILT_IN_EXPECT:
6765 return expand_builtin_expect (exp, target);
6766 case BUILT_IN_PREFETCH:
6767 expand_builtin_prefetch (exp);
6768 return const0_rtx;
6770 case BUILT_IN_PROFILE_FUNC_ENTER:
6771 return expand_builtin_profile_func (false);
6772 case BUILT_IN_PROFILE_FUNC_EXIT:
6773 return expand_builtin_profile_func (true);
6775 case BUILT_IN_INIT_TRAMPOLINE:
6776 return expand_builtin_init_trampoline (exp);
6777 case BUILT_IN_ADJUST_TRAMPOLINE:
6778 return expand_builtin_adjust_trampoline (exp);
6780 case BUILT_IN_FORK:
6781 case BUILT_IN_EXECL:
6782 case BUILT_IN_EXECV:
6783 case BUILT_IN_EXECLP:
6784 case BUILT_IN_EXECLE:
6785 case BUILT_IN_EXECVP:
6786 case BUILT_IN_EXECVE:
6787 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6788 if (target)
6789 return target;
6790 break;
6792 case BUILT_IN_FETCH_AND_ADD_1:
6793 case BUILT_IN_FETCH_AND_ADD_2:
6794 case BUILT_IN_FETCH_AND_ADD_4:
6795 case BUILT_IN_FETCH_AND_ADD_8:
6796 case BUILT_IN_FETCH_AND_ADD_16:
6797 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6798 target = expand_builtin_sync_operation (mode, exp, PLUS,
6799 false, target, ignore);
6800 if (target)
6801 return target;
6802 break;
6804 case BUILT_IN_FETCH_AND_SUB_1:
6805 case BUILT_IN_FETCH_AND_SUB_2:
6806 case BUILT_IN_FETCH_AND_SUB_4:
6807 case BUILT_IN_FETCH_AND_SUB_8:
6808 case BUILT_IN_FETCH_AND_SUB_16:
6809 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6810 target = expand_builtin_sync_operation (mode, exp, MINUS,
6811 false, target, ignore);
6812 if (target)
6813 return target;
6814 break;
6816 case BUILT_IN_FETCH_AND_OR_1:
6817 case BUILT_IN_FETCH_AND_OR_2:
6818 case BUILT_IN_FETCH_AND_OR_4:
6819 case BUILT_IN_FETCH_AND_OR_8:
6820 case BUILT_IN_FETCH_AND_OR_16:
6821 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6822 target = expand_builtin_sync_operation (mode, exp, IOR,
6823 false, target, ignore);
6824 if (target)
6825 return target;
6826 break;
6828 case BUILT_IN_FETCH_AND_AND_1:
6829 case BUILT_IN_FETCH_AND_AND_2:
6830 case BUILT_IN_FETCH_AND_AND_4:
6831 case BUILT_IN_FETCH_AND_AND_8:
6832 case BUILT_IN_FETCH_AND_AND_16:
6833 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6834 target = expand_builtin_sync_operation (mode, exp, AND,
6835 false, target, ignore);
6836 if (target)
6837 return target;
6838 break;
6840 case BUILT_IN_FETCH_AND_XOR_1:
6841 case BUILT_IN_FETCH_AND_XOR_2:
6842 case BUILT_IN_FETCH_AND_XOR_4:
6843 case BUILT_IN_FETCH_AND_XOR_8:
6844 case BUILT_IN_FETCH_AND_XOR_16:
6845 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6846 target = expand_builtin_sync_operation (mode, exp, XOR,
6847 false, target, ignore);
6848 if (target)
6849 return target;
6850 break;
6852 case BUILT_IN_FETCH_AND_NAND_1:
6853 case BUILT_IN_FETCH_AND_NAND_2:
6854 case BUILT_IN_FETCH_AND_NAND_4:
6855 case BUILT_IN_FETCH_AND_NAND_8:
6856 case BUILT_IN_FETCH_AND_NAND_16:
6857 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6858 target = expand_builtin_sync_operation (mode, exp, NOT,
6859 false, target, ignore);
6860 if (target)
6861 return target;
6862 break;
6864 case BUILT_IN_ADD_AND_FETCH_1:
6865 case BUILT_IN_ADD_AND_FETCH_2:
6866 case BUILT_IN_ADD_AND_FETCH_4:
6867 case BUILT_IN_ADD_AND_FETCH_8:
6868 case BUILT_IN_ADD_AND_FETCH_16:
6869 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6870 target = expand_builtin_sync_operation (mode, exp, PLUS,
6871 true, target, ignore);
6872 if (target)
6873 return target;
6874 break;
6876 case BUILT_IN_SUB_AND_FETCH_1:
6877 case BUILT_IN_SUB_AND_FETCH_2:
6878 case BUILT_IN_SUB_AND_FETCH_4:
6879 case BUILT_IN_SUB_AND_FETCH_8:
6880 case BUILT_IN_SUB_AND_FETCH_16:
6881 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6882 target = expand_builtin_sync_operation (mode, exp, MINUS,
6883 true, target, ignore);
6884 if (target)
6885 return target;
6886 break;
6888 case BUILT_IN_OR_AND_FETCH_1:
6889 case BUILT_IN_OR_AND_FETCH_2:
6890 case BUILT_IN_OR_AND_FETCH_4:
6891 case BUILT_IN_OR_AND_FETCH_8:
6892 case BUILT_IN_OR_AND_FETCH_16:
6893 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6894 target = expand_builtin_sync_operation (mode, exp, IOR,
6895 true, target, ignore);
6896 if (target)
6897 return target;
6898 break;
6900 case BUILT_IN_AND_AND_FETCH_1:
6901 case BUILT_IN_AND_AND_FETCH_2:
6902 case BUILT_IN_AND_AND_FETCH_4:
6903 case BUILT_IN_AND_AND_FETCH_8:
6904 case BUILT_IN_AND_AND_FETCH_16:
6905 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6906 target = expand_builtin_sync_operation (mode, exp, AND,
6907 true, target, ignore);
6908 if (target)
6909 return target;
6910 break;
6912 case BUILT_IN_XOR_AND_FETCH_1:
6913 case BUILT_IN_XOR_AND_FETCH_2:
6914 case BUILT_IN_XOR_AND_FETCH_4:
6915 case BUILT_IN_XOR_AND_FETCH_8:
6916 case BUILT_IN_XOR_AND_FETCH_16:
6917 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6918 target = expand_builtin_sync_operation (mode, exp, XOR,
6919 true, target, ignore);
6920 if (target)
6921 return target;
6922 break;
6924 case BUILT_IN_NAND_AND_FETCH_1:
6925 case BUILT_IN_NAND_AND_FETCH_2:
6926 case BUILT_IN_NAND_AND_FETCH_4:
6927 case BUILT_IN_NAND_AND_FETCH_8:
6928 case BUILT_IN_NAND_AND_FETCH_16:
6929 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6930 target = expand_builtin_sync_operation (mode, exp, NOT,
6931 true, target, ignore);
6932 if (target)
6933 return target;
6934 break;
6936 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6937 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6938 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6939 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6940 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6941 if (mode == VOIDmode)
6942 mode = TYPE_MODE (boolean_type_node);
6943 if (!target || !register_operand (target, mode))
6944 target = gen_reg_rtx (mode);
6946 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6947 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6948 if (target)
6949 return target;
6950 break;
6952 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6953 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6954 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6955 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6956 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6957 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6958 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6959 if (target)
6960 return target;
6961 break;
6963 case BUILT_IN_LOCK_TEST_AND_SET_1:
6964 case BUILT_IN_LOCK_TEST_AND_SET_2:
6965 case BUILT_IN_LOCK_TEST_AND_SET_4:
6966 case BUILT_IN_LOCK_TEST_AND_SET_8:
6967 case BUILT_IN_LOCK_TEST_AND_SET_16:
6968 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6969 target = expand_builtin_lock_test_and_set (mode, exp, target);
6970 if (target)
6971 return target;
6972 break;
6974 case BUILT_IN_LOCK_RELEASE_1:
6975 case BUILT_IN_LOCK_RELEASE_2:
6976 case BUILT_IN_LOCK_RELEASE_4:
6977 case BUILT_IN_LOCK_RELEASE_8:
6978 case BUILT_IN_LOCK_RELEASE_16:
6979 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6980 expand_builtin_lock_release (mode, exp);
6981 return const0_rtx;
6983 case BUILT_IN_SYNCHRONIZE:
6984 expand_builtin_synchronize ();
6985 return const0_rtx;
6987 case BUILT_IN_OBJECT_SIZE:
6988 return expand_builtin_object_size (exp);
6990 case BUILT_IN_MEMCPY_CHK:
6991 case BUILT_IN_MEMPCPY_CHK:
6992 case BUILT_IN_MEMMOVE_CHK:
6993 case BUILT_IN_MEMSET_CHK:
6994 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6995 if (target)
6996 return target;
6997 break;
6999 case BUILT_IN_STRCPY_CHK:
7000 case BUILT_IN_STPCPY_CHK:
7001 case BUILT_IN_STRNCPY_CHK:
7002 case BUILT_IN_STRCAT_CHK:
7003 case BUILT_IN_STRNCAT_CHK:
7004 case BUILT_IN_SNPRINTF_CHK:
7005 case BUILT_IN_VSNPRINTF_CHK:
7006 maybe_emit_chk_warning (exp, fcode);
7007 break;
7009 case BUILT_IN_SPRINTF_CHK:
7010 case BUILT_IN_VSPRINTF_CHK:
7011 maybe_emit_sprintf_chk_warning (exp, fcode);
7012 break;
7014 case BUILT_IN_FREE:
7015 maybe_emit_free_warning (exp);
7016 break;
7018 default: /* just do library call, if unknown builtin */
7019 break;
7022 /* The switch statement above can drop through to cause the function
7023 to be called normally. */
7024 return expand_call (exp, target, ignore);
7027 /* Determine whether a tree node represents a call to a built-in
7028 function. If the tree T is a call to a built-in function with
7029 the right number of arguments of the appropriate types, return
7030 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7031 Otherwise the return value is END_BUILTINS. */
7033 enum built_in_function
7034 builtin_mathfn_code (const_tree t)
7036 const_tree fndecl, arg, parmlist;
7037 const_tree argtype, parmtype;
7038 const_call_expr_arg_iterator iter;
7040 if (TREE_CODE (t) != CALL_EXPR
7041 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7042 return END_BUILTINS;
7044 fndecl = get_callee_fndecl (t);
7045 if (fndecl == NULL_TREE
7046 || TREE_CODE (fndecl) != FUNCTION_DECL
7047 || ! DECL_BUILT_IN (fndecl)
7048 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7049 return END_BUILTINS;
7051 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7052 init_const_call_expr_arg_iterator (t, &iter);
7053 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7055 /* If a function doesn't take a variable number of arguments,
7056 the last element in the list will have type `void'. */
7057 parmtype = TREE_VALUE (parmlist);
7058 if (VOID_TYPE_P (parmtype))
7060 if (more_const_call_expr_args_p (&iter))
7061 return END_BUILTINS;
7062 return DECL_FUNCTION_CODE (fndecl);
7065 if (! more_const_call_expr_args_p (&iter))
7066 return END_BUILTINS;
7068 arg = next_const_call_expr_arg (&iter);
7069 argtype = TREE_TYPE (arg);
7071 if (SCALAR_FLOAT_TYPE_P (parmtype))
7073 if (! SCALAR_FLOAT_TYPE_P (argtype))
7074 return END_BUILTINS;
7076 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7078 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7079 return END_BUILTINS;
7081 else if (POINTER_TYPE_P (parmtype))
7083 if (! POINTER_TYPE_P (argtype))
7084 return END_BUILTINS;
7086 else if (INTEGRAL_TYPE_P (parmtype))
7088 if (! INTEGRAL_TYPE_P (argtype))
7089 return END_BUILTINS;
7091 else
7092 return END_BUILTINS;
7095 /* Variable-length argument list. */
7096 return DECL_FUNCTION_CODE (fndecl);
7099 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7100 evaluate to a constant. */
7102 static tree
7103 fold_builtin_constant_p (tree arg)
7105 /* We return 1 for a numeric type that's known to be a constant
7106 value at compile-time or for an aggregate type that's a
7107 literal constant. */
7108 STRIP_NOPS (arg);
7110 /* If we know this is a constant, return the constant one. */
7111 if (CONSTANT_CLASS_P (arg)
7112 || (TREE_CODE (arg) == CONSTRUCTOR
7113 && TREE_CONSTANT (arg)))
7114 return integer_one_node;
7115 if (TREE_CODE (arg) == ADDR_EXPR)
7117 tree op = TREE_OPERAND (arg, 0);
7118 if (TREE_CODE (op) == STRING_CST
7119 || (TREE_CODE (op) == ARRAY_REF
7120 && integer_zerop (TREE_OPERAND (op, 1))
7121 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7122 return integer_one_node;
7125 /* If this expression has side effects, show we don't know it to be a
7126 constant. Likewise if it's a pointer or aggregate type since in
7127 those cases we only want literals, since those are only optimized
7128 when generating RTL, not later.
7129 And finally, if we are compiling an initializer, not code, we
7130 need to return a definite result now; there's not going to be any
7131 more optimization done. */
7132 if (TREE_SIDE_EFFECTS (arg)
7133 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7134 || POINTER_TYPE_P (TREE_TYPE (arg))
7135 || cfun == 0
7136 || folding_initializer)
7137 return integer_zero_node;
7139 return NULL_TREE;
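/* Illustrative results, not part of the GCC sources (x and y are
   hypothetical):

     __builtin_constant_p (42)      folds to 1 here
     __builtin_constant_p ("abc")   folds to 1 here
     __builtin_constant_p (x + y)   is left alone here while X and Y are not
                                    yet known to be constant; if it survives
                                    to RTL expansion, expand_builtin above
                                    turns it into 0.  */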
7142 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7143 return it as a truthvalue. */
7145 static tree
7146 build_builtin_expect_predicate (tree pred, tree expected)
7148 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7150 fn = built_in_decls[BUILT_IN_EXPECT];
7151 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7152 ret_type = TREE_TYPE (TREE_TYPE (fn));
7153 pred_type = TREE_VALUE (arg_types);
7154 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7156 pred = fold_convert (pred_type, pred);
7157 expected = fold_convert (expected_type, expected);
7158 call_expr = build_call_expr (fn, 2, pred, expected);
7160 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7161 build_int_cst (ret_type, 0));
7164 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7165 NULL_TREE if no simplification is possible. */
7167 static tree
7168 fold_builtin_expect (tree arg0, tree arg1)
7170 tree inner, fndecl;
7171 enum tree_code code;
7173 /* If this is a builtin_expect within a builtin_expect, keep the
7174 inner one. See through a comparison against a constant. It
7175 might have been added to create a truthvalue. */
7176 inner = arg0;
7177 if (COMPARISON_CLASS_P (inner)
7178 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7179 inner = TREE_OPERAND (inner, 0);
7181 if (TREE_CODE (inner) == CALL_EXPR
7182 && (fndecl = get_callee_fndecl (inner))
7183 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7184 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7185 return arg0;
7187 /* Distribute the expected value over short-circuiting operators.
7188 See through the cast from truthvalue_type_node to long. */
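/* For example, __builtin_expect (a && b, 1) is rewritten, in effect, as
(__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)
so that the expectation reaches both operands. */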
7189 inner = arg0;
7190 while (TREE_CODE (inner) == NOP_EXPR
7191 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7192 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7193 inner = TREE_OPERAND (inner, 0);
7195 code = TREE_CODE (inner);
7196 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7198 tree op0 = TREE_OPERAND (inner, 0);
7199 tree op1 = TREE_OPERAND (inner, 1);
7201 op0 = build_builtin_expect_predicate (op0, arg1);
7202 op1 = build_builtin_expect_predicate (op1, arg1);
7203 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7205 return fold_convert (TREE_TYPE (arg0), inner);
7208 /* If the argument isn't invariant then there's nothing else we can do. */
7209 if (!TREE_CONSTANT (arg0))
7210 return NULL_TREE;
7212 /* If we expect that a comparison against the argument will fold to
7213 a constant, return the constant. In practice, this means a true
7214 constant or the address of a non-weak symbol. */
7215 inner = arg0;
7216 STRIP_NOPS (inner);
7217 if (TREE_CODE (inner) == ADDR_EXPR)
7221 inner = TREE_OPERAND (inner, 0);
7223 while (TREE_CODE (inner) == COMPONENT_REF
7224 || TREE_CODE (inner) == ARRAY_REF);
7225 if (DECL_P (inner) && DECL_WEAK (inner))
7226 return NULL_TREE;
7229 /* Otherwise, ARG0 already has the proper type for the return value. */
7230 return arg0;
7233 /* Fold a call to __builtin_classify_type with argument ARG. */
7235 static tree
7236 fold_builtin_classify_type (tree arg)
7238 if (arg == 0)
7239 return build_int_cst (NULL_TREE, no_type_class);
7241 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7244 /* Fold a call to __builtin_strlen with argument ARG. */
7246 static tree
7247 fold_builtin_strlen (tree arg)
7249 if (!validate_arg (arg, POINTER_TYPE))
7250 return NULL_TREE;
7251 else
7253 tree len = c_strlen (arg, 0);
7255 if (len)
7257 /* Convert from the internal "sizetype" type to "size_t". */
7258 if (size_type_node)
7259 len = fold_convert (size_type_node, len);
7260 return len;
7263 return NULL_TREE;
7267 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7269 static tree
7270 fold_builtin_inf (tree type, int warn)
7272 REAL_VALUE_TYPE real;
7274 /* __builtin_inff is intended to be usable to define INFINITY on all
7275 targets. If an infinity is not available, INFINITY expands "to a
7276 positive constant of type float that overflows at translation
7277 time", footnote "In this case, using INFINITY will violate the
7278 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7279 Thus we pedwarn to ensure this constraint violation is
7280 diagnosed. */
7281 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7282 pedwarn (input_location, 0, "target format does not support infinity");
7284 real_inf (&real);
7285 return build_real (type, real);
7288 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7290 static tree
7291 fold_builtin_nan (tree arg, tree type, int quiet)
7293 REAL_VALUE_TYPE real;
7294 const char *str;
7296 if (!validate_arg (arg, POINTER_TYPE))
7297 return NULL_TREE;
7298 str = c_getstr (arg);
7299 if (!str)
7300 return NULL_TREE;
7302 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7303 return NULL_TREE;
7305 return build_real (type, real);
7308 /* Return true if the floating point expression T has an integer value.
7309 We also allow +Inf, -Inf and NaN to be considered integer values. */
7311 static bool
7312 integer_valued_real_p (tree t)
7314 switch (TREE_CODE (t))
7316 case FLOAT_EXPR:
7317 return true;
7319 case ABS_EXPR:
7320 case SAVE_EXPR:
7321 return integer_valued_real_p (TREE_OPERAND (t, 0));
7323 case COMPOUND_EXPR:
7324 case MODIFY_EXPR:
7325 case BIND_EXPR:
7326 return integer_valued_real_p (TREE_OPERAND (t, 1));
7328 case PLUS_EXPR:
7329 case MINUS_EXPR:
7330 case MULT_EXPR:
7331 case MIN_EXPR:
7332 case MAX_EXPR:
7333 return integer_valued_real_p (TREE_OPERAND (t, 0))
7334 && integer_valued_real_p (TREE_OPERAND (t, 1));
7336 case COND_EXPR:
7337 return integer_valued_real_p (TREE_OPERAND (t, 1))
7338 && integer_valued_real_p (TREE_OPERAND (t, 2));
7340 case REAL_CST:
7341 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7343 case NOP_EXPR:
7345 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7346 if (TREE_CODE (type) == INTEGER_TYPE)
7347 return true;
7348 if (TREE_CODE (type) == REAL_TYPE)
7349 return integer_valued_real_p (TREE_OPERAND (t, 0));
7350 break;
7353 case CALL_EXPR:
7354 switch (builtin_mathfn_code (t))
7356 CASE_FLT_FN (BUILT_IN_CEIL):
7357 CASE_FLT_FN (BUILT_IN_FLOOR):
7358 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7359 CASE_FLT_FN (BUILT_IN_RINT):
7360 CASE_FLT_FN (BUILT_IN_ROUND):
7361 CASE_FLT_FN (BUILT_IN_TRUNC):
7362 return true;
7364 CASE_FLT_FN (BUILT_IN_FMIN):
7365 CASE_FLT_FN (BUILT_IN_FMAX):
7366 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7367 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7369 default:
7370 break;
7372 break;
7374 default:
7375 break;
7377 return false;
7380 /* FNDECL is assumed to be a builtin where truncation can be propagated
7381 across (for instance floor((double)f) == (double)floorf (f)).
7382 Do the transformation for a call with argument ARG. */
7384 static tree
7385 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7387 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7389 if (!validate_arg (arg, REAL_TYPE))
7390 return NULL_TREE;
7392 /* Integer rounding functions are idempotent. */
7393 if (fcode == builtin_mathfn_code (arg))
7394 return arg;
7396 /* If argument is already integer valued, and we don't need to worry
7397 about setting errno, there's no need to perform rounding. */
7398 if (! flag_errno_math && integer_valued_real_p (arg))
7399 return arg;
7401 if (optimize)
7403 tree arg0 = strip_float_extensions (arg);
7404 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7405 tree newtype = TREE_TYPE (arg0);
7406 tree decl;
7408 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7409 && (decl = mathfn_built_in (newtype, fcode)))
7410 return fold_convert (ftype,
7411 build_call_expr (decl, 1,
7412 fold_convert (newtype, arg0)));
7414 return NULL_TREE;
7417 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7418 the argument, for instance lround((double)f) -> lroundf (f).
7419 Do the transformation for a call with argument ARG. */
7421 static tree
7422 fold_fixed_mathfn (tree fndecl, tree arg)
7424 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7426 if (!validate_arg (arg, REAL_TYPE))
7427 return NULL_TREE;
7429 /* If argument is already integer valued, and we don't need to worry
7430 about setting errno, there's no need to perform rounding. */
7431 if (! flag_errno_math && integer_valued_real_p (arg))
7432 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7434 if (optimize)
7436 tree ftype = TREE_TYPE (arg);
7437 tree arg0 = strip_float_extensions (arg);
7438 tree newtype = TREE_TYPE (arg0);
7439 tree decl;
7441 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7442 && (decl = mathfn_built_in (newtype, fcode)))
7443 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7446 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7447 sizeof (long long) == sizeof (long). */
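/* On such targets llround (x) becomes, in effect, (long long) lround (x);
the conversion added below is a no-op because both integer types have
the same precision. */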
7448 if (TYPE_PRECISION (long_long_integer_type_node)
7449 == TYPE_PRECISION (long_integer_type_node))
7451 tree newfn = NULL_TREE;
7452 switch (fcode)
7454 CASE_FLT_FN (BUILT_IN_LLCEIL):
7455 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7456 break;
7458 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7459 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7460 break;
7462 CASE_FLT_FN (BUILT_IN_LLROUND):
7463 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7464 break;
7466 CASE_FLT_FN (BUILT_IN_LLRINT):
7467 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7468 break;
7470 default:
7471 break;
7474 if (newfn)
7476 tree newcall = build_call_expr (newfn, 1, arg);
7477 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7481 return NULL_TREE;
7484 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7485 return type. Return NULL_TREE if no simplification can be made. */
7487 static tree
7488 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7490 tree res;
7492 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7493 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7494 return NULL_TREE;
7496 /* Calculate the result when the argument is a constant. */
7497 if (TREE_CODE (arg) == COMPLEX_CST
7498 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7499 type, mpfr_hypot)))
7500 return res;
7502 if (TREE_CODE (arg) == COMPLEX_EXPR)
7504 tree real = TREE_OPERAND (arg, 0);
7505 tree imag = TREE_OPERAND (arg, 1);
7507 /* If either part is zero, cabs is fabs of the other. */
7508 if (real_zerop (real))
7509 return fold_build1 (ABS_EXPR, type, imag);
7510 if (real_zerop (imag))
7511 return fold_build1 (ABS_EXPR, type, real);
7513 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
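/* E.g. cabs (3.0 + 3.0i) = sqrt (9.0 + 9.0) = 3.0 * sqrt (2.0);
in general |x + x*i| = sqrt (2*x*x) = fabs (x) * sqrt (2). */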
7514 if (flag_unsafe_math_optimizations
7515 && operand_equal_p (real, imag, OEP_PURE_SAME))
7517 const REAL_VALUE_TYPE sqrt2_trunc
7518 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7519 STRIP_NOPS (real);
7520 return fold_build2 (MULT_EXPR, type,
7521 fold_build1 (ABS_EXPR, type, real),
7522 build_real (type, sqrt2_trunc));
7526 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7527 if (TREE_CODE (arg) == NEGATE_EXPR
7528 || TREE_CODE (arg) == CONJ_EXPR)
7529 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7531 /* Don't do this when optimizing for size. */
7532 if (flag_unsafe_math_optimizations
7533 && optimize && optimize_function_for_speed_p (cfun))
7535 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7537 if (sqrtfn != NULL_TREE)
7539 tree rpart, ipart, result;
7541 arg = builtin_save_expr (arg);
7543 rpart = fold_build1 (REALPART_EXPR, type, arg);
7544 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7546 rpart = builtin_save_expr (rpart);
7547 ipart = builtin_save_expr (ipart);
7549 result = fold_build2 (PLUS_EXPR, type,
7550 fold_build2 (MULT_EXPR, type,
7551 rpart, rpart),
7552 fold_build2 (MULT_EXPR, type,
7553 ipart, ipart));
7555 return build_call_expr (sqrtfn, 1, result);
7559 return NULL_TREE;
7562 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7563 Return NULL_TREE if no simplification can be made. */
7565 static tree
7566 fold_builtin_sqrt (tree arg, tree type)
7569 enum built_in_function fcode;
7570 tree res;
7572 if (!validate_arg (arg, REAL_TYPE))
7573 return NULL_TREE;
7575 /* Calculate the result when the argument is a constant. */
7576 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7577 return res;
7579 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7580 fcode = builtin_mathfn_code (arg);
7581 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7583 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7584 arg = fold_build2 (MULT_EXPR, type,
7585 CALL_EXPR_ARG (arg, 0),
7586 build_real (type, dconsthalf));
7587 return build_call_expr (expfn, 1, arg);
7590 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7591 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7593 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7595 if (powfn)
7597 tree arg0 = CALL_EXPR_ARG (arg, 0);
7598 tree tree_root;
7599 /* The inner root was either sqrt or cbrt. */
7600 REAL_VALUE_TYPE dconstroot =
7601 BUILTIN_SQRT_P (fcode) ? dconsthalf : dconst_third ();
7603 /* Adjust for the outer root. */
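/* Decrementing the binary exponent halves the value, so 1/2 becomes 1/4
and 1/3 becomes 1/6; i.e. sqrt (sqrt (x)) -> pow (x, 1.0/4.0) and
sqrt (cbrt (x)) -> pow (x, 1.0/6.0). */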
7604 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7605 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7606 tree_root = build_real (type, dconstroot);
7607 return build_call_expr (powfn, 2, arg0, tree_root);
7611 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7612 if (flag_unsafe_math_optimizations
7613 && (fcode == BUILT_IN_POW
7614 || fcode == BUILT_IN_POWF
7615 || fcode == BUILT_IN_POWL))
7617 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7618 tree arg0 = CALL_EXPR_ARG (arg, 0);
7619 tree arg1 = CALL_EXPR_ARG (arg, 1);
7620 tree narg1;
7621 if (!tree_expr_nonnegative_p (arg0))
7622 arg0 = build1 (ABS_EXPR, type, arg0);
7623 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7624 build_real (type, dconsthalf));
7625 return build_call_expr (powfn, 2, arg0, narg1);
7628 return NULL_TREE;
7631 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7632 Return NULL_TREE if no simplification can be made. */
7634 static tree
7635 fold_builtin_cbrt (tree arg, tree type)
7637 const enum built_in_function fcode = builtin_mathfn_code (arg);
7638 tree res;
7640 if (!validate_arg (arg, REAL_TYPE))
7641 return NULL_TREE;
7643 /* Calculate the result when the argument is a constant. */
7644 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7645 return res;
7647 if (flag_unsafe_math_optimizations)
7649 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7650 if (BUILTIN_EXPONENT_P (fcode))
7652 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7653 const REAL_VALUE_TYPE third_trunc =
7654 real_value_truncate (TYPE_MODE (type), dconst_third ());
7655 arg = fold_build2 (MULT_EXPR, type,
7656 CALL_EXPR_ARG (arg, 0),
7657 build_real (type, third_trunc));
7658 return build_call_expr (expfn, 1, arg);
7661 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7662 if (BUILTIN_SQRT_P (fcode))
7664 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7666 if (powfn)
7668 tree arg0 = CALL_EXPR_ARG (arg, 0);
7669 tree tree_root;
7670 REAL_VALUE_TYPE dconstroot = dconst_third ();
7672 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7673 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7674 tree_root = build_real (type, dconstroot);
7675 return build_call_expr (powfn, 2, arg0, tree_root);
7679 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7680 if (BUILTIN_CBRT_P (fcode))
7682 tree arg0 = CALL_EXPR_ARG (arg, 0);
7683 if (tree_expr_nonnegative_p (arg0))
7685 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7687 if (powfn)
7689 tree tree_root;
7690 REAL_VALUE_TYPE dconstroot;
7692 real_arithmetic (&dconstroot, MULT_EXPR,
7693 dconst_third_ptr (), dconst_third_ptr ());
7694 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7695 tree_root = build_real (type, dconstroot);
7696 return build_call_expr (powfn, 2, arg0, tree_root);
7701 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7702 if (fcode == BUILT_IN_POW
7703 || fcode == BUILT_IN_POWF
7704 || fcode == BUILT_IN_POWL)
7706 tree arg00 = CALL_EXPR_ARG (arg, 0);
7707 tree arg01 = CALL_EXPR_ARG (arg, 1);
7708 if (tree_expr_nonnegative_p (arg00))
7710 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7711 const REAL_VALUE_TYPE dconstroot
7712 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7713 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7714 build_real (type, dconstroot));
7715 return build_call_expr (powfn, 2, arg00, narg01);
7719 return NULL_TREE;
7722 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7723 TYPE is the type of the return value. Return NULL_TREE if no
7724 simplification can be made. */
7726 static tree
7727 fold_builtin_cos (tree arg, tree type, tree fndecl)
7729 tree res, narg;
7731 if (!validate_arg (arg, REAL_TYPE))
7732 return NULL_TREE;
7734 /* Calculate the result when the argument is a constant. */
7735 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7736 return res;
7738 /* Optimize cos(-x) into cos (x). */
7739 if ((narg = fold_strip_sign_ops (arg)))
7740 return build_call_expr (fndecl, 1, narg);
7742 return NULL_TREE;
7745 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7746 Return NULL_TREE if no simplification can be made. */
7748 static tree
7749 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7751 if (validate_arg (arg, REAL_TYPE))
7753 tree res, narg;
7755 /* Calculate the result when the argument is a constant. */
7756 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7757 return res;
7759 /* Optimize cosh(-x) into cosh (x). */
7760 if ((narg = fold_strip_sign_ops (arg)))
7761 return build_call_expr (fndecl, 1, narg);
7764 return NULL_TREE;
7767 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7768 Return NULL_TREE if no simplification can be made. */
7770 static tree
7771 fold_builtin_tan (tree arg, tree type)
7773 enum built_in_function fcode;
7774 tree res;
7776 if (!validate_arg (arg, REAL_TYPE))
7777 return NULL_TREE;
7779 /* Calculate the result when the argument is a constant. */
7780 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7781 return res;
7783 /* Optimize tan(atan(x)) = x. */
7784 fcode = builtin_mathfn_code (arg);
7785 if (flag_unsafe_math_optimizations
7786 && (fcode == BUILT_IN_ATAN
7787 || fcode == BUILT_IN_ATANF
7788 || fcode == BUILT_IN_ATANL))
7789 return CALL_EXPR_ARG (arg, 0);
7791 return NULL_TREE;
7794 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7795 NULL_TREE if no simplification can be made. */
7797 static tree
7798 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7800 tree type;
7801 tree res, fn, call;
7803 if (!validate_arg (arg0, REAL_TYPE)
7804 || !validate_arg (arg1, POINTER_TYPE)
7805 || !validate_arg (arg2, POINTER_TYPE))
7806 return NULL_TREE;
7808 type = TREE_TYPE (arg0);
7810 /* Calculate the result when the argument is a constant. */
7811 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7812 return res;
7814 /* Canonicalize sincos to cexpi. */
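/* E.g. sincos (x, sinp, cosp) becomes, in effect,
tmp = cexpi (x); *sinp = __imag__ tmp; *cosp = __real__ tmp;
since cexpi (x) computes cos (x) + i*sin (x). */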
7815 if (!TARGET_C99_FUNCTIONS)
7816 return NULL_TREE;
7817 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7818 if (!fn)
7819 return NULL_TREE;
7821 call = build_call_expr (fn, 1, arg0);
7822 call = builtin_save_expr (call);
7824 return build2 (COMPOUND_EXPR, type,
7825 build2 (MODIFY_EXPR, void_type_node,
7826 build_fold_indirect_ref (arg1),
7827 build1 (IMAGPART_EXPR, type, call)),
7828 build2 (MODIFY_EXPR, void_type_node,
7829 build_fold_indirect_ref (arg2),
7830 build1 (REALPART_EXPR, type, call)));
7833 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7834 NULL_TREE if no simplification can be made. */
7836 static tree
7837 fold_builtin_cexp (tree arg0, tree type)
7839 tree rtype;
7840 tree realp, imagp, ifn;
7842 if (!validate_arg (arg0, COMPLEX_TYPE))
7843 return NULL_TREE;
7845 rtype = TREE_TYPE (TREE_TYPE (arg0));
7847 /* If we can figure out the real part of arg0 and it is constant zero,
7848 fold to cexpi. */
7849 if (!TARGET_C99_FUNCTIONS)
7850 return NULL_TREE;
7851 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7852 if (!ifn)
7853 return NULL_TREE;
7855 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7856 && real_zerop (realp))
7858 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7859 return build_call_expr (ifn, 1, narg);
7862 /* If we can easily decompose the real and imaginary parts, split cexp
7863 into exp (r) * cexpi (i). */
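/* This uses the identity cexp (r + b*i) = exp (r) * (cos (b) + i*sin (b))
= exp (r) * cexpi (b). */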
7864 if (flag_unsafe_math_optimizations
7865 && realp)
7867 tree rfn, rcall, icall;
7869 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7870 if (!rfn)
7871 return NULL_TREE;
7873 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7874 if (!imagp)
7875 return NULL_TREE;
7877 icall = build_call_expr (ifn, 1, imagp);
7878 icall = builtin_save_expr (icall);
7879 rcall = build_call_expr (rfn, 1, realp);
7880 rcall = builtin_save_expr (rcall);
7881 return fold_build2 (COMPLEX_EXPR, type,
7882 fold_build2 (MULT_EXPR, rtype,
7883 rcall,
7884 fold_build1 (REALPART_EXPR, rtype, icall)),
7885 fold_build2 (MULT_EXPR, rtype,
7886 rcall,
7887 fold_build1 (IMAGPART_EXPR, rtype, icall)));
7890 return NULL_TREE;
7893 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7894 Return NULL_TREE if no simplification can be made. */
7896 static tree
7897 fold_builtin_trunc (tree fndecl, tree arg)
7899 if (!validate_arg (arg, REAL_TYPE))
7900 return NULL_TREE;
7902 /* Optimize trunc of constant value. */
7903 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7905 REAL_VALUE_TYPE r, x;
7906 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7908 x = TREE_REAL_CST (arg);
7909 real_trunc (&r, TYPE_MODE (type), &x);
7910 return build_real (type, r);
7913 return fold_trunc_transparent_mathfn (fndecl, arg);
7916 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7917 Return NULL_TREE if no simplification can be made. */
7919 static tree
7920 fold_builtin_floor (tree fndecl, tree arg)
7922 if (!validate_arg (arg, REAL_TYPE))
7923 return NULL_TREE;
7925 /* Optimize floor of constant value. */
7926 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7928 REAL_VALUE_TYPE x;
7930 x = TREE_REAL_CST (arg);
7931 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7933 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7934 REAL_VALUE_TYPE r;
7936 real_floor (&r, TYPE_MODE (type), &x);
7937 return build_real (type, r);
7941 /* Fold floor (x) where x is nonnegative to trunc (x). */
7942 if (tree_expr_nonnegative_p (arg))
7944 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7945 if (truncfn)
7946 return build_call_expr (truncfn, 1, arg);
7949 return fold_trunc_transparent_mathfn (fndecl, arg);
7952 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7953 Return NULL_TREE if no simplification can be made. */
7955 static tree
7956 fold_builtin_ceil (tree fndecl, tree arg)
7958 if (!validate_arg (arg, REAL_TYPE))
7959 return NULL_TREE;
7961 /* Optimize ceil of constant value. */
7962 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7964 REAL_VALUE_TYPE x;
7966 x = TREE_REAL_CST (arg);
7967 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7969 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7970 REAL_VALUE_TYPE r;
7972 real_ceil (&r, TYPE_MODE (type), &x);
7973 return build_real (type, r);
7977 return fold_trunc_transparent_mathfn (fndecl, arg);
7980 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7981 Return NULL_TREE if no simplification can be made. */
7983 static tree
7984 fold_builtin_round (tree fndecl, tree arg)
7986 if (!validate_arg (arg, REAL_TYPE))
7987 return NULL_TREE;
7989 /* Optimize round of constant value. */
7990 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7992 REAL_VALUE_TYPE x;
7994 x = TREE_REAL_CST (arg);
7995 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7997 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7998 REAL_VALUE_TYPE r;
8000 real_round (&r, TYPE_MODE (type), &x);
8001 return build_real (type, r);
8005 return fold_trunc_transparent_mathfn (fndecl, arg);
8008 /* Fold function call to builtin lround, lroundf or lroundl (or the
8009 corresponding long long versions) and other rounding functions. ARG
8010 is the argument to the call. Return NULL_TREE if no simplification
8011 can be made. */
8013 static tree
8014 fold_builtin_int_roundingfn (tree fndecl, tree arg)
8016 if (!validate_arg (arg, REAL_TYPE))
8017 return NULL_TREE;
8019 /* Optimize lround of constant value. */
8020 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8022 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8024 if (real_isfinite (&x))
8026 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8027 tree ftype = TREE_TYPE (arg);
8028 unsigned HOST_WIDE_INT lo2;
8029 HOST_WIDE_INT hi, lo;
8030 REAL_VALUE_TYPE r;
8032 switch (DECL_FUNCTION_CODE (fndecl))
8034 CASE_FLT_FN (BUILT_IN_LFLOOR):
8035 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8036 real_floor (&r, TYPE_MODE (ftype), &x);
8037 break;
8039 CASE_FLT_FN (BUILT_IN_LCEIL):
8040 CASE_FLT_FN (BUILT_IN_LLCEIL):
8041 real_ceil (&r, TYPE_MODE (ftype), &x);
8042 break;
8044 CASE_FLT_FN (BUILT_IN_LROUND):
8045 CASE_FLT_FN (BUILT_IN_LLROUND):
8046 real_round (&r, TYPE_MODE (ftype), &x);
8047 break;
8049 default:
8050 gcc_unreachable ();
8053 REAL_VALUE_TO_INT (&lo, &hi, r);
8054 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8055 return build_int_cst_wide (itype, lo2, hi);
8059 switch (DECL_FUNCTION_CODE (fndecl))
8061 CASE_FLT_FN (BUILT_IN_LFLOOR):
8062 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8063 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8064 if (tree_expr_nonnegative_p (arg))
8065 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8066 arg);
8067 break;
8068 default:;
8071 return fold_fixed_mathfn (fndecl, arg);
8074 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8075 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8076 the argument to the call. Return NULL_TREE if no simplification can
8077 be made. */
8079 static tree
8080 fold_builtin_bitop (tree fndecl, tree arg)
8082 if (!validate_arg (arg, INTEGER_TYPE))
8083 return NULL_TREE;
8085 /* Optimize for constant argument. */
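/* E.g. __builtin_ffs (8) -> 4, __builtin_ctz (8) -> 3,
__builtin_popcount (7) -> 3, __builtin_parity (7) -> 1,
and __builtin_clz (1) -> 31 for a 32-bit argument. */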
8086 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8088 HOST_WIDE_INT hi, width, result;
8089 unsigned HOST_WIDE_INT lo;
8090 tree type;
8092 type = TREE_TYPE (arg);
8093 width = TYPE_PRECISION (type);
8094 lo = TREE_INT_CST_LOW (arg);
8096 /* Clear all the bits that are beyond the type's precision. */
8097 if (width > HOST_BITS_PER_WIDE_INT)
8099 hi = TREE_INT_CST_HIGH (arg);
8100 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8101 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8103 else
8105 hi = 0;
8106 if (width < HOST_BITS_PER_WIDE_INT)
8107 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8110 switch (DECL_FUNCTION_CODE (fndecl))
8112 CASE_INT_FN (BUILT_IN_FFS):
8113 if (lo != 0)
8114 result = exact_log2 (lo & -lo) + 1;
8115 else if (hi != 0)
8116 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8117 else
8118 result = 0;
8119 break;
8121 CASE_INT_FN (BUILT_IN_CLZ):
8122 if (hi != 0)
8123 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8124 else if (lo != 0)
8125 result = width - floor_log2 (lo) - 1;
8126 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8127 result = width;
8128 break;
8130 CASE_INT_FN (BUILT_IN_CTZ):
8131 if (lo != 0)
8132 result = exact_log2 (lo & -lo);
8133 else if (hi != 0)
8134 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8135 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8136 result = width;
8137 break;
8139 CASE_INT_FN (BUILT_IN_POPCOUNT):
8140 result = 0;
8141 while (lo)
8142 result++, lo &= lo - 1;
8143 while (hi)
8144 result++, hi &= hi - 1;
8145 break;
8147 CASE_INT_FN (BUILT_IN_PARITY):
8148 result = 0;
8149 while (lo)
8150 result++, lo &= lo - 1;
8151 while (hi)
8152 result++, hi &= hi - 1;
8153 result &= 1;
8154 break;
8156 default:
8157 gcc_unreachable ();
8160 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8163 return NULL_TREE;
8166 /* Fold function call to builtin_bswap and the long and long long
8167 variants. Return NULL_TREE if no simplification can be made. */
8168 static tree
8169 fold_builtin_bswap (tree fndecl, tree arg)
8171 if (! validate_arg (arg, INTEGER_TYPE))
8172 return NULL_TREE;
8174 /* Optimize constant value. */
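/* E.g. __builtin_bswap32 (0x12345678) folds to 0x78563412, and
__builtin_bswap64 (0x0102030405060708) folds to 0x0807060504030201. */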
8175 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8177 HOST_WIDE_INT hi, width, r_hi = 0;
8178 unsigned HOST_WIDE_INT lo, r_lo = 0;
8179 tree type;
8181 type = TREE_TYPE (arg);
8182 width = TYPE_PRECISION (type);
8183 lo = TREE_INT_CST_LOW (arg);
8184 hi = TREE_INT_CST_HIGH (arg);
8186 switch (DECL_FUNCTION_CODE (fndecl))
8188 case BUILT_IN_BSWAP32:
8189 case BUILT_IN_BSWAP64:
8191 int s;
8193 for (s = 0; s < width; s += 8)
8195 int d = width - s - 8;
8196 unsigned HOST_WIDE_INT byte;
8198 if (s < HOST_BITS_PER_WIDE_INT)
8199 byte = (lo >> s) & 0xff;
8200 else
8201 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8203 if (d < HOST_BITS_PER_WIDE_INT)
8204 r_lo |= byte << d;
8205 else
8206 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8210 break;
8212 default:
8213 gcc_unreachable ();
8216 if (width < HOST_BITS_PER_WIDE_INT)
8217 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8218 else
8219 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8222 return NULL_TREE;
8225 /* Return true if EXPR is the real constant contained in VALUE. */
8227 static bool
8228 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8230 STRIP_NOPS (expr);
8232 return ((TREE_CODE (expr) == REAL_CST
8233 && !TREE_OVERFLOW (expr)
8234 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8235 || (TREE_CODE (expr) == COMPLEX_CST
8236 && real_dconstp (TREE_REALPART (expr), value)
8237 && real_zerop (TREE_IMAGPART (expr))));
8240 /* A subroutine of fold_builtin to fold the various logarithmic
8241 functions. Return NULL_TREE if no simplification can be made.
8242 FUNC is the corresponding MPFR logarithm function. */
8244 static tree
8245 fold_builtin_logarithm (tree fndecl, tree arg,
8246 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8248 if (validate_arg (arg, REAL_TYPE))
8250 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8251 tree res;
8252 const enum built_in_function fcode = builtin_mathfn_code (arg);
8254 /* Optimize log(e) = 1.0. We're never passed an exact 'e';
8255 instead we'll look for 'e' truncated to MODE. So only do
8256 this if flag_unsafe_math_optimizations is set. */
8257 if (flag_unsafe_math_optimizations && func == mpfr_log)
8259 const REAL_VALUE_TYPE e_truncated =
8260 real_value_truncate (TYPE_MODE (type), dconst_e ());
8261 if (real_dconstp (arg, &e_truncated))
8262 return build_real (type, dconst1);
8265 /* Calculate the result when the argument is a constant. */
8266 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8267 return res;
8269 /* Special case, optimize logN(expN(x)) = x. */
8270 if (flag_unsafe_math_optimizations
8271 && ((func == mpfr_log
8272 && (fcode == BUILT_IN_EXP
8273 || fcode == BUILT_IN_EXPF
8274 || fcode == BUILT_IN_EXPL))
8275 || (func == mpfr_log2
8276 && (fcode == BUILT_IN_EXP2
8277 || fcode == BUILT_IN_EXP2F
8278 || fcode == BUILT_IN_EXP2L))
8279 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8280 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8282 /* Optimize logN(func()) for various exponential functions. We
8283 want to determine the value "x" and the power "exponent" in
8284 order to transform logN(x**exponent) into exponent*logN(x). */
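/* E.g. log2 (exp (x)) -> x * log2 (e), log (sqrt (x)) -> 0.5 * log (x),
and log (pow (x, y)) -> y * log (x). */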
8285 if (flag_unsafe_math_optimizations)
8287 tree exponent = 0, x = 0;
8289 switch (fcode)
8291 CASE_FLT_FN (BUILT_IN_EXP):
8292 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8293 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8294 dconst_e ()));
8295 exponent = CALL_EXPR_ARG (arg, 0);
8296 break;
8297 CASE_FLT_FN (BUILT_IN_EXP2):
8298 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8299 x = build_real (type, dconst2);
8300 exponent = CALL_EXPR_ARG (arg, 0);
8301 break;
8302 CASE_FLT_FN (BUILT_IN_EXP10):
8303 CASE_FLT_FN (BUILT_IN_POW10):
8304 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8306 REAL_VALUE_TYPE dconst10;
8307 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8308 x = build_real (type, dconst10);
8310 exponent = CALL_EXPR_ARG (arg, 0);
8311 break;
8312 CASE_FLT_FN (BUILT_IN_SQRT):
8313 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8314 x = CALL_EXPR_ARG (arg, 0);
8315 exponent = build_real (type, dconsthalf);
8316 break;
8317 CASE_FLT_FN (BUILT_IN_CBRT):
8318 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8319 x = CALL_EXPR_ARG (arg, 0);
8320 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8321 dconst_third ()));
8322 break;
8323 CASE_FLT_FN (BUILT_IN_POW):
8324 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8325 x = CALL_EXPR_ARG (arg, 0);
8326 exponent = CALL_EXPR_ARG (arg, 1);
8327 break;
8328 default:
8329 break;
8332 /* Now perform the optimization. */
8333 if (x && exponent)
8335 tree logfn = build_call_expr (fndecl, 1, x);
8336 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8341 return NULL_TREE;
8344 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8345 NULL_TREE if no simplification can be made. */
8347 static tree
8348 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8350 tree res, narg0, narg1;
8352 if (!validate_arg (arg0, REAL_TYPE)
8353 || !validate_arg (arg1, REAL_TYPE))
8354 return NULL_TREE;
8356 /* Calculate the result when the argument is a constant. */
8357 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8358 return res;
8360 /* If either argument to hypot has a negate or abs, strip that off.
8361 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8362 narg0 = fold_strip_sign_ops (arg0);
8363 narg1 = fold_strip_sign_ops (arg1);
8364 if (narg0 || narg1)
8366 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8367 narg1 ? narg1 : arg1);
8370 /* If either argument is zero, hypot is fabs of the other. */
8371 if (real_zerop (arg0))
8372 return fold_build1 (ABS_EXPR, type, arg1);
8373 else if (real_zerop (arg1))
8374 return fold_build1 (ABS_EXPR, type, arg0);
8376 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8377 if (flag_unsafe_math_optimizations
8378 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8380 const REAL_VALUE_TYPE sqrt2_trunc
8381 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8382 return fold_build2 (MULT_EXPR, type,
8383 fold_build1 (ABS_EXPR, type, arg0),
8384 build_real (type, sqrt2_trunc));
8387 return NULL_TREE;
8391 /* Fold a builtin function call to pow, powf, or powl. Return
8392 NULL_TREE if no simplification can be made. */
8393 static tree
8394 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8396 tree res;
8398 if (!validate_arg (arg0, REAL_TYPE)
8399 || !validate_arg (arg1, REAL_TYPE))
8400 return NULL_TREE;
8402 /* Calculate the result when the argument is a constant. */
8403 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8404 return res;
8406 /* Optimize pow(1.0,y) = 1.0. */
8407 if (real_onep (arg0))
8408 return omit_one_operand (type, build_real (type, dconst1), arg1);
8410 if (TREE_CODE (arg1) == REAL_CST
8411 && !TREE_OVERFLOW (arg1))
8413 REAL_VALUE_TYPE cint;
8414 REAL_VALUE_TYPE c;
8415 HOST_WIDE_INT n;
8417 c = TREE_REAL_CST (arg1);
8419 /* Optimize pow(x,0.0) = 1.0. */
8420 if (REAL_VALUES_EQUAL (c, dconst0))
8421 return omit_one_operand (type, build_real (type, dconst1),
8422 arg0);
8424 /* Optimize pow(x,1.0) = x. */
8425 if (REAL_VALUES_EQUAL (c, dconst1))
8426 return arg0;
8428 /* Optimize pow(x,-1.0) = 1.0/x. */
8429 if (REAL_VALUES_EQUAL (c, dconstm1))
8430 return fold_build2 (RDIV_EXPR, type,
8431 build_real (type, dconst1), arg0);
8433 /* Optimize pow(x,0.5) = sqrt(x). */
8434 if (flag_unsafe_math_optimizations
8435 && REAL_VALUES_EQUAL (c, dconsthalf))
8437 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8439 if (sqrtfn != NULL_TREE)
8440 return build_call_expr (sqrtfn, 1, arg0);
8443 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8444 if (flag_unsafe_math_optimizations)
8446 const REAL_VALUE_TYPE dconstroot
8447 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8449 if (REAL_VALUES_EQUAL (c, dconstroot))
8451 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8452 if (cbrtfn != NULL_TREE)
8453 return build_call_expr (cbrtfn, 1, arg0);
8457 /* Check for an integer exponent. */
8458 n = real_to_integer (&c);
8459 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8460 if (real_identical (&c, &cint))
8462 /* Attempt to evaluate pow at compile-time. */
8463 if (TREE_CODE (arg0) == REAL_CST
8464 && !TREE_OVERFLOW (arg0))
8466 REAL_VALUE_TYPE x;
8467 bool inexact;
8469 x = TREE_REAL_CST (arg0);
8470 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8471 if (flag_unsafe_math_optimizations || !inexact)
8472 return build_real (type, x);
8475 /* Strip sign ops from even integer powers. */
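/* E.g. pow (-x, 2.0) -> pow (x, 2.0), since an even power discards
the sign of the base. */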
8476 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8478 tree narg0 = fold_strip_sign_ops (arg0);
8479 if (narg0)
8480 return build_call_expr (fndecl, 2, narg0, arg1);
8485 if (flag_unsafe_math_optimizations)
8487 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8489 /* Optimize pow(expN(x),y) = expN(x*y). */
8490 if (BUILTIN_EXPONENT_P (fcode))
8492 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8493 tree arg = CALL_EXPR_ARG (arg0, 0);
8494 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8495 return build_call_expr (expfn, 1, arg);
8498 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8499 if (BUILTIN_SQRT_P (fcode))
8501 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8502 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8503 build_real (type, dconsthalf));
8504 return build_call_expr (fndecl, 2, narg0, narg1);
8507 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8508 if (BUILTIN_CBRT_P (fcode))
8510 tree arg = CALL_EXPR_ARG (arg0, 0);
8511 if (tree_expr_nonnegative_p (arg))
8513 const REAL_VALUE_TYPE dconstroot
8514 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8515 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8516 build_real (type, dconstroot));
8517 return build_call_expr (fndecl, 2, arg, narg1);
8521 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8522 if (fcode == BUILT_IN_POW
8523 || fcode == BUILT_IN_POWF
8524 || fcode == BUILT_IN_POWL)
8526 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8527 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8528 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8529 return build_call_expr (fndecl, 2, arg00, narg1);
8533 return NULL_TREE;
8536 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8537 Return NULL_TREE if no simplification can be made. */
8538 static tree
8539 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8540 tree arg0, tree arg1, tree type)
8542 if (!validate_arg (arg0, REAL_TYPE)
8543 || !validate_arg (arg1, INTEGER_TYPE))
8544 return NULL_TREE;
8546 /* Optimize pow(1.0,y) = 1.0. */
8547 if (real_onep (arg0))
8548 return omit_one_operand (type, build_real (type, dconst1), arg1);
8550 if (host_integerp (arg1, 0))
8552 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8554 /* Evaluate powi at compile-time. */
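/* E.g. __builtin_powi (2.0, 10) folds to 1024.0 here. */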
8555 if (TREE_CODE (arg0) == REAL_CST
8556 && !TREE_OVERFLOW (arg0))
8558 REAL_VALUE_TYPE x;
8559 x = TREE_REAL_CST (arg0);
8560 real_powi (&x, TYPE_MODE (type), &x, c);
8561 return build_real (type, x);
8564 /* Optimize pow(x,0) = 1.0. */
8565 if (c == 0)
8566 return omit_one_operand (type, build_real (type, dconst1),
8567 arg0);
8569 /* Optimize pow(x,1) = x. */
8570 if (c == 1)
8571 return arg0;
8573 /* Optimize pow(x,-1) = 1.0/x. */
8574 if (c == -1)
8575 return fold_build2 (RDIV_EXPR, type,
8576 build_real (type, dconst1), arg0);
8579 return NULL_TREE;
8582 /* A subroutine of fold_builtin to fold the various exponent
8583 functions. Return NULL_TREE if no simplification can be made.
8584 FUNC is the corresponding MPFR exponent function. */
8586 static tree
8587 fold_builtin_exponent (tree fndecl, tree arg,
8588 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8590 if (validate_arg (arg, REAL_TYPE))
8592 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8593 tree res;
8595 /* Calculate the result when the argument is a constant. */
8596 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8597 return res;
8599 /* Optimize expN(logN(x)) = x. */
8600 if (flag_unsafe_math_optimizations)
8602 const enum built_in_function fcode = builtin_mathfn_code (arg);
8604 if ((func == mpfr_exp
8605 && (fcode == BUILT_IN_LOG
8606 || fcode == BUILT_IN_LOGF
8607 || fcode == BUILT_IN_LOGL))
8608 || (func == mpfr_exp2
8609 && (fcode == BUILT_IN_LOG2
8610 || fcode == BUILT_IN_LOG2F
8611 || fcode == BUILT_IN_LOG2L))
8612 || (func == mpfr_exp10
8613 && (fcode == BUILT_IN_LOG10
8614 || fcode == BUILT_IN_LOG10F
8615 || fcode == BUILT_IN_LOG10L)))
8616 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8620 return NULL_TREE;
8623 /* Return true if VAR is a VAR_DECL or a component thereof. */
8625 static bool
8626 var_decl_component_p (tree var)
8628 tree inner = var;
8629 while (handled_component_p (inner))
8630 inner = TREE_OPERAND (inner, 0);
8631 return SSA_VAR_P (inner);
8634 /* Fold function call to builtin memset. Return
8635 NULL_TREE if no simplification can be made. */
8637 static tree
8638 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8640 tree var, ret;
8641 unsigned HOST_WIDE_INT length, cval;
8643 if (! validate_arg (dest, POINTER_TYPE)
8644 || ! validate_arg (c, INTEGER_TYPE)
8645 || ! validate_arg (len, INTEGER_TYPE))
8646 return NULL_TREE;
8648 if (! host_integerp (len, 1))
8649 return NULL_TREE;
8651 /* If the LEN parameter is zero, return DEST. */
8652 if (integer_zerop (len))
8653 return omit_one_operand (type, dest, c);
8655 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8656 return NULL_TREE;
8658 var = dest;
8659 STRIP_NOPS (var);
8660 if (TREE_CODE (var) != ADDR_EXPR)
8661 return NULL_TREE;
8663 var = TREE_OPERAND (var, 0);
8664 if (TREE_THIS_VOLATILE (var))
8665 return NULL_TREE;
8667 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8668 && !POINTER_TYPE_P (TREE_TYPE (var)))
8669 return NULL_TREE;
8671 if (! var_decl_component_p (var))
8672 return NULL_TREE;
8674 length = tree_low_cst (len, 1);
8675 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8676 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8677 < (int) length)
8678 return NULL_TREE;
8680 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8681 return NULL_TREE;
8683 if (integer_zerop (c))
8684 cval = 0;
8685 else
8687 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8688 return NULL_TREE;
8690 cval = tree_low_cst (c, 1);
8691 cval &= 0xff;
8692 cval |= cval << 8;
8693 cval |= cval << 16;
8694 cval |= (cval << 31) << 1;
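/* The shifts replicate the low byte across the word: 0xab becomes
0xabababab, or 0xabababababababab with a 64-bit HOST_WIDE_INT.
The final shift is split in two so the shift count stays below
HOST_BITS_PER_WIDE_INT when that is 32. */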
8697 ret = build_int_cst_type (TREE_TYPE (var), cval);
8698 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8699 if (ignore)
8700 return ret;
8702 return omit_one_operand (type, dest, ret);
8705 /* Fold function call to builtin bzero. Return
8706 NULL_TREE if no simplification can be made. */
8708 static tree
8709 fold_builtin_bzero (tree dest, tree size, bool ignore)
8711 if (! validate_arg (dest, POINTER_TYPE)
8712 || ! validate_arg (size, INTEGER_TYPE))
8713 return NULL_TREE;
8715 if (!ignore)
8716 return NULL_TREE;
8718 /* New argument list transforming bzero(ptr x, int y) to
8719 memset(ptr x, int 0, size_t y). This is done this way
8720 so that if it isn't expanded inline, we fall back to
8721 calling bzero instead of memset. */
8723 return fold_builtin_memset (dest, integer_zero_node,
8724 fold_convert (sizetype, size),
8725 void_type_node, ignore);
8728 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8729 NULL_TREE if no simplification can be made.
8730 If ENDP is 0, return DEST (like memcpy).
8731 If ENDP is 1, return DEST+LEN (like mempcpy).
8732 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8733 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8734 (memmove). */
8736 static tree
8737 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8739 tree destvar, srcvar, expr;
8741 if (! validate_arg (dest, POINTER_TYPE)
8742 || ! validate_arg (src, POINTER_TYPE)
8743 || ! validate_arg (len, INTEGER_TYPE))
8744 return NULL_TREE;
8746 /* If the LEN parameter is zero, return DEST. */
8747 if (integer_zerop (len))
8748 return omit_one_operand (type, dest, src);
8750 /* If SRC and DEST are the same (and not volatile), return
8751 DEST{,+LEN,+LEN-1}. */
8752 if (operand_equal_p (src, dest, 0))
8753 expr = len;
8754 else
8756 tree srctype, desttype;
8757 if (endp == 3)
8759 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8760 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8762 /* Both DEST and SRC must be pointer types.
8763 ??? This is what old code did. Is the testing for pointer types
8764 really mandatory?
8766 If either SRC is readonly or length is 1, we can use memcpy. */
8767 if (dest_align && src_align
8768 && (readonly_data_expr (src)
8769 || (host_integerp (len, 1)
8770 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8771 tree_low_cst (len, 1)))))
8773 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8774 if (!fn)
8775 return NULL_TREE;
8776 return build_call_expr (fn, 3, dest, src, len);
8778 return NULL_TREE;
8781 if (!host_integerp (len, 0))
8782 return NULL_TREE;
8783 /* FIXME:
8784 This logic loses for arguments like (type *)malloc (sizeof (type)),
8785 since we strip the casts down to the VOID return value of malloc.
8786 Perhaps we ought to inherit type from non-VOID argument here? */
8787 STRIP_NOPS (src);
8788 STRIP_NOPS (dest);
8789 srctype = TREE_TYPE (TREE_TYPE (src));
8790 desttype = TREE_TYPE (TREE_TYPE (dest));
8791 if (!srctype || !desttype
8792 || !TYPE_SIZE_UNIT (srctype)
8793 || !TYPE_SIZE_UNIT (desttype)
8794 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8795 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8796 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8797 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8798 return NULL_TREE;
8800 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8801 < (int) TYPE_ALIGN (desttype)
8802 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8803 < (int) TYPE_ALIGN (srctype)))
8804 return NULL_TREE;
8806 if (!ignore)
8807 dest = builtin_save_expr (dest);
8809 srcvar = build_fold_indirect_ref (src);
8810 if (TREE_THIS_VOLATILE (srcvar))
8811 return NULL_TREE;
8812 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8813 return NULL_TREE;
8814 /* With memcpy, it is possible to bypass aliasing rules, so without
8815 this check, e.g. execute/20060930-2.c would be misoptimized, because
8816 it uses a conflicting alias set to hold the argument for the memcpy call.
8817 This check is probably unnecessary with -fno-strict-aliasing.
8818 Similarly for destvar. See also PR29286. */
8819 if (!var_decl_component_p (srcvar)
8820 /* Accept: memcpy (*char_var, "test", 1); which simplifies
8821 to char_var='t'; */
8822 || is_gimple_min_invariant (srcvar)
8823 || readonly_data_expr (src))
8824 return NULL_TREE;
8826 destvar = build_fold_indirect_ref (dest);
8827 if (TREE_THIS_VOLATILE (destvar))
8828 return NULL_TREE;
8829 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8830 return NULL_TREE;
8831 if (!var_decl_component_p (destvar))
8832 return NULL_TREE;
8834 if (srctype == desttype
8835 || (gimple_in_ssa_p (cfun)
8836 && useless_type_conversion_p (desttype, srctype)))
8837 expr = srcvar;
8838 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8839 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8840 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8841 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8842 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8843 else
8844 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8845 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8848 if (ignore)
8849 return expr;
8851 if (endp == 0 || endp == 3)
8852 return omit_one_operand (type, dest, expr);
8854 if (expr == len)
8855 expr = NULL_TREE;
8857 if (endp == 2)
8858 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8859 ssize_int (1));
8861 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8862 dest = fold_convert (type, dest);
8863 if (expr)
8864 dest = omit_one_operand (type, dest, expr);
8865 return dest;
8868 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8869 If LEN is not NULL, it represents the length of the string to be
8870 copied. Return NULL_TREE if no simplification can be made. */
8872 tree
8873 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8875 tree fn;
8877 if (!validate_arg (dest, POINTER_TYPE)
8878 || !validate_arg (src, POINTER_TYPE))
8879 return NULL_TREE;
8881 /* If SRC and DEST are the same (and not volatile), return DEST. */
8882 if (operand_equal_p (src, dest, 0))
8883 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8885 if (optimize_function_for_size_p (cfun))
8886 return NULL_TREE;
8888 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8889 if (!fn)
8890 return NULL_TREE;
8892 if (!len)
8894 len = c_strlen (src, 1);
8895 if (! len || TREE_SIDE_EFFECTS (len))
8896 return NULL_TREE;
8899 len = size_binop (PLUS_EXPR, len, ssize_int (1));
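/* E.g. strcpy (d, "abc") becomes memcpy (d, "abc", 4), the length
counting the terminating NUL. */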
8900 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8901 build_call_expr (fn, 3, dest, src, len));
8904 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8905 If SLEN is not NULL, it represents the length of the source string.
8906 Return NULL_TREE if no simplification can be made. */
8908 tree
8909 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8911 tree fn;
8913 if (!validate_arg (dest, POINTER_TYPE)
8914 || !validate_arg (src, POINTER_TYPE)
8915 || !validate_arg (len, INTEGER_TYPE))
8916 return NULL_TREE;
8918 /* If the LEN parameter is zero, return DEST. */
8919 if (integer_zerop (len))
8920 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8922 /* We can't compare slen with len as constants below if len is not a
8923 constant. */
8924 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8925 return NULL_TREE;
8927 if (!slen)
8928 slen = c_strlen (src, 1);
8930 /* Now, we must be passed a constant src ptr parameter. */
8931 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8932 return NULL_TREE;
8934 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8936 /* We do not support simplification of this case, though we do
8937 support it when expanding trees into RTL. */
8938 /* FIXME: generate a call to __builtin_memset. */
8939 if (tree_int_cst_lt (slen, len))
8940 return NULL_TREE;
8942 /* OK, transform into builtin memcpy. */
8943 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8944 if (!fn)
8945 return NULL_TREE;
8946 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8947 build_call_expr (fn, 3, dest, src, len));
8950 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8951 arguments to the call, and TYPE is its return type.
8952 Return NULL_TREE if no simplification can be made. */
8954 static tree
8955 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8957 if (!validate_arg (arg1, POINTER_TYPE)
8958 || !validate_arg (arg2, INTEGER_TYPE)
8959 || !validate_arg (len, INTEGER_TYPE))
8960 return NULL_TREE;
8961 else
8963 const char *p1;
8965 if (TREE_CODE (arg2) != INTEGER_CST
8966 || !host_integerp (len, 1))
8967 return NULL_TREE;
8969 p1 = c_getstr (arg1);
8970 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8972 char c;
8973 const char *r;
8974 tree tem;
8976 if (target_char_cast (arg2, &c))
8977 return NULL_TREE;
8979 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8981 if (r == NULL)
8982 return build_int_cst (TREE_TYPE (arg1), 0);
8984 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8985 size_int (r - p1));
8986 return fold_convert (type, tem);
8988 return NULL_TREE;
8992 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8993 Return NULL_TREE if no simplification can be made. */
8995 static tree
8996 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8998 const char *p1, *p2;
9000 if (!validate_arg (arg1, POINTER_TYPE)
9001 || !validate_arg (arg2, POINTER_TYPE)
9002 || !validate_arg (len, INTEGER_TYPE))
9003 return NULL_TREE;
9005 /* If the LEN parameter is zero, return zero. */
9006 if (integer_zerop (len))
9007 return omit_two_operands (integer_type_node, integer_zero_node,
9008 arg1, arg2);
9010 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9011 if (operand_equal_p (arg1, arg2, 0))
9012 return omit_one_operand (integer_type_node, integer_zero_node, len);
9014 p1 = c_getstr (arg1);
9015 p2 = c_getstr (arg2);
9017 /* If all arguments are constant, and the value of len is not greater
9018 than the lengths of arg1 and arg2, evaluate at compile-time. */
9019 if (host_integerp (len, 1) && p1 && p2
9020 && compare_tree_int (len, strlen (p1) + 1) <= 0
9021 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9023 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9025 if (r > 0)
9026 return integer_one_node;
9027 else if (r < 0)
9028 return integer_minus_one_node;
9029 else
9030 return integer_zero_node;
9033 /* If len parameter is one, return an expression corresponding to
9034 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9035 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9037 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9038 tree cst_uchar_ptr_node
9039 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9041 tree ind1 = fold_convert (integer_type_node,
9042 build1 (INDIRECT_REF, cst_uchar_node,
9043 fold_convert (cst_uchar_ptr_node,
9044 arg1)));
9045 tree ind2 = fold_convert (integer_type_node,
9046 build1 (INDIRECT_REF, cst_uchar_node,
9047 fold_convert (cst_uchar_ptr_node,
9048 arg2)));
9049 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9052 return NULL_TREE;
9055 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9056 Return NULL_TREE if no simplification can be made. */
9058 static tree
9059 fold_builtin_strcmp (tree arg1, tree arg2)
9061 const char *p1, *p2;
9063 if (!validate_arg (arg1, POINTER_TYPE)
9064 || !validate_arg (arg2, POINTER_TYPE))
9065 return NULL_TREE;
9067 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9068 if (operand_equal_p (arg1, arg2, 0))
9069 return integer_zero_node;
9071 p1 = c_getstr (arg1);
9072 p2 = c_getstr (arg2);
9074 if (p1 && p2)
9076 const int i = strcmp (p1, p2);
9077 if (i < 0)
9078 return integer_minus_one_node;
9079 else if (i > 0)
9080 return integer_one_node;
9081 else
9082 return integer_zero_node;
9085 /* If the second arg is "", return *(const unsigned char*)arg1. */
9086 if (p2 && *p2 == '\0')
9088 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9089 tree cst_uchar_ptr_node
9090 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9092 return fold_convert (integer_type_node,
9093 build1 (INDIRECT_REF, cst_uchar_node,
9094 fold_convert (cst_uchar_ptr_node,
9095 arg1)));
9098 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9099 if (p1 && *p1 == '\0')
9101 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9102 tree cst_uchar_ptr_node
9103 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9105 tree temp = fold_convert (integer_type_node,
9106 build1 (INDIRECT_REF, cst_uchar_node,
9107 fold_convert (cst_uchar_ptr_node,
9108 arg2)));
9109 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9112 return NULL_TREE;
9115 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9116 Return NULL_TREE if no simplification can be made. */
9118 static tree
9119 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9121 const char *p1, *p2;
9123 if (!validate_arg (arg1, POINTER_TYPE)
9124 || !validate_arg (arg2, POINTER_TYPE)
9125 || !validate_arg (len, INTEGER_TYPE))
9126 return NULL_TREE;
9128 /* If the LEN parameter is zero, return zero. */
9129 if (integer_zerop (len))
9130 return omit_two_operands (integer_type_node, integer_zero_node,
9131 arg1, arg2);
9133 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9134 if (operand_equal_p (arg1, arg2, 0))
9135 return omit_one_operand (integer_type_node, integer_zero_node, len);
9137 p1 = c_getstr (arg1);
9138 p2 = c_getstr (arg2);
9140 if (host_integerp (len, 1) && p1 && p2)
9142 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9143 if (i > 0)
9144 return integer_one_node;
9145 else if (i < 0)
9146 return integer_minus_one_node;
9147 else
9148 return integer_zero_node;
9151 /* If the second arg is "", and the length is greater than zero,
9152 return *(const unsigned char*)arg1. */
9153 if (p2 && *p2 == '\0'
9154 && TREE_CODE (len) == INTEGER_CST
9155 && tree_int_cst_sgn (len) == 1)
9157 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9158 tree cst_uchar_ptr_node
9159 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9161 return fold_convert (integer_type_node,
9162 build1 (INDIRECT_REF, cst_uchar_node,
9163 fold_convert (cst_uchar_ptr_node,
9164 arg1)));
9167 /* If the first arg is "", and the length is greater than zero,
9168 return -*(const unsigned char*)arg2. */
9169 if (p1 && *p1 == '\0'
9170 && TREE_CODE (len) == INTEGER_CST
9171 && tree_int_cst_sgn (len) == 1)
9173 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9174 tree cst_uchar_ptr_node
9175 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9177 tree temp = fold_convert (integer_type_node,
9178 build1 (INDIRECT_REF, cst_uchar_node,
9179 fold_convert (cst_uchar_ptr_node,
9180 arg2)));
9181 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9184 /* If len parameter is one, return an expression corresponding to
9185 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9186 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9188 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9189 tree cst_uchar_ptr_node
9190 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9192 tree ind1 = fold_convert (integer_type_node,
9193 build1 (INDIRECT_REF, cst_uchar_node,
9194 fold_convert (cst_uchar_ptr_node,
9195 arg1)));
9196 tree ind2 = fold_convert (integer_type_node,
9197 build1 (INDIRECT_REF, cst_uchar_node,
9198 fold_convert (cst_uchar_ptr_node,
9199 arg2)));
9200 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9203 return NULL_TREE;
9206 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9207 ARG. Return NULL_TREE if no simplification can be made. */
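/* For example, signbit (-3.0) folds to 1, signbit of a provably
   non-negative argument folds to 0, and when the format has no signed
   zeros signbit (x) folds to x < 0.0. */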
9209 static tree
9210 fold_builtin_signbit (tree arg, tree type)
9212 tree temp;
9214 if (!validate_arg (arg, REAL_TYPE))
9215 return NULL_TREE;
9217 /* If ARG is a compile-time constant, determine the result. */
9218 if (TREE_CODE (arg) == REAL_CST
9219 && !TREE_OVERFLOW (arg))
9221 REAL_VALUE_TYPE c;
9223 c = TREE_REAL_CST (arg);
9224 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9225 return fold_convert (type, temp);
9228 /* If ARG is non-negative, the result is always zero. */
9229 if (tree_expr_nonnegative_p (arg))
9230 return omit_one_operand (type, integer_zero_node, arg);
9232 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9233 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9234 return fold_build2 (LT_EXPR, type, arg,
9235 build_real (TREE_TYPE (arg), dconst0));
9237 return NULL_TREE;
9240 /* Fold function call to builtin copysign, copysignf or copysignl with
9241 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9242 be made. */
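/* For example, copysign (x, x) folds to x, copysign (3.0, -1.0) folds to
   -3.0, and copysign (x, 2.0) folds to fabs (x). */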
9244 static tree
9245 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9247 tree tem;
9249 if (!validate_arg (arg1, REAL_TYPE)
9250 || !validate_arg (arg2, REAL_TYPE))
9251 return NULL_TREE;
9253 /* copysign(X,X) is X. */
9254 if (operand_equal_p (arg1, arg2, 0))
9255 return fold_convert (type, arg1);
9257 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9258 if (TREE_CODE (arg1) == REAL_CST
9259 && TREE_CODE (arg2) == REAL_CST
9260 && !TREE_OVERFLOW (arg1)
9261 && !TREE_OVERFLOW (arg2))
9263 REAL_VALUE_TYPE c1, c2;
9265 c1 = TREE_REAL_CST (arg1);
9266 c2 = TREE_REAL_CST (arg2);
9267 /* c1.sign := c2.sign. */
9268 real_copysign (&c1, &c2);
9269 return build_real (type, c1);
9272 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9273 Remember to evaluate Y for side-effects. */
9274 if (tree_expr_nonnegative_p (arg2))
9275 return omit_one_operand (type,
9276 fold_build1 (ABS_EXPR, type, arg1),
9277 arg2);
9279 /* Strip sign changing operations for the first argument. */
9280 tem = fold_strip_sign_ops (arg1);
9281 if (tem)
9282 return build_call_expr (fndecl, 2, tem, arg2);
9284 return NULL_TREE;
9287 /* Fold a call to builtin isascii with argument ARG. */
9289 static tree
9290 fold_builtin_isascii (tree arg)
9292 if (!validate_arg (arg, INTEGER_TYPE))
9293 return NULL_TREE;
9294 else
9296 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9297 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9298 build_int_cst (NULL_TREE,
9299 ~ (unsigned HOST_WIDE_INT) 0x7f));
9300 return fold_build2 (EQ_EXPR, integer_type_node,
9301 arg, integer_zero_node);
9305 /* Fold a call to builtin toascii with argument ARG. */
9307 static tree
9308 fold_builtin_toascii (tree arg)
9310 if (!validate_arg (arg, INTEGER_TYPE))
9311 return NULL_TREE;
9313 /* Transform toascii(c) -> (c & 0x7f). */
9314 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9315 build_int_cst (NULL_TREE, 0x7f));
9318 /* Fold a call to builtin isdigit with argument ARG. */
9320 static tree
9321 fold_builtin_isdigit (tree arg)
9323 if (!validate_arg (arg, INTEGER_TYPE))
9324 return NULL_TREE;
9325 else
9327 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9328 /* According to the C standard, isdigit is unaffected by locale.
9329 However, it definitely is affected by the target character set. */
9330 unsigned HOST_WIDE_INT target_digit0
9331 = lang_hooks.to_target_charset ('0');
9333 if (target_digit0 == 0)
9334 return NULL_TREE;
9336 arg = fold_convert (unsigned_type_node, arg);
9337 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9338 build_int_cst (unsigned_type_node, target_digit0));
9339 return fold_build2 (LE_EXPR, integer_type_node, arg,
9340 build_int_cst (unsigned_type_node, 9));
9344 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9346 static tree
9347 fold_builtin_fabs (tree arg, tree type)
9349 if (!validate_arg (arg, REAL_TYPE))
9350 return NULL_TREE;
9352 arg = fold_convert (type, arg);
9353 if (TREE_CODE (arg) == REAL_CST)
9354 return fold_abs_const (arg, type);
9355 return fold_build1 (ABS_EXPR, type, arg);
9358 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9360 static tree
9361 fold_builtin_abs (tree arg, tree type)
9363 if (!validate_arg (arg, INTEGER_TYPE))
9364 return NULL_TREE;
9366 arg = fold_convert (type, arg);
9367 if (TREE_CODE (arg) == INTEGER_CST)
9368 return fold_abs_const (arg, type);
9369 return fold_build1 (ABS_EXPR, type, arg);
9372 /* Fold a call to builtin fmin or fmax. */
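/* For example, fmax (x, x) folds to x, fmin (x, y) folds to x when y is a
   quiet NaN constant, and with -ffinite-math-only fmax (a, b) becomes
   MAX_EXPR <a, b>. */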
9374 static tree
9375 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9377 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9379 /* Calculate the result when the argument is a constant. */
9380 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9382 if (res)
9383 return res;
9385 /* If either argument is NaN, return the other one. Avoid the
9386 transformation if we get (and honor) a signalling NaN. Using
9387 omit_one_operand() ensures we create a non-lvalue. */
9388 if (TREE_CODE (arg0) == REAL_CST
9389 && real_isnan (&TREE_REAL_CST (arg0))
9390 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9391 || ! TREE_REAL_CST (arg0).signalling))
9392 return omit_one_operand (type, arg1, arg0);
9393 if (TREE_CODE (arg1) == REAL_CST
9394 && real_isnan (&TREE_REAL_CST (arg1))
9395 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9396 || ! TREE_REAL_CST (arg1).signalling))
9397 return omit_one_operand (type, arg0, arg1);
9399 /* Transform fmin/fmax(x,x) -> x. */
9400 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9401 return omit_one_operand (type, arg0, arg1);
9403 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9404 functions to return the numeric arg if the other one is NaN.
9405 These tree codes don't honor that, so only transform if
9406 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9407 handled, so we don't have to worry about it either. */
9408 if (flag_finite_math_only)
9409 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9410 fold_convert (type, arg0),
9411 fold_convert (type, arg1));
9413 return NULL_TREE;
9416 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9418 static tree
9419 fold_builtin_carg (tree arg, tree type)
9421 if (validate_arg (arg, COMPLEX_TYPE))
9423 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9425 if (atan2_fn)
9427 tree new_arg = builtin_save_expr (arg);
9428 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9429 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9430 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9434 return NULL_TREE;
9437 /* Fold a call to builtin logb/ilogb. */
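/* For example, in a radix-2 format logb (8.0) folds to 3.0 and
   ilogb (8.0) folds to 3, while an Inf or NaN argument to logb folds
   to itself. */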
9439 static tree
9440 fold_builtin_logb (tree arg, tree rettype)
9442 if (! validate_arg (arg, REAL_TYPE))
9443 return NULL_TREE;
9445 STRIP_NOPS (arg);
9447 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9449 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9451 switch (value->cl)
9453 case rvc_nan:
9454 case rvc_inf:
9455 /* If arg is Inf or NaN and we're logb, return it. */
9456 if (TREE_CODE (rettype) == REAL_TYPE)
9457 return fold_convert (rettype, arg);
9458 /* Fall through... */
9459 case rvc_zero:
9460 /* Zero may set errno and/or raise an exception for logb; for
9461 ilogb we don't know the target's value of FP_ILOGB0. */
9462 return NULL_TREE;
9463 case rvc_normal:
9464 /* For normal numbers, proceed iff radix == 2. In GCC,
9465 normalized significands are in the range [0.5, 1.0). We
9466 want the exponent as if they were [1.0, 2.0) so get the
9467 exponent and subtract 1. */
9468 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9469 return fold_convert (rettype, build_int_cst (NULL_TREE,
9470 REAL_EXP (value)-1));
9471 break;
9475 return NULL_TREE;
9478 /* Fold a call to builtin significand, if radix == 2. */
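/* For example, significand (8.0) folds to 1.0, and +-0, +-Inf and NaN
   arguments fold to themselves. */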
9480 static tree
9481 fold_builtin_significand (tree arg, tree rettype)
9483 if (! validate_arg (arg, REAL_TYPE))
9484 return NULL_TREE;
9486 STRIP_NOPS (arg);
9488 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9490 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9492 switch (value->cl)
9494 case rvc_zero:
9495 case rvc_nan:
9496 case rvc_inf:
9497 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9498 return fold_convert (rettype, arg);
9499 case rvc_normal:
9500 /* For normal numbers, proceed iff radix == 2. */
9501 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9503 REAL_VALUE_TYPE result = *value;
9504 /* In GCC, normalized significands are in the range [0.5,
9505 1.0). We want them to be [1.0, 2.0) so set the
9506 exponent to 1. */
9507 SET_REAL_EXP (&result, 1);
9508 return build_real (rettype, result);
9510 break;
9514 return NULL_TREE;
9517 /* Fold a call to builtin frexp; we can assume the base is 2. */
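/* For example, frexp (8.0, &e) folds to the compound expression
   (*e = 4, 0.5), since 8.0 == 0.5 * 2**4. */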
9519 static tree
9520 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9522 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9523 return NULL_TREE;
9525 STRIP_NOPS (arg0);
9527 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9528 return NULL_TREE;
9530 arg1 = build_fold_indirect_ref (arg1);
9532 /* Proceed if a valid pointer type was passed in. */
9533 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9535 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9536 tree frac, exp;
9538 switch (value->cl)
9540 case rvc_zero:
9541 /* For +-0, return (*exp = 0, +-0). */
9542 exp = integer_zero_node;
9543 frac = arg0;
9544 break;
9545 case rvc_nan:
9546 case rvc_inf:
9547 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9548 return omit_one_operand (rettype, arg0, arg1);
9549 case rvc_normal:
9551 /* Since the frexp function always expects base 2, and in
9552 GCC normalized significands are already in the range
9553 [0.5, 1.0), we have exactly what frexp wants. */
9554 REAL_VALUE_TYPE frac_rvt = *value;
9555 SET_REAL_EXP (&frac_rvt, 0);
9556 frac = build_real (rettype, frac_rvt);
9557 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9559 break;
9560 default:
9561 gcc_unreachable ();
9564 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9565 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9566 TREE_SIDE_EFFECTS (arg1) = 1;
9567 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9570 return NULL_TREE;
9573 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9574 then we can assume the base is two. If it's false, then we have to
9575 check the mode of the TYPE parameter in certain cases. */
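/* For example, ldexp (x, 0) folds to x and ldexp (1.5, 2) folds to 6.0,
   provided the result is exactly representable in TYPE. */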
9577 static tree
9578 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9580 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9582 STRIP_NOPS (arg0);
9583 STRIP_NOPS (arg1);
9585 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9586 if (real_zerop (arg0) || integer_zerop (arg1)
9587 || (TREE_CODE (arg0) == REAL_CST
9588 && !real_isfinite (&TREE_REAL_CST (arg0))))
9589 return omit_one_operand (type, arg0, arg1);
9591 /* If both arguments are constant, then try to evaluate it. */
9592 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9593 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9594 && host_integerp (arg1, 0))
9596 /* Bound the maximum adjustment to twice the range of the
9597 mode's valid exponents. Use abs to ensure the range is
9598 positive as a sanity check. */
9599 const long max_exp_adj = 2 *
9600 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9601 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9603 /* Get the user-requested adjustment. */
9604 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9606 /* The requested adjustment must be inside this range. This
9607 is a preliminary cap to avoid things like overflow; we
9608 may still fail to compute the result for other reasons. */
9609 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9611 REAL_VALUE_TYPE initial_result;
9613 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9615 /* Ensure we didn't overflow. */
9616 if (! real_isinf (&initial_result))
9618 const REAL_VALUE_TYPE trunc_result
9619 = real_value_truncate (TYPE_MODE (type), initial_result);
9621 /* Only proceed if the target mode can hold the
9622 resulting value. */
9623 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9624 return build_real (type, trunc_result);
9630 return NULL_TREE;
9633 /* Fold a call to builtin modf. */
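/* For example, modf (2.5, &iptr) folds to (*iptr = 2.0, 0.5), and
   modf (-Inf, &iptr) folds to (*iptr = -Inf, -0.0). */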
9635 static tree
9636 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9638 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9639 return NULL_TREE;
9641 STRIP_NOPS (arg0);
9643 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9644 return NULL_TREE;
9646 arg1 = build_fold_indirect_ref (arg1);
9648 /* Proceed if a valid pointer type was passed in. */
9649 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9651 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9652 REAL_VALUE_TYPE trunc, frac;
9654 switch (value->cl)
9656 case rvc_nan:
9657 case rvc_zero:
9658 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9659 trunc = frac = *value;
9660 break;
9661 case rvc_inf:
9662 /* For +-Inf, return (*arg1 = arg0, +-0). */
9663 frac = dconst0;
9664 frac.sign = value->sign;
9665 trunc = *value;
9666 break;
9667 case rvc_normal:
9668 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9669 real_trunc (&trunc, VOIDmode, value);
9670 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9671 /* If the original number was negative and already
9672 integral, then the fractional part is -0.0. */
9673 if (value->sign && frac.cl == rvc_zero)
9674 frac.sign = value->sign;
9675 break;
9678 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9679 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9680 build_real (rettype, trunc));
9681 TREE_SIDE_EFFECTS (arg1) = 1;
9682 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9683 build_real (rettype, frac));
9686 return NULL_TREE;
9689 /* Fold a call to __builtin_isnan, __builtin_isinf, __builtin_isinf_sign,
9690 __builtin_isfinite or __builtin_finite. ARG is the argument for the call. */
9692 static tree
9693 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9695 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9696 REAL_VALUE_TYPE r;
9698 if (!validate_arg (arg, REAL_TYPE))
9699 return NULL_TREE;
9701 switch (builtin_index)
9703 case BUILT_IN_ISINF:
9704 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9705 return omit_one_operand (type, integer_zero_node, arg);
9707 if (TREE_CODE (arg) == REAL_CST)
9709 r = TREE_REAL_CST (arg);
9710 if (real_isinf (&r))
9711 return real_compare (GT_EXPR, &r, &dconst0)
9712 ? integer_one_node : integer_minus_one_node;
9713 else
9714 return integer_zero_node;
9717 return NULL_TREE;
9719 case BUILT_IN_ISINF_SIGN:
9721 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9722 /* In a boolean context, GCC will fold the inner COND_EXPR to
9723 1. So e.g. "if (isinf_sign(x))" would be folded to just
9724 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9725 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9726 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9727 tree tmp = NULL_TREE;
9729 arg = builtin_save_expr (arg);
9731 if (signbit_fn && isinf_fn)
9733 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
9734 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
9736 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
9737 signbit_call, integer_zero_node);
9738 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
9739 isinf_call, integer_zero_node);
9741 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
9742 integer_minus_one_node, integer_one_node);
9743 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
9744 integer_zero_node);
9747 return tmp;
9750 case BUILT_IN_ISFINITE:
9751 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9752 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9753 return omit_one_operand (type, integer_one_node, arg);
9755 if (TREE_CODE (arg) == REAL_CST)
9757 r = TREE_REAL_CST (arg);
9758 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9761 return NULL_TREE;
9763 case BUILT_IN_ISNAN:
9764 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9765 return omit_one_operand (type, integer_zero_node, arg);
9767 if (TREE_CODE (arg) == REAL_CST)
9769 r = TREE_REAL_CST (arg);
9770 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9773 arg = builtin_save_expr (arg);
9774 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9776 default:
9777 gcc_unreachable ();
9781 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9782 This builtin will generate code to return the appropriate floating
9783 point classification depending on the value of the floating point
9784 number passed in. The possible return values must be supplied as
9785 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9786 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9787 one floating point argument which is "type generic". */
9789 static tree
9790 fold_builtin_fpclassify (tree exp)
9792 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9793 arg, type, res, tmp;
9794 enum machine_mode mode;
9795 REAL_VALUE_TYPE r;
9796 char buf[128];
9798 /* Verify the required arguments in the original call. */
9799 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9800 INTEGER_TYPE, INTEGER_TYPE,
9801 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9802 return NULL_TREE;
9804 fp_nan = CALL_EXPR_ARG (exp, 0);
9805 fp_infinite = CALL_EXPR_ARG (exp, 1);
9806 fp_normal = CALL_EXPR_ARG (exp, 2);
9807 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9808 fp_zero = CALL_EXPR_ARG (exp, 4);
9809 arg = CALL_EXPR_ARG (exp, 5);
9810 type = TREE_TYPE (arg);
9811 mode = TYPE_MODE (type);
9812 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
9814 /* fpclassify(x) ->
9815 isnan(x) ? FP_NAN :
9816 (fabs(x) == Inf ? FP_INFINITE :
9817 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9818 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9820 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9821 build_real (type, dconst0));
9822 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
9824 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9825 real_from_string (&r, buf);
9826 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
9827 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
9829 if (HONOR_INFINITIES (mode))
9831 real_inf (&r);
9832 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9833 build_real (type, r));
9834 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
9837 if (HONOR_NANS (mode))
9839 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
9840 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
9843 return res;
9846 /* Fold a call to an unordered comparison function such as
9847 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9848 being called and ARG0 and ARG1 are the arguments for the call.
9849 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9850 the opposite of the desired result. UNORDERED_CODE is used
9851 for modes that can hold NaNs and ORDERED_CODE is used for
9852 the rest. */
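/* For example, isgreater (x, y) folds to !UNLE_EXPR <x, y> when the mode
   honors NaNs and to !LE_EXPR <x, y> otherwise, while isunordered (x, y)
   folds to 0 outright when NaNs are not honored. */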
9854 static tree
9855 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9856 enum tree_code unordered_code,
9857 enum tree_code ordered_code)
9859 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9860 enum tree_code code;
9861 tree type0, type1;
9862 enum tree_code code0, code1;
9863 tree cmp_type = NULL_TREE;
9865 type0 = TREE_TYPE (arg0);
9866 type1 = TREE_TYPE (arg1);
9868 code0 = TREE_CODE (type0);
9869 code1 = TREE_CODE (type1);
9871 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9872 /* Choose the wider of two real types. */
9873 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9874 ? type0 : type1;
9875 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9876 cmp_type = type0;
9877 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9878 cmp_type = type1;
9880 arg0 = fold_convert (cmp_type, arg0);
9881 arg1 = fold_convert (cmp_type, arg1);
9883 if (unordered_code == UNORDERED_EXPR)
9885 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9886 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9887 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9890 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9891 : ordered_code;
9892 return fold_build1 (TRUTH_NOT_EXPR, type,
9893 fold_build2 (code, type, arg0, arg1));
9896 /* Fold a call to built-in function FNDECL with 0 arguments.
9897 IGNORE is true if the result of the function call is ignored. This
9898 function returns NULL_TREE if no simplification was possible. */
9900 static tree
9901 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9903 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9904 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9905 switch (fcode)
9907 CASE_FLT_FN (BUILT_IN_INF):
9908 case BUILT_IN_INFD32:
9909 case BUILT_IN_INFD64:
9910 case BUILT_IN_INFD128:
9911 return fold_builtin_inf (type, true);
9913 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9914 return fold_builtin_inf (type, false);
9916 case BUILT_IN_CLASSIFY_TYPE:
9917 return fold_builtin_classify_type (NULL_TREE);
9919 default:
9920 break;
9922 return NULL_TREE;
9925 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9926 IGNORE is true if the result of the function call is ignored. This
9927 function returns NULL_TREE if no simplification was possible. */
9929 static tree
9930 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9932 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9933 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9934 switch (fcode)
9937 case BUILT_IN_CONSTANT_P:
9939 tree val = fold_builtin_constant_p (arg0);
9941 /* Gimplification will pull the CALL_EXPR for the builtin out of
9942 an if condition. When not optimizing, we'll not CSE it back.
9943 To avoid regressions such as link errors, return false now. */
9944 if (!val && !optimize)
9945 val = integer_zero_node;
9947 return val;
9950 case BUILT_IN_CLASSIFY_TYPE:
9951 return fold_builtin_classify_type (arg0);
9953 case BUILT_IN_STRLEN:
9954 return fold_builtin_strlen (arg0);
9956 CASE_FLT_FN (BUILT_IN_FABS):
9957 return fold_builtin_fabs (arg0, type);
9959 case BUILT_IN_ABS:
9960 case BUILT_IN_LABS:
9961 case BUILT_IN_LLABS:
9962 case BUILT_IN_IMAXABS:
9963 return fold_builtin_abs (arg0, type);
9965 CASE_FLT_FN (BUILT_IN_CONJ):
9966 if (validate_arg (arg0, COMPLEX_TYPE))
9967 return fold_build1 (CONJ_EXPR, type, arg0);
9968 break;
9970 CASE_FLT_FN (BUILT_IN_CREAL):
9971 if (validate_arg (arg0, COMPLEX_TYPE))
9972 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));
9973 break;
9975 CASE_FLT_FN (BUILT_IN_CIMAG):
9976 if (validate_arg (arg0, COMPLEX_TYPE))
9977 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9978 break;
9980 CASE_FLT_FN (BUILT_IN_CCOS):
9981 CASE_FLT_FN (BUILT_IN_CCOSH):
9982 /* These functions are "even", i.e. f(x) == f(-x). */
9983 if (validate_arg (arg0, COMPLEX_TYPE))
9985 tree narg = fold_strip_sign_ops (arg0);
9986 if (narg)
9987 return build_call_expr (fndecl, 1, narg);
9989 break;
9991 CASE_FLT_FN (BUILT_IN_CABS):
9992 return fold_builtin_cabs (arg0, type, fndecl);
9994 CASE_FLT_FN (BUILT_IN_CARG):
9995 return fold_builtin_carg (arg0, type);
9997 CASE_FLT_FN (BUILT_IN_SQRT):
9998 return fold_builtin_sqrt (arg0, type);
10000 CASE_FLT_FN (BUILT_IN_CBRT):
10001 return fold_builtin_cbrt (arg0, type);
10003 CASE_FLT_FN (BUILT_IN_ASIN):
10004 if (validate_arg (arg0, REAL_TYPE))
10005 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10006 &dconstm1, &dconst1, true);
10007 break;
10009 CASE_FLT_FN (BUILT_IN_ACOS):
10010 if (validate_arg (arg0, REAL_TYPE))
10011 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10012 &dconstm1, &dconst1, true);
10013 break;
10015 CASE_FLT_FN (BUILT_IN_ATAN):
10016 if (validate_arg (arg0, REAL_TYPE))
10017 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10018 break;
10020 CASE_FLT_FN (BUILT_IN_ASINH):
10021 if (validate_arg (arg0, REAL_TYPE))
10022 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10023 break;
10025 CASE_FLT_FN (BUILT_IN_ACOSH):
10026 if (validate_arg (arg0, REAL_TYPE))
10027 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10028 &dconst1, NULL, true);
10029 break;
10031 CASE_FLT_FN (BUILT_IN_ATANH):
10032 if (validate_arg (arg0, REAL_TYPE))
10033 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10034 &dconstm1, &dconst1, false);
10035 break;
10037 CASE_FLT_FN (BUILT_IN_SIN):
10038 if (validate_arg (arg0, REAL_TYPE))
10039 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10040 break;
10042 CASE_FLT_FN (BUILT_IN_COS):
10043 return fold_builtin_cos (arg0, type, fndecl);
10046 CASE_FLT_FN (BUILT_IN_TAN):
10047 return fold_builtin_tan (arg0, type);
10049 CASE_FLT_FN (BUILT_IN_CEXP):
10050 return fold_builtin_cexp (arg0, type);
10052 CASE_FLT_FN (BUILT_IN_CEXPI):
10053 if (validate_arg (arg0, REAL_TYPE))
10054 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10055 break;
10057 CASE_FLT_FN (BUILT_IN_SINH):
10058 if (validate_arg (arg0, REAL_TYPE))
10059 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10060 break;
10062 CASE_FLT_FN (BUILT_IN_COSH):
10063 return fold_builtin_cosh (arg0, type, fndecl);
10065 CASE_FLT_FN (BUILT_IN_TANH):
10066 if (validate_arg (arg0, REAL_TYPE))
10067 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10068 break;
10070 CASE_FLT_FN (BUILT_IN_ERF):
10071 if (validate_arg (arg0, REAL_TYPE))
10072 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10073 break;
10075 CASE_FLT_FN (BUILT_IN_ERFC):
10076 if (validate_arg (arg0, REAL_TYPE))
10077 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10078 break;
10080 CASE_FLT_FN (BUILT_IN_TGAMMA):
10081 if (validate_arg (arg0, REAL_TYPE))
10082 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10083 break;
10085 CASE_FLT_FN (BUILT_IN_EXP):
10086 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10088 CASE_FLT_FN (BUILT_IN_EXP2):
10089 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10091 CASE_FLT_FN (BUILT_IN_EXP10):
10092 CASE_FLT_FN (BUILT_IN_POW10):
10093 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10095 CASE_FLT_FN (BUILT_IN_EXPM1):
10096 if (validate_arg (arg0, REAL_TYPE))
10097 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10098 break;
10100 CASE_FLT_FN (BUILT_IN_LOG):
10101 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10103 CASE_FLT_FN (BUILT_IN_LOG2):
10104 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10106 CASE_FLT_FN (BUILT_IN_LOG10):
10107 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10109 CASE_FLT_FN (BUILT_IN_LOG1P):
10110 if (validate_arg (arg0, REAL_TYPE))
10111 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10112 &dconstm1, NULL, false);
10113 break;
10115 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10116 CASE_FLT_FN (BUILT_IN_J0):
10117 if (validate_arg (arg0, REAL_TYPE))
10118 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10119 NULL, NULL, 0);
10120 break;
10122 CASE_FLT_FN (BUILT_IN_J1):
10123 if (validate_arg (arg0, REAL_TYPE))
10124 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10125 NULL, NULL, 0);
10126 break;
10128 CASE_FLT_FN (BUILT_IN_Y0):
10129 if (validate_arg (arg0, REAL_TYPE))
10130 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10131 &dconst0, NULL, false);
10132 break;
10134 CASE_FLT_FN (BUILT_IN_Y1):
10135 if (validate_arg (arg0, REAL_TYPE))
10136 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10137 &dconst0, NULL, false);
10138 break;
10139 #endif
10141 CASE_FLT_FN (BUILT_IN_NAN):
10142 case BUILT_IN_NAND32:
10143 case BUILT_IN_NAND64:
10144 case BUILT_IN_NAND128:
10145 return fold_builtin_nan (arg0, type, true);
10147 CASE_FLT_FN (BUILT_IN_NANS):
10148 return fold_builtin_nan (arg0, type, false);
10150 CASE_FLT_FN (BUILT_IN_FLOOR):
10151 return fold_builtin_floor (fndecl, arg0);
10153 CASE_FLT_FN (BUILT_IN_CEIL):
10154 return fold_builtin_ceil (fndecl, arg0);
10156 CASE_FLT_FN (BUILT_IN_TRUNC):
10157 return fold_builtin_trunc (fndecl, arg0);
10159 CASE_FLT_FN (BUILT_IN_ROUND):
10160 return fold_builtin_round (fndecl, arg0);
10162 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10163 CASE_FLT_FN (BUILT_IN_RINT):
10164 return fold_trunc_transparent_mathfn (fndecl, arg0);
10166 CASE_FLT_FN (BUILT_IN_LCEIL):
10167 CASE_FLT_FN (BUILT_IN_LLCEIL):
10168 CASE_FLT_FN (BUILT_IN_LFLOOR):
10169 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10170 CASE_FLT_FN (BUILT_IN_LROUND):
10171 CASE_FLT_FN (BUILT_IN_LLROUND):
10172 return fold_builtin_int_roundingfn (fndecl, arg0);
10174 CASE_FLT_FN (BUILT_IN_LRINT):
10175 CASE_FLT_FN (BUILT_IN_LLRINT):
10176 return fold_fixed_mathfn (fndecl, arg0);
10178 case BUILT_IN_BSWAP32:
10179 case BUILT_IN_BSWAP64:
10180 return fold_builtin_bswap (fndecl, arg0);
10182 CASE_INT_FN (BUILT_IN_FFS):
10183 CASE_INT_FN (BUILT_IN_CLZ):
10184 CASE_INT_FN (BUILT_IN_CTZ):
10185 CASE_INT_FN (BUILT_IN_POPCOUNT):
10186 CASE_INT_FN (BUILT_IN_PARITY):
10187 return fold_builtin_bitop (fndecl, arg0);
10189 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10190 return fold_builtin_signbit (arg0, type);
10192 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10193 return fold_builtin_significand (arg0, type);
10195 CASE_FLT_FN (BUILT_IN_ILOGB):
10196 CASE_FLT_FN (BUILT_IN_LOGB):
10197 return fold_builtin_logb (arg0, type);
10199 case BUILT_IN_ISASCII:
10200 return fold_builtin_isascii (arg0);
10202 case BUILT_IN_TOASCII:
10203 return fold_builtin_toascii (arg0);
10205 case BUILT_IN_ISDIGIT:
10206 return fold_builtin_isdigit (arg0);
10208 CASE_FLT_FN (BUILT_IN_FINITE):
10209 case BUILT_IN_FINITED32:
10210 case BUILT_IN_FINITED64:
10211 case BUILT_IN_FINITED128:
10212 case BUILT_IN_ISFINITE:
10213 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10215 CASE_FLT_FN (BUILT_IN_ISINF):
10216 case BUILT_IN_ISINFD32:
10217 case BUILT_IN_ISINFD64:
10218 case BUILT_IN_ISINFD128:
10219 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10221 case BUILT_IN_ISINF_SIGN:
10222 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10224 CASE_FLT_FN (BUILT_IN_ISNAN):
10225 case BUILT_IN_ISNAND32:
10226 case BUILT_IN_ISNAND64:
10227 case BUILT_IN_ISNAND128:
10228 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10230 case BUILT_IN_PRINTF:
10231 case BUILT_IN_PRINTF_UNLOCKED:
10232 case BUILT_IN_VPRINTF:
10233 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10235 default:
10236 break;
10239 return NULL_TREE;
10243 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10244 IGNORE is true if the result of the function call is ignored. This
10245 function returns NULL_TREE if no simplification was possible. */
10247 static tree
10248 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10250 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10251 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10253 switch (fcode)
10255 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10256 CASE_FLT_FN (BUILT_IN_JN):
10257 if (validate_arg (arg0, INTEGER_TYPE)
10258 && validate_arg (arg1, REAL_TYPE))
10259 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10260 break;
10262 CASE_FLT_FN (BUILT_IN_YN):
10263 if (validate_arg (arg0, INTEGER_TYPE)
10264 && validate_arg (arg1, REAL_TYPE))
10265 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10266 &dconst0, false);
10267 break;
10269 CASE_FLT_FN (BUILT_IN_DREM):
10270 CASE_FLT_FN (BUILT_IN_REMAINDER):
10271 if (validate_arg (arg0, REAL_TYPE)
10272 && validate_arg (arg1, REAL_TYPE))
10273 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10274 break;
10276 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10277 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10278 if (validate_arg (arg0, REAL_TYPE)
10279 && validate_arg (arg1, POINTER_TYPE))
10280 return do_mpfr_lgamma_r (arg0, arg1, type);
10281 break;
10282 #endif
10284 CASE_FLT_FN (BUILT_IN_ATAN2):
10285 if (validate_arg (arg0, REAL_TYPE)
10286 && validate_arg (arg1, REAL_TYPE))
10287 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10288 break;
10290 CASE_FLT_FN (BUILT_IN_FDIM):
10291 if (validate_arg (arg0, REAL_TYPE)
10292 && validate_arg (arg1, REAL_TYPE))
10293 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10294 break;
10296 CASE_FLT_FN (BUILT_IN_HYPOT):
10297 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10299 CASE_FLT_FN (BUILT_IN_LDEXP):
10300 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10301 CASE_FLT_FN (BUILT_IN_SCALBN):
10302 CASE_FLT_FN (BUILT_IN_SCALBLN):
10303 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10305 CASE_FLT_FN (BUILT_IN_FREXP):
10306 return fold_builtin_frexp (arg0, arg1, type);
10308 CASE_FLT_FN (BUILT_IN_MODF):
10309 return fold_builtin_modf (arg0, arg1, type);
10311 case BUILT_IN_BZERO:
10312 return fold_builtin_bzero (arg0, arg1, ignore);
10314 case BUILT_IN_FPUTS:
10315 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10317 case BUILT_IN_FPUTS_UNLOCKED:
10318 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10320 case BUILT_IN_STRSTR:
10321 return fold_builtin_strstr (arg0, arg1, type);
10323 case BUILT_IN_STRCAT:
10324 return fold_builtin_strcat (arg0, arg1);
10326 case BUILT_IN_STRSPN:
10327 return fold_builtin_strspn (arg0, arg1);
10329 case BUILT_IN_STRCSPN:
10330 return fold_builtin_strcspn (arg0, arg1);
10332 case BUILT_IN_STRCHR:
10333 case BUILT_IN_INDEX:
10334 return fold_builtin_strchr (arg0, arg1, type);
10336 case BUILT_IN_STRRCHR:
10337 case BUILT_IN_RINDEX:
10338 return fold_builtin_strrchr (arg0, arg1, type);
10340 case BUILT_IN_STRCPY:
10341 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10343 case BUILT_IN_STRCMP:
10344 return fold_builtin_strcmp (arg0, arg1);
10346 case BUILT_IN_STRPBRK:
10347 return fold_builtin_strpbrk (arg0, arg1, type);
10349 case BUILT_IN_EXPECT:
10350 return fold_builtin_expect (arg0, arg1);
10352 CASE_FLT_FN (BUILT_IN_POW):
10353 return fold_builtin_pow (fndecl, arg0, arg1, type);
10355 CASE_FLT_FN (BUILT_IN_POWI):
10356 return fold_builtin_powi (fndecl, arg0, arg1, type);
10358 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10359 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10361 CASE_FLT_FN (BUILT_IN_FMIN):
10362 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10364 CASE_FLT_FN (BUILT_IN_FMAX):
10365 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10367 case BUILT_IN_ISGREATER:
10368 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10369 case BUILT_IN_ISGREATEREQUAL:
10370 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10371 case BUILT_IN_ISLESS:
10372 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10373 case BUILT_IN_ISLESSEQUAL:
10374 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10375 case BUILT_IN_ISLESSGREATER:
10376 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10377 case BUILT_IN_ISUNORDERED:
10378 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10379 NOP_EXPR);
10381 /* We do the folding for va_start in the expander. */
10382 case BUILT_IN_VA_START:
10383 break;
10385 case BUILT_IN_SPRINTF:
10386 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10388 case BUILT_IN_OBJECT_SIZE:
10389 return fold_builtin_object_size (arg0, arg1);
10391 case BUILT_IN_PRINTF:
10392 case BUILT_IN_PRINTF_UNLOCKED:
10393 case BUILT_IN_VPRINTF:
10394 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10396 case BUILT_IN_PRINTF_CHK:
10397 case BUILT_IN_VPRINTF_CHK:
10398 if (!validate_arg (arg0, INTEGER_TYPE)
10399 || TREE_SIDE_EFFECTS (arg0))
10400 return NULL_TREE;
10401 else
10402 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10403 break;
10405 case BUILT_IN_FPRINTF:
10406 case BUILT_IN_FPRINTF_UNLOCKED:
10407 case BUILT_IN_VFPRINTF:
10408 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10409 ignore, fcode);
10411 default:
10412 break;
10414 return NULL_TREE;
10417 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10418 and ARG2. IGNORE is true if the result of the function call is ignored.
10419 This function returns NULL_TREE if no simplification was possible. */
10421 static tree
10422 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10424 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10425 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10426 switch (fcode)
10429 CASE_FLT_FN (BUILT_IN_SINCOS):
10430 return fold_builtin_sincos (arg0, arg1, arg2);
10432 CASE_FLT_FN (BUILT_IN_FMA):
10433 if (validate_arg (arg0, REAL_TYPE)
10434 && validate_arg (arg1, REAL_TYPE)
10435 && validate_arg (arg2, REAL_TYPE))
10436 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10437 break;
10439 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10440 CASE_FLT_FN (BUILT_IN_REMQUO):
10441 if (validate_arg (arg0, REAL_TYPE)
10442 && validate_arg (arg1, REAL_TYPE)
10443 && validate_arg (arg2, POINTER_TYPE))
10444 return do_mpfr_remquo (arg0, arg1, arg2);
10445 break;
10446 #endif
10448 case BUILT_IN_MEMSET:
10449 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10451 case BUILT_IN_BCOPY:
10452 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10454 case BUILT_IN_MEMCPY:
10455 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10457 case BUILT_IN_MEMPCPY:
10458 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10460 case BUILT_IN_MEMMOVE:
10461 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10463 case BUILT_IN_STRNCAT:
10464 return fold_builtin_strncat (arg0, arg1, arg2);
10466 case BUILT_IN_STRNCPY:
10467 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10469 case BUILT_IN_STRNCMP:
10470 return fold_builtin_strncmp (arg0, arg1, arg2);
10472 case BUILT_IN_MEMCHR:
10473 return fold_builtin_memchr (arg0, arg1, arg2, type);
10475 case BUILT_IN_BCMP:
10476 case BUILT_IN_MEMCMP:
10477 return fold_builtin_memcmp (arg0, arg1, arg2);
10479 case BUILT_IN_SPRINTF:
10480 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10482 case BUILT_IN_STRCPY_CHK:
10483 case BUILT_IN_STPCPY_CHK:
10484 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10485 ignore, fcode);
10487 case BUILT_IN_STRCAT_CHK:
10488 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10490 case BUILT_IN_PRINTF_CHK:
10491 case BUILT_IN_VPRINTF_CHK:
10492 if (!validate_arg (arg0, INTEGER_TYPE)
10493 || TREE_SIDE_EFFECTS (arg0))
10494 return NULL_TREE;
10495 else
10496 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10497 break;
10499 case BUILT_IN_FPRINTF:
10500 case BUILT_IN_FPRINTF_UNLOCKED:
10501 case BUILT_IN_VFPRINTF:
10502 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10504 case BUILT_IN_FPRINTF_CHK:
10505 case BUILT_IN_VFPRINTF_CHK:
10506 if (!validate_arg (arg1, INTEGER_TYPE)
10507 || TREE_SIDE_EFFECTS (arg1))
10508 return NULL_TREE;
10509 else
10510 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10511 ignore, fcode);
10513 default:
10514 break;
10516 return NULL_TREE;
10519 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10520 ARG2, and ARG3. IGNORE is true if the result of the function call is
10521 ignored. This function returns NULL_TREE if no simplification was
10522 possible. */
10524 static tree
10525 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10526 bool ignore)
10528 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10530 switch (fcode)
10532 case BUILT_IN_MEMCPY_CHK:
10533 case BUILT_IN_MEMPCPY_CHK:
10534 case BUILT_IN_MEMMOVE_CHK:
10535 case BUILT_IN_MEMSET_CHK:
10536 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10537 NULL_TREE, ignore,
10538 DECL_FUNCTION_CODE (fndecl));
10540 case BUILT_IN_STRNCPY_CHK:
10541 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10543 case BUILT_IN_STRNCAT_CHK:
10544 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10546 case BUILT_IN_FPRINTF_CHK:
10547 case BUILT_IN_VFPRINTF_CHK:
10548 if (!validate_arg (arg1, INTEGER_TYPE)
10549 || TREE_SIDE_EFFECTS (arg1))
10550 return NULL_TREE;
10551 else
10552 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10553 ignore, fcode);
10554 break;
10556 default:
10557 break;
10559 return NULL_TREE;
10562 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10563 arguments, where NARGS <= 4. IGNORE is true if the result of the
10564 function call is ignored. This function returns NULL_TREE if no
10565 simplification was possible. Note that this only folds builtins with
10566 fixed argument patterns. Foldings that do varargs-to-varargs
10567 transformations, or that match calls with more than 4 arguments,
10568 need to be handled with fold_builtin_varargs instead. */
10570 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10572 static tree
10573 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10575 tree ret = NULL_TREE;
10577 switch (nargs)
10579 case 0:
10580 ret = fold_builtin_0 (fndecl, ignore);
10581 break;
10582 case 1:
10583 ret = fold_builtin_1 (fndecl, args[0], ignore);
10584 break;
10585 case 2:
10586 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10587 break;
10588 case 3:
10589 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10590 break;
10591 case 4:
10592 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10593 ignore);
10594 break;
10595 default:
10596 break;
10598 if (ret)
10600 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10601 TREE_NO_WARNING (ret) = 1;
10602 return ret;
10604 return NULL_TREE;
10607 /* Builtins with folding operations that operate on "..." arguments
10608 need special handling; we need to store the arguments in a convenient
10609 data structure before attempting any folding. Fortunately there are
10610 only a few builtins that fall into this category. FNDECL is the
10611 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10612 result of the function call is ignored. */
10614 static tree
10615 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10617 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10618 tree ret = NULL_TREE;
10620 switch (fcode)
10622 case BUILT_IN_SPRINTF_CHK:
10623 case BUILT_IN_VSPRINTF_CHK:
10624 ret = fold_builtin_sprintf_chk (exp, fcode);
10625 break;
10627 case BUILT_IN_SNPRINTF_CHK:
10628 case BUILT_IN_VSNPRINTF_CHK:
10629 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10630 break;
10632 case BUILT_IN_FPCLASSIFY:
10633 ret = fold_builtin_fpclassify (exp);
10634 break;
10636 default:
10637 break;
10639 if (ret)
10641 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10642 TREE_NO_WARNING (ret) = 1;
10643 return ret;
10645 return NULL_TREE;
10648 /* A wrapper function for builtin folding that prevents warnings for
10649 "statement without effect" and the like, caused by removing the
10650 call node earlier than the warning is generated. */
10652 tree
10653 fold_call_expr (tree exp, bool ignore)
10655 tree ret = NULL_TREE;
10656 tree fndecl = get_callee_fndecl (exp);
10657 if (fndecl
10658 && TREE_CODE (fndecl) == FUNCTION_DECL
10659 && DECL_BUILT_IN (fndecl)
10660 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10661 yet. Defer folding until we see all the arguments
10662 (after inlining). */
10663 && !CALL_EXPR_VA_ARG_PACK (exp))
10665 int nargs = call_expr_nargs (exp);
10667 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10668 instead last argument is __builtin_va_arg_pack (). Defer folding
10669 even in that case, until arguments are finalized. */
10670 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10672 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10673 if (fndecl2
10674 && TREE_CODE (fndecl2) == FUNCTION_DECL
10675 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10676 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10677 return NULL_TREE;
10680 /* FIXME: Don't use a list in this interface. */
10681 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10682 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10683 else
10685 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10687 tree *args = CALL_EXPR_ARGP (exp);
10688 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10690 if (!ret)
10691 ret = fold_builtin_varargs (fndecl, exp, ignore);
10692 if (ret)
10694 /* Propagate location information from original call to
10695 expansion of builtin. Otherwise things like
10696 maybe_emit_chk_warning, that operate on the expansion
10697 of a builtin, will use the wrong location information. */
10698 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10700 tree realret = ret;
10701 if (TREE_CODE (ret) == NOP_EXPR)
10702 realret = TREE_OPERAND (ret, 0);
10703 if (CAN_HAVE_LOCATION_P (realret)
10704 && !EXPR_HAS_LOCATION (realret))
10705 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10706 return realret;
10708 return ret;
10712 return NULL_TREE;
10715 /* Conveniently construct a function call expression. FNDECL names the
10716 function to be called and ARGLIST is a TREE_LIST of arguments. */
10718 tree
10719 build_function_call_expr (tree fndecl, tree arglist)
10721 tree fntype = TREE_TYPE (fndecl);
10722 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10723 int n = list_length (arglist);
10724 tree *argarray = (tree *) alloca (n * sizeof (tree));
10725 int i;
10727 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10728 argarray[i] = TREE_VALUE (arglist);
10729 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10732 /* Conveniently construct a function call expression. FNDECL names the
10733 function to be called, N is the number of arguments, and the "..."
10734 parameters are the argument expressions. */
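/* For example, build_call_expr (fndecl, 2, arg0, arg1) builds a call to
   FNDECL with the two arguments ARG0 and ARG1 and hands it to
   fold_builtin_call_array for folding. */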
10736 tree
10737 build_call_expr (tree fndecl, int n, ...)
10739 va_list ap;
10740 tree fntype = TREE_TYPE (fndecl);
10741 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10742 tree *argarray = (tree *) alloca (n * sizeof (tree));
10743 int i;
10745 va_start (ap, n);
10746 for (i = 0; i < n; i++)
10747 argarray[i] = va_arg (ap, tree);
10748 va_end (ap);
10749 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10752 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10753 N arguments are passed in the array ARGARRAY. */
10755 tree
10756 fold_builtin_call_array (tree type,
10757 tree fn,
10758 int n,
10759 tree *argarray)
10761 tree ret = NULL_TREE;
10762 int i;
10763 tree exp;
10765 if (TREE_CODE (fn) == ADDR_EXPR)
10767 tree fndecl = TREE_OPERAND (fn, 0);
10768 if (TREE_CODE (fndecl) == FUNCTION_DECL
10769 && DECL_BUILT_IN (fndecl))
10771 /* If last argument is __builtin_va_arg_pack (), arguments to this
10772 function are not finalized yet. Defer folding until they are. */
10773 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10775 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10776 if (fndecl2
10777 && TREE_CODE (fndecl2) == FUNCTION_DECL
10778 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10779 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10780 return build_call_array (type, fn, n, argarray);
10782 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10784 tree arglist = NULL_TREE;
10785 for (i = n - 1; i >= 0; i--)
10786 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10787 ret = targetm.fold_builtin (fndecl, arglist, false);
10788 if (ret)
10789 return ret;
10791 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10793 /* First try the transformations that don't require consing up
10794 an exp. */
10795 ret = fold_builtin_n (fndecl, argarray, n, false);
10796 if (ret)
10797 return ret;
10800 /* If we got this far, we need to build an exp. */
10801 exp = build_call_array (type, fn, n, argarray);
10802 ret = fold_builtin_varargs (fndecl, exp, false);
10803 return ret ? ret : exp;
10807 return build_call_array (type, fn, n, argarray);
10810 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10811 along with N new arguments specified as the "..." parameters. SKIP
10812 is the number of arguments in EXP to be omitted. This function is used
10813 to do varargs-to-varargs transformations. */
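/* For example, rewrite_call_expr (exp, 2, fndecl, 1, newarg) builds a call
   to FNDECL whose arguments are NEWARG followed by all but the first two
   arguments of EXP. */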
10815 static tree
10816 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10818 int oldnargs = call_expr_nargs (exp);
10819 int nargs = oldnargs - skip + n;
10820 tree fntype = TREE_TYPE (fndecl);
10821 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10822 tree *buffer;
10824 if (n > 0)
10826 int i, j;
10827 va_list ap;
10829 buffer = XALLOCAVEC (tree, nargs);
10830 va_start (ap, n);
10831 for (i = 0; i < n; i++)
10832 buffer[i] = va_arg (ap, tree);
10833 va_end (ap);
10834 for (j = skip; j < oldnargs; j++, i++)
10835 buffer[i] = CALL_EXPR_ARG (exp, j);
10837 else
10838 buffer = CALL_EXPR_ARGP (exp) + skip;
10840 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10843 /* Validate a single argument ARG against a tree code CODE representing
10844 a type. */
10846 static bool
10847 validate_arg (const_tree arg, enum tree_code code)
10849 if (!arg)
10850 return false;
10851 else if (code == POINTER_TYPE)
10852 return POINTER_TYPE_P (TREE_TYPE (arg));
10853 else if (code == INTEGER_TYPE)
10854 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10855 return code == TREE_CODE (TREE_TYPE (arg));
10858 /* This function validates the types of a function call argument list
10859 against a specified list of tree_codes. If the last specifier is a 0,
10860 that represents an ellipsis; otherwise the last specifier must be a
10861 VOID_TYPE.
10863 This is the GIMPLE version of validate_arglist. Eventually we want to
10864 completely convert builtins.c to work from GIMPLEs and the tree based
10865 validate_arglist will then be removed. */
10867 bool
10868 validate_gimple_arglist (const_gimple call, ...)
10870 enum tree_code code;
10871 bool res = 0;
10872 va_list ap;
10873 const_tree arg;
10874 size_t i;
10876 va_start (ap, call);
10877 i = 0;
10881 code = va_arg (ap, enum tree_code);
10882 switch (code)
10884 case 0:
10885 /* This signifies an ellipsis; any further arguments are all OK. */
10886 res = true;
10887 goto end;
10888 case VOID_TYPE:
10889 /* This signifies an endlink; if no arguments remain, return
10890 true, otherwise return false. */
10891 res = (i == gimple_call_num_args (call));
10892 goto end;
10893 default:
10894 /* If no parameters remain or the parameter's code does not
10895 match the specified code, return false. Otherwise continue
10896 checking any remaining arguments. */
10897 arg = gimple_call_arg (call, i++);
10898 if (!validate_arg (arg, code))
10899 goto end;
10900 break;
10903 while (1);
10905 /* We need gotos here so that va_end is called at a single
10906 exit point. */
10907 end: ;
10908 va_end (ap);
10910 return res;
10913 /* This function validates the types of a function call argument list
10914 against a specified list of tree_codes. If the last specifier is a 0,
10915 that represents an ellipsis; otherwise the last specifier must be a
10916 VOID_TYPE. */
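/* For example, validate_arglist (exp, REAL_TYPE, POINTER_TYPE, VOID_TYPE)
   returns true only if EXP has exactly two arguments, the first of real
   type and the second of pointer type. */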
10918 bool
10919 validate_arglist (const_tree callexpr, ...)
10921 enum tree_code code;
10922 bool res = 0;
10923 va_list ap;
10924 const_call_expr_arg_iterator iter;
10925 const_tree arg;
10927 va_start (ap, callexpr);
10928 init_const_call_expr_arg_iterator (callexpr, &iter);
10932 code = va_arg (ap, enum tree_code);
10933 switch (code)
10935 case 0:
10936 /* This signifies an ellipsis; any further arguments are all OK. */
10937 res = true;
10938 goto end;
10939 case VOID_TYPE:
10940 /* This signifies an endlink; if no arguments remain, return
10941 true, otherwise return false. */
10942 res = !more_const_call_expr_args_p (&iter);
10943 goto end;
10944 default:
10945 /* If no parameters remain or the parameter's code does not
10946 match the specified code, return false. Otherwise continue
10947 checking any remaining arguments. */
10948 arg = next_const_call_expr_arg (&iter);
10949 if (!validate_arg (arg, code))
10950 goto end;
10951 break;
10954 while (1);
10956 /* We need gotos here so that va_end is called at a single
10957 exit point. */
10958 end: ;
10959 va_end (ap);
10961 return res;
10964 /* Default target-specific builtin expander that does nothing. */
10966 rtx
10967 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10968 rtx target ATTRIBUTE_UNUSED,
10969 rtx subtarget ATTRIBUTE_UNUSED,
10970 enum machine_mode mode ATTRIBUTE_UNUSED,
10971 int ignore ATTRIBUTE_UNUSED)
10973 return NULL_RTX;
10976 /* Returns true if EXP represents data that would potentially reside
10977 in a readonly section. */
10979 static bool
10980 readonly_data_expr (tree exp)
10982 STRIP_NOPS (exp);
10984 if (TREE_CODE (exp) != ADDR_EXPR)
10985 return false;
10987 exp = get_base_address (TREE_OPERAND (exp, 0));
10988 if (!exp)
10989 return false;
10991 /* Make sure we call decl_readonly_section only for trees it
10992 can handle (since it returns true for everything it doesn't
10993 understand). */
10994 if (TREE_CODE (exp) == STRING_CST
10995 || TREE_CODE (exp) == CONSTRUCTOR
10996 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10997 return decl_readonly_section (exp, 0);
10998 else
10999 return false;
11002 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11003 to the call, and TYPE is its return type.
11005 Return NULL_TREE if no simplification was possible, otherwise return the
11006 simplified form of the call as a tree.
11008 The simplified form may be a constant or other expression which
11009 computes the same value, but in a more efficient manner (including
11010 calls to other builtin functions).
11012 The call may contain arguments which need to be evaluated, but
11013 which are not useful to determine the result of the call. In
11014 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11015 COMPOUND_EXPR will be an argument which must be evaluated.
11016 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11017 COMPOUND_EXPR in the chain will contain the tree for the simplified
11018 form of the builtin function call. */
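/* For example, strstr (s, "") folds to s and strstr (s, "c") becomes a
   call to strchr (s, 'c'). */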
11020 static tree
11021 fold_builtin_strstr (tree s1, tree s2, tree type)
11023 if (!validate_arg (s1, POINTER_TYPE)
11024 || !validate_arg (s2, POINTER_TYPE))
11025 return NULL_TREE;
11026 else
11028 tree fn;
11029 const char *p1, *p2;
11031 p2 = c_getstr (s2);
11032 if (p2 == NULL)
11033 return NULL_TREE;
11035 p1 = c_getstr (s1);
11036 if (p1 != NULL)
11038 const char *r = strstr (p1, p2);
11039 tree tem;
11041 if (r == NULL)
11042 return build_int_cst (TREE_TYPE (s1), 0);
11044 /* Return an offset into the constant string argument. */
11045 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11046 s1, size_int (r - p1));
11047 return fold_convert (type, tem);
11050 /* The argument is const char *, and the result is char *, so we need
11051 a type conversion here to avoid a warning. */
11052 if (p2[0] == '\0')
11053 return fold_convert (type, s1);
11055 if (p2[1] != '\0')
11056 return NULL_TREE;
11058 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11059 if (!fn)
11060 return NULL_TREE;
11062 /* New argument list transforming strstr(s1, s2) to
11063 strchr(s1, s2[0]). */
11064 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11068 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11069 the call, and TYPE is its return type.
11071 Return NULL_TREE if no simplification was possible, otherwise return the
11072 simplified form of the call as a tree.
11074 The simplified form may be a constant or other expression which
11075 computes the same value, but in a more efficient manner (including
11076 calls to other builtin functions).
11078 The call may contain arguments which need to be evaluated, but
11079 which are not useful to determine the result of the call. In
11080 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11081 COMPOUND_EXPR will be an argument which must be evaluated.
11082 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11083 COMPOUND_EXPR in the chain will contain the tree for the simplified
11084 form of the builtin function call. */
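/* For example, with a constant haystack the call is evaluated at
   compile time:

     strchr ("hello", 'l')  folds to  "hello" + 2
     strchr ("hello", 'z')  folds to  (char *) 0

   A non-constant first argument is left alone.  */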
11086 static tree
11087 fold_builtin_strchr (tree s1, tree s2, tree type)
11089 if (!validate_arg (s1, POINTER_TYPE)
11090 || !validate_arg (s2, INTEGER_TYPE))
11091 return NULL_TREE;
11092 else
11094 const char *p1;
11096 if (TREE_CODE (s2) != INTEGER_CST)
11097 return NULL_TREE;
11099 p1 = c_getstr (s1);
11100 if (p1 != NULL)
11102 char c;
11103 const char *r;
11104 tree tem;
11106 if (target_char_cast (s2, &c))
11107 return NULL_TREE;
11109 r = strchr (p1, c);
11111 if (r == NULL)
11112 return build_int_cst (TREE_TYPE (s1), 0);
11114 /* Return an offset into the constant string argument. */
11115 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11116 s1, size_int (r - p1));
11117 return fold_convert (type, tem);
11119 return NULL_TREE;
11123 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11124 the call, and TYPE is its return type.
11126 Return NULL_TREE if no simplification was possible, otherwise return the
11127 simplified form of the call as a tree.
11129 The simplified form may be a constant or other expression which
11130 computes the same value, but in a more efficient manner (including
11131 calls to other builtin functions).
11133 The call may contain arguments which need to be evaluated, but
11134 which are not useful to determine the result of the call. In
11135 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11136 COMPOUND_EXPR will be an argument which must be evaluated.
11137 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11138 COMPOUND_EXPR in the chain will contain the tree for the simplified
11139 form of the builtin function call. */
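/* For example:

     strrchr ("abcabc", 'b')  folds to  "abcabc" + 4
     strrchr (s, '\0')        folds to  strchr (s, '\0')

   since searching for the terminating NUL from either end finds the
   same character.  */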
11141 static tree
11142 fold_builtin_strrchr (tree s1, tree s2, tree type)
11144 if (!validate_arg (s1, POINTER_TYPE)
11145 || !validate_arg (s2, INTEGER_TYPE))
11146 return NULL_TREE;
11147 else
11149 tree fn;
11150 const char *p1;
11152 if (TREE_CODE (s2) != INTEGER_CST)
11153 return NULL_TREE;
11155 p1 = c_getstr (s1);
11156 if (p1 != NULL)
11158 char c;
11159 const char *r;
11160 tree tem;
11162 if (target_char_cast (s2, &c))
11163 return NULL_TREE;
11165 r = strrchr (p1, c);
11167 if (r == NULL)
11168 return build_int_cst (TREE_TYPE (s1), 0);
11170 /* Return an offset into the constant string argument. */
11171 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11172 s1, size_int (r - p1));
11173 return fold_convert (type, tem);
11176 if (! integer_zerop (s2))
11177 return NULL_TREE;
11179 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11180 if (!fn)
11181 return NULL_TREE;
11183 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11184 return build_call_expr (fn, 2, s1, s2);
11188 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11189 to the call, and TYPE is its return type.
11191 Return NULL_TREE if no simplification was possible, otherwise return the
11192 simplified form of the call as a tree.
11194 The simplified form may be a constant or other expression which
11195 computes the same value, but in a more efficient manner (including
11196 calls to other builtin functions).
11198 The call may contain arguments which need to be evaluated, but
11199 which are not useful to determine the result of the call. In
11200 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11201 COMPOUND_EXPR will be an argument which must be evaluated.
11202 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11203 COMPOUND_EXPR in the chain will contain the tree for the simplified
11204 form of the builtin function call. */
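/* For example:

     strpbrk (s, "")        folds to  (char *) 0  (s is still evaluated)
     strpbrk (s, "c")       folds to  strchr (s, 'c')
     strpbrk ("abc", "dc")  folds to  "abc" + 2

   Anything else falls through to the real strpbrk.  */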
11206 static tree
11207 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11209 if (!validate_arg (s1, POINTER_TYPE)
11210 || !validate_arg (s2, POINTER_TYPE))
11211 return NULL_TREE;
11212 else
11214 tree fn;
11215 const char *p1, *p2;
11217 p2 = c_getstr (s2);
11218 if (p2 == NULL)
11219 return NULL_TREE;
11221 p1 = c_getstr (s1);
11222 if (p1 != NULL)
11224 const char *r = strpbrk (p1, p2);
11225 tree tem;
11227 if (r == NULL)
11228 return build_int_cst (TREE_TYPE (s1), 0);
11230 /* Return an offset into the constant string argument. */
11231 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11232 s1, size_int (r - p1));
11233 return fold_convert (type, tem);
11236 if (p2[0] == '\0')
11237 /* strpbrk(x, "") == NULL.
11238 Evaluate and ignore s1 in case it had side-effects. */
11239 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11241 if (p2[1] != '\0')
11242 return NULL_TREE; /* Really call strpbrk. */
11244 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11245 if (!fn)
11246 return NULL_TREE;
11248 /* New argument list transforming strpbrk(s1, s2) to
11249 strchr(s1, s2[0]). */
11250 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11254 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11255 to the call.
11257 Return NULL_TREE if no simplification was possible, otherwise return the
11258 simplified form of the call as a tree.
11260 The simplified form may be a constant or other expression which
11261 computes the same value, but in a more efficient manner (including
11262 calls to other builtin functions).
11264 The call may contain arguments which need to be evaluated, but
11265 which are not useful to determine the result of the call. In
11266 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11267 COMPOUND_EXPR will be an argument which must be evaluated.
11268 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11269 COMPOUND_EXPR in the chain will contain the tree for the simplified
11270 form of the builtin function call. */
11272 static tree
11273 fold_builtin_strcat (tree dst, tree src)
11275 if (!validate_arg (dst, POINTER_TYPE)
11276 || !validate_arg (src, POINTER_TYPE))
11277 return NULL_TREE;
11278 else
11280 const char *p = c_getstr (src);
11282 /* If the string length is zero, return the dst parameter. */
11283 if (p && *p == '\0')
11284 return dst;
11286 return NULL_TREE;
11290 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11291 arguments to the call.
11293 Return NULL_TREE if no simplification was possible, otherwise return the
11294 simplified form of the call as a tree.
11296 The simplified form may be a constant or other expression which
11297 computes the same value, but in a more efficient manner (including
11298 calls to other builtin functions).
11300 The call may contain arguments which need to be evaluated, but
11301 which are not useful to determine the result of the call. In
11302 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11303 COMPOUND_EXPR will be an argument which must be evaluated.
11304 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11305 COMPOUND_EXPR in the chain will contain the tree for the simplified
11306 form of the builtin function call. */
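/* For example:

     strncat (dst, src, 0)   folds to  dst  (src is still evaluated)
     strncat (dst, "", n)    folds to  dst
     strncat (dst, "ab", 5)  folds to  strcat (dst, "ab")

   since a bound of at least strlen (src) makes the two calls behave
   identically.  */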
11308 static tree
11309 fold_builtin_strncat (tree dst, tree src, tree len)
11311 if (!validate_arg (dst, POINTER_TYPE)
11312 || !validate_arg (src, POINTER_TYPE)
11313 || !validate_arg (len, INTEGER_TYPE))
11314 return NULL_TREE;
11315 else
11317 const char *p = c_getstr (src);
11319 /* If the requested length is zero, or the src parameter string
11320 length is zero, return the dst parameter. */
11321 if (integer_zerop (len) || (p && *p == '\0'))
11322 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11324 /* If the requested len is greater than or equal to the string
11325 length, call strcat. */
11326 if (TREE_CODE (len) == INTEGER_CST && p
11327 && compare_tree_int (len, strlen (p)) >= 0)
11329 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11331 /* If the replacement _DECL isn't initialized, don't do the
11332 transformation. */
11333 if (!fn)
11334 return NULL_TREE;
11336 return build_call_expr (fn, 2, dst, src);
11338 return NULL_TREE;
11342 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11343 to the call.
11345 Return NULL_TREE if no simplification was possible, otherwise return the
11346 simplified form of the call as a tree.
11348 The simplified form may be a constant or other expression which
11349 computes the same value, but in a more efficient manner (including
11350 calls to other builtin functions).
11352 The call may contain arguments which need to be evaluated, but
11353 which are not useful to determine the result of the call. In
11354 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11355 COMPOUND_EXPR will be an argument which must be evaluated.
11356 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11357 COMPOUND_EXPR in the chain will contain the tree for the simplified
11358 form of the builtin function call. */
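/* For example:

     strspn ("aab", "ab")  folds to  3
     strspn (s, "")        folds to  0  (s is still evaluated)
     strspn ("", s)        folds to  0

   Other forms are left for the library.  */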
11360 static tree
11361 fold_builtin_strspn (tree s1, tree s2)
11363 if (!validate_arg (s1, POINTER_TYPE)
11364 || !validate_arg (s2, POINTER_TYPE))
11365 return NULL_TREE;
11366 else
11368 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11370 /* If both arguments are constants, evaluate at compile-time. */
11371 if (p1 && p2)
11373 const size_t r = strspn (p1, p2);
11374 return size_int (r);
11377 /* If either argument is "", the result is 0. */
11378 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11379 /* Evaluate and ignore both arguments in case either one has
11380 side-effects. */
11381 return omit_two_operands (integer_type_node, integer_zero_node,
11382 s1, s2);
11383 return NULL_TREE;
11387 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11388 to the call.
11390 Return NULL_TREE if no simplification was possible, otherwise return the
11391 simplified form of the call as a tree.
11393 The simplified form may be a constant or other expression which
11394 computes the same value, but in a more efficient manner (including
11395 calls to other builtin functions).
11397 The call may contain arguments which need to be evaluated, but
11398 which are not useful to determine the result of the call. In
11399 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11400 COMPOUND_EXPR will be an argument which must be evaluated.
11401 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11402 COMPOUND_EXPR in the chain will contain the tree for the simplified
11403 form of the builtin function call. */
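/* For example:

     strcspn ("abcd", "cx")  folds to  2
     strcspn ("", s)         folds to  0  (s is still evaluated)
     strcspn (s, "")         folds to  strlen (s)

   Other forms are left for the library.  */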
11405 static tree
11406 fold_builtin_strcspn (tree s1, tree s2)
11408 if (!validate_arg (s1, POINTER_TYPE)
11409 || !validate_arg (s2, POINTER_TYPE))
11410 return NULL_TREE;
11411 else
11413 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11415 /* If both arguments are constants, evaluate at compile-time. */
11416 if (p1 && p2)
11418 const size_t r = strcspn (p1, p2);
11419 return size_int (r);
11422 /* If the first argument is "", the result is 0. */
11423 if (p1 && *p1 == '\0')
11425 /* Evaluate and ignore argument s2 in case it has
11426 side-effects. */
11427 return omit_one_operand (integer_type_node,
11428 integer_zero_node, s2);
11431 /* If the second argument is "", return __builtin_strlen(s1). */
11432 if (p2 && *p2 == '\0')
11434 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11436 /* If the replacement _DECL isn't initialized, don't do the
11437 transformation. */
11438 if (!fn)
11439 return NULL_TREE;
11441 return build_call_expr (fn, 1, s1);
11443 return NULL_TREE;
11447 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11448 to the call. IGNORE is true if the value returned
11449 by the builtin will be ignored. UNLOCKED is true if this is
11450 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11451 the known length of the string. Return NULL_TREE if no simplification
11452 was possible. */
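/* For example, when the result is unused and the string length is known:

     fputs ("", f)     is deleted               (f is still evaluated)
     fputs ("x", f)    becomes  fputc ('x', f)
     fputs ("abc", f)  becomes  fwrite ("abc", 1, 3, f)

   the last form being skipped when optimizing for size.  */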
11454 tree
11455 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11457 /* If we're using an unlocked function, assume the other unlocked
11458 functions exist explicitly. */
11459 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11460 : implicit_built_in_decls[BUILT_IN_FPUTC];
11461 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11462 : implicit_built_in_decls[BUILT_IN_FWRITE];
11464 /* If the return value is used, don't do the transformation. */
11465 if (!ignore)
11466 return NULL_TREE;
11468 /* Verify the arguments in the original call. */
11469 if (!validate_arg (arg0, POINTER_TYPE)
11470 || !validate_arg (arg1, POINTER_TYPE))
11471 return NULL_TREE;
11473 if (! len)
11474 len = c_strlen (arg0, 0);
11476 /* Get the length of the string passed to fputs. If the length
11477 can't be determined, punt. */
11478 if (!len
11479 || TREE_CODE (len) != INTEGER_CST)
11480 return NULL_TREE;
11482 switch (compare_tree_int (len, 1))
11484 case -1: /* length is 0, delete the call entirely. */
11485 return omit_one_operand (integer_type_node, integer_zero_node, arg1);
11487 case 0: /* length is 1, call fputc. */
11489 const char *p = c_getstr (arg0);
11491 if (p != NULL)
11493 if (fn_fputc)
11494 return build_call_expr (fn_fputc, 2,
11495 build_int_cst (NULL_TREE, p[0]), arg1);
11496 else
11497 return NULL_TREE;
11500 /* FALLTHROUGH */
11501 case 1: /* length is greater than 1, call fwrite. */
11503 /* If optimizing for size, keep fputs. */
11504 if (optimize_function_for_size_p (cfun))
11505 return NULL_TREE;
11506 /* New argument list transforming fputs(string, stream) to
11507 fwrite(string, 1, len, stream). */
11508 if (fn_fwrite)
11509 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11510 else
11511 return NULL_TREE;
11513 default:
11514 gcc_unreachable ();
11516 return NULL_TREE;
11519 /* Fold the next_arg or va_start call EXP. Returns true if an error was
11520 produced, false otherwise. This is done so that we don't output the error
11521 or warning twice or three times. */
11523 bool
11524 fold_builtin_next_arg (tree exp, bool va_start_p)
11526 tree fntype = TREE_TYPE (current_function_decl);
11527 int nargs = call_expr_nargs (exp);
11528 tree arg;
11530 if (TYPE_ARG_TYPES (fntype) == 0
11531 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11532 == void_type_node))
11534 error ("%<va_start%> used in function with fixed args");
11535 return true;
11538 if (va_start_p)
11540 if (va_start_p && (nargs != 2))
11542 error ("wrong number of arguments to function %<va_start%>");
11543 return true;
11545 arg = CALL_EXPR_ARG (exp, 1);
11547 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11548 when we checked the arguments and if needed issued a warning. */
11549 else
11551 if (nargs == 0)
11553 /* Evidently an out of date version of <stdarg.h>; can't validate
11554 va_start's second argument, but can still work as intended. */
11555 warning (0, "%<__builtin_next_arg%> called without an argument");
11556 return true;
11558 else if (nargs > 1)
11560 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11561 return true;
11563 arg = CALL_EXPR_ARG (exp, 0);
11566 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11567 or __builtin_next_arg (0) the first time we see it, after checking
11568 the arguments and if needed issuing a warning. */
11569 if (!integer_zerop (arg))
11571 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11573 /* Strip off all nops for the sake of the comparison. This
11574 is not quite the same as STRIP_NOPS. It does more.
11575 We must also strip off INDIRECT_EXPR for C++ reference
11576 parameters. */
11577 while (CONVERT_EXPR_P (arg)
11578 || TREE_CODE (arg) == INDIRECT_REF)
11579 arg = TREE_OPERAND (arg, 0);
11580 if (arg != last_parm)
11582 /* FIXME: Sometimes with the tree optimizers we can get something
11583 that is not the last argument even though the user used the last
11584 argument. We just warn and set the arg to be the last
11585 argument so that we will get wrong-code because of
11586 it. */
11587 warning (0, "second parameter of %<va_start%> not last named argument");
11590 /* Undefined by C99 7.15.1.4p4 (va_start):
11591 "If the parameter parmN is declared with the register storage
11592 class, with a function or array type, or with a type that is
11593 not compatible with the type that results after application of
11594 the default argument promotions, the behavior is undefined."
11596 else if (DECL_REGISTER (arg))
11597 warning (0, "undefined behaviour when second parameter of "
11598 "%<va_start%> is declared with %<register%> storage");
11600 /* We want to verify the second parameter just once before the tree
11601 optimizers are run and then avoid keeping it in the tree,
11602 as otherwise we could warn even for correct code like:
11603 void foo (int i, ...)
11604 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11605 if (va_start_p)
11606 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11607 else
11608 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11610 return false;
11614 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11615 ORIG may be null if this is a 2-argument call. We don't attempt to
11616 simplify calls with more than 3 arguments.
11618 Return NULL_TREE if no simplification was possible, otherwise return the
11619 simplified form of the call as a tree. If IGNORED is true, it means that
11620 the caller does not use the returned value of the function. */
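/* For example, with a literal format string:

     sprintf (d, "hi")     becomes  strcpy (d, "hi")  and, if the value
                           is needed, yields the constant 2;
     sprintf (d, "%s", s)  becomes  strcpy (d, s)  when the return value
                           is ignored or s is itself a string literal.

   Formats containing any other % directive are left alone.  */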
11622 static tree
11623 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11625 tree call, retval;
11626 const char *fmt_str = NULL;
11628 /* Verify the required arguments in the original call. We deal with two
11629 types of sprintf() calls: 'sprintf (str, fmt)' and
11630 'sprintf (dest, "%s", orig)'. */
11631 if (!validate_arg (dest, POINTER_TYPE)
11632 || !validate_arg (fmt, POINTER_TYPE))
11633 return NULL_TREE;
11634 if (orig && !validate_arg (orig, POINTER_TYPE))
11635 return NULL_TREE;
11637 /* Check whether the format is a literal string constant. */
11638 fmt_str = c_getstr (fmt);
11639 if (fmt_str == NULL)
11640 return NULL_TREE;
11642 call = NULL_TREE;
11643 retval = NULL_TREE;
11645 if (!init_target_chars ())
11646 return NULL_TREE;
11648 /* If the format doesn't contain % args or %%, use strcpy. */
11649 if (strchr (fmt_str, target_percent) == NULL)
11651 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11653 if (!fn)
11654 return NULL_TREE;
11656 /* Don't optimize sprintf (buf, "abc", ptr++). */
11657 if (orig)
11658 return NULL_TREE;
11660 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11661 'format' is known to contain no % formats. */
11662 call = build_call_expr (fn, 2, dest, fmt);
11663 if (!ignored)
11664 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11667 /* If the format is "%s", use strcpy if the result isn't used. */
11668 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11670 tree fn;
11671 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11673 if (!fn)
11674 return NULL_TREE;
11676 /* Don't crash on sprintf (str1, "%s"). */
11677 if (!orig)
11678 return NULL_TREE;
11680 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11681 if (!ignored)
11683 retval = c_strlen (orig, 1);
11684 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11685 return NULL_TREE;
11687 call = build_call_expr (fn, 2, dest, orig);
11690 if (call && retval)
11692 retval = fold_convert
11693 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11694 retval);
11695 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11697 else
11698 return call;
11701 /* Expand a call EXP to __builtin_object_size. */
11704 expand_builtin_object_size (tree exp)
11706 tree ost;
11707 int object_size_type;
11708 tree fndecl = get_callee_fndecl (exp);
11710 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11712 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11713 exp, fndecl);
11714 expand_builtin_trap ();
11715 return const0_rtx;
11718 ost = CALL_EXPR_ARG (exp, 1);
11719 STRIP_NOPS (ost);
11721 if (TREE_CODE (ost) != INTEGER_CST
11722 || tree_int_cst_sgn (ost) < 0
11723 || compare_tree_int (ost, 3) > 0)
11725 error ("%Klast argument of %D is not integer constant between 0 and 3",
11726 exp, fndecl);
11727 expand_builtin_trap ();
11728 return const0_rtx;
11731 object_size_type = tree_low_cst (ost, 0);
11733 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11736 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11737 FCODE is the BUILT_IN_* to use.
11738 Return NULL_RTX if we failed; the caller should emit a normal call,
11739 otherwise try to get the result in TARGET, if convenient (and in
11740 mode MODE if that's convenient). */
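/* As a sketch, for a hypothetical buffer of known size:

     char buf[8];
     __builtin___memcpy_chk (buf, src, 4, __builtin_object_size (buf, 0));

   expands as a plain memcpy (buf, src, 4), the constant length 4 being
   provably within the object size 8, whereas a constant length larger
   than the object size triggers the "will always overflow" warning and
   keeps the checked call.  */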
11742 static rtx
11743 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11744 enum built_in_function fcode)
11746 tree dest, src, len, size;
11748 if (!validate_arglist (exp,
11749 POINTER_TYPE,
11750 fcode == BUILT_IN_MEMSET_CHK
11751 ? INTEGER_TYPE : POINTER_TYPE,
11752 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11753 return NULL_RTX;
11755 dest = CALL_EXPR_ARG (exp, 0);
11756 src = CALL_EXPR_ARG (exp, 1);
11757 len = CALL_EXPR_ARG (exp, 2);
11758 size = CALL_EXPR_ARG (exp, 3);
11760 if (! host_integerp (size, 1))
11761 return NULL_RTX;
11763 if (host_integerp (len, 1) || integer_all_onesp (size))
11765 tree fn;
11767 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11769 warning (0, "%Kcall to %D will always overflow destination buffer",
11770 exp, get_callee_fndecl (exp));
11771 return NULL_RTX;
11774 fn = NULL_TREE;
11775 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11776 mem{cpy,pcpy,move,set} is available. */
11777 switch (fcode)
11779 case BUILT_IN_MEMCPY_CHK:
11780 fn = built_in_decls[BUILT_IN_MEMCPY];
11781 break;
11782 case BUILT_IN_MEMPCPY_CHK:
11783 fn = built_in_decls[BUILT_IN_MEMPCPY];
11784 break;
11785 case BUILT_IN_MEMMOVE_CHK:
11786 fn = built_in_decls[BUILT_IN_MEMMOVE];
11787 break;
11788 case BUILT_IN_MEMSET_CHK:
11789 fn = built_in_decls[BUILT_IN_MEMSET];
11790 break;
11791 default:
11792 break;
11795 if (! fn)
11796 return NULL_RTX;
11798 fn = build_call_expr (fn, 3, dest, src, len);
11799 STRIP_TYPE_NOPS (fn);
11800 while (TREE_CODE (fn) == COMPOUND_EXPR)
11802 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11803 EXPAND_NORMAL);
11804 fn = TREE_OPERAND (fn, 1);
11806 if (TREE_CODE (fn) == CALL_EXPR)
11807 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11808 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11810 else if (fcode == BUILT_IN_MEMSET_CHK)
11811 return NULL_RTX;
11812 else
11814 unsigned int dest_align
11815 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11817 /* If DEST is not a pointer type, call the normal function. */
11818 if (dest_align == 0)
11819 return NULL_RTX;
11821 /* If SRC and DEST are the same (and not volatile), do nothing. */
11822 if (operand_equal_p (src, dest, 0))
11824 tree expr;
11826 if (fcode != BUILT_IN_MEMPCPY_CHK)
11828 /* Evaluate and ignore LEN in case it has side-effects. */
11829 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11830 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11833 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11834 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11837 /* __memmove_chk special case. */
11838 if (fcode == BUILT_IN_MEMMOVE_CHK)
11840 unsigned int src_align
11841 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11843 if (src_align == 0)
11844 return NULL_RTX;
11846 /* If src is categorized for a readonly section we can use
11847 normal __memcpy_chk. */
11848 if (readonly_data_expr (src))
11850 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11851 if (!fn)
11852 return NULL_RTX;
11853 fn = build_call_expr (fn, 4, dest, src, len, size);
11854 STRIP_TYPE_NOPS (fn);
11855 while (TREE_CODE (fn) == COMPOUND_EXPR)
11857 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11858 EXPAND_NORMAL);
11859 fn = TREE_OPERAND (fn, 1);
11861 if (TREE_CODE (fn) == CALL_EXPR)
11862 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11863 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11866 return NULL_RTX;
11870 /* Emit warning if a buffer overflow is detected at compile time. */
11872 static void
11873 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11875 int is_strlen = 0;
11876 tree len, size;
11878 switch (fcode)
11880 case BUILT_IN_STRCPY_CHK:
11881 case BUILT_IN_STPCPY_CHK:
11882 /* For __strcat_chk the warning will be emitted only if overflowing
11883 by at least strlen (dest) + 1 bytes. */
11884 case BUILT_IN_STRCAT_CHK:
11885 len = CALL_EXPR_ARG (exp, 1);
11886 size = CALL_EXPR_ARG (exp, 2);
11887 is_strlen = 1;
11888 break;
11889 case BUILT_IN_STRNCAT_CHK:
11890 case BUILT_IN_STRNCPY_CHK:
11891 len = CALL_EXPR_ARG (exp, 2);
11892 size = CALL_EXPR_ARG (exp, 3);
11893 break;
11894 case BUILT_IN_SNPRINTF_CHK:
11895 case BUILT_IN_VSNPRINTF_CHK:
11896 len = CALL_EXPR_ARG (exp, 1);
11897 size = CALL_EXPR_ARG (exp, 3);
11898 break;
11899 default:
11900 gcc_unreachable ();
11903 if (!len || !size)
11904 return;
11906 if (! host_integerp (size, 1) || integer_all_onesp (size))
11907 return;
11909 if (is_strlen)
11911 len = c_strlen (len, 1);
11912 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11913 return;
11915 else if (fcode == BUILT_IN_STRNCAT_CHK)
11917 tree src = CALL_EXPR_ARG (exp, 1);
11918 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11919 return;
11920 src = c_strlen (src, 1);
11921 if (! src || ! host_integerp (src, 1))
11923 warning (0, "%Kcall to %D might overflow destination buffer",
11924 exp, get_callee_fndecl (exp));
11925 return;
11927 else if (tree_int_cst_lt (src, size))
11928 return;
11930 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11931 return;
11933 warning (0, "%Kcall to %D will always overflow destination buffer",
11934 exp, get_callee_fndecl (exp));
11937 /* Emit warning if a buffer overflow is detected at compile time
11938 in __sprintf_chk/__vsprintf_chk calls. */
11940 static void
11941 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11943 tree dest, size, len, fmt, flag;
11944 const char *fmt_str;
11945 int nargs = call_expr_nargs (exp);
11947 /* Verify the required arguments in the original call. */
11949 if (nargs < 4)
11950 return;
11951 dest = CALL_EXPR_ARG (exp, 0);
11952 flag = CALL_EXPR_ARG (exp, 1);
11953 size = CALL_EXPR_ARG (exp, 2);
11954 fmt = CALL_EXPR_ARG (exp, 3);
11956 if (! host_integerp (size, 1) || integer_all_onesp (size))
11957 return;
11959 /* Check whether the format is a literal string constant. */
11960 fmt_str = c_getstr (fmt);
11961 if (fmt_str == NULL)
11962 return;
11964 if (!init_target_chars ())
11965 return;
11967 /* If the format doesn't contain % args or %%, we know its size. */
11968 if (strchr (fmt_str, target_percent) == 0)
11969 len = build_int_cstu (size_type_node, strlen (fmt_str));
11970 /* If the format is "%s" and first ... argument is a string literal,
11971 we know the size too. */
11972 else if (fcode == BUILT_IN_SPRINTF_CHK
11973 && strcmp (fmt_str, target_percent_s) == 0)
11975 tree arg;
11977 if (nargs < 5)
11978 return;
11979 arg = CALL_EXPR_ARG (exp, 4);
11980 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11981 return;
11983 len = c_strlen (arg, 1);
11984 if (!len || ! host_integerp (len, 1))
11985 return;
11987 else
11988 return;
11990 if (! tree_int_cst_lt (len, size))
11992 warning (0, "%Kcall to %D will always overflow destination buffer",
11993 exp, get_callee_fndecl (exp));
11997 /* Emit warning if free is called with the address of a variable. */
11999 static void
12000 maybe_emit_free_warning (tree exp)
12002 tree arg = CALL_EXPR_ARG (exp, 0);
12004 STRIP_NOPS (arg);
12005 if (TREE_CODE (arg) != ADDR_EXPR)
12006 return;
12008 arg = get_base_address (TREE_OPERAND (arg, 0));
12009 if (arg == NULL || INDIRECT_REF_P (arg))
12010 return;
12012 if (SSA_VAR_P (arg))
12013 warning (0, "%Kattempt to free a non-heap object %qD", exp, arg);
12014 else
12015 warning (0, "%Kattempt to free a non-heap object", exp);
12018 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12019 if possible. */
12021 tree
12022 fold_builtin_object_size (tree ptr, tree ost)
12024 tree ret = NULL_TREE;
12025 int object_size_type;
12027 if (!validate_arg (ptr, POINTER_TYPE)
12028 || !validate_arg (ost, INTEGER_TYPE))
12029 return NULL_TREE;
12031 STRIP_NOPS (ost);
12033 if (TREE_CODE (ost) != INTEGER_CST
12034 || tree_int_cst_sgn (ost) < 0
12035 || compare_tree_int (ost, 3) > 0)
12036 return NULL_TREE;
12038 object_size_type = tree_low_cst (ost, 0);
12040 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12041 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12042 and (size_t) 0 for types 2 and 3. */
12043 if (TREE_SIDE_EFFECTS (ptr))
12044 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12046 if (TREE_CODE (ptr) == ADDR_EXPR)
12047 ret = build_int_cstu (size_type_node,
12048 compute_builtin_object_size (ptr, object_size_type));
12050 else if (TREE_CODE (ptr) == SSA_NAME)
12052 unsigned HOST_WIDE_INT bytes;
12054 /* If object size is not known yet, delay folding until
12055 later. Maybe subsequent passes will help determining
12056 it. */
12057 bytes = compute_builtin_object_size (ptr, object_size_type);
12058 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12059 ? -1 : 0))
12060 ret = build_int_cstu (size_type_node, bytes);
12063 if (ret)
12065 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12066 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12067 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12068 ret = NULL_TREE;
12071 return ret;
12074 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12075 DEST, SRC, LEN, and SIZE are the arguments to the call.
12076 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12077 code of the builtin. If MAXLEN is not NULL, it is maximum length
12078 passed as third argument. */
12080 tree
12081 fold_builtin_memory_chk (tree fndecl,
12082 tree dest, tree src, tree len, tree size,
12083 tree maxlen, bool ignore,
12084 enum built_in_function fcode)
12086 tree fn;
12088 if (!validate_arg (dest, POINTER_TYPE)
12089 || !validate_arg (src,
12090 (fcode == BUILT_IN_MEMSET_CHK
12091 ? INTEGER_TYPE : POINTER_TYPE))
12092 || !validate_arg (len, INTEGER_TYPE)
12093 || !validate_arg (size, INTEGER_TYPE))
12094 return NULL_TREE;
12096 /* If SRC and DEST are the same (and not volatile), return DEST
12097 (resp. DEST+LEN for __mempcpy_chk). */
12098 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12100 if (fcode != BUILT_IN_MEMPCPY_CHK)
12101 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12102 else
12104 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12105 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
12109 if (! host_integerp (size, 1))
12110 return NULL_TREE;
12112 if (! integer_all_onesp (size))
12114 if (! host_integerp (len, 1))
12116 /* If LEN is not constant, try MAXLEN too.
12117 For MAXLEN only allow optimizing into non-_ocs function
12118 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12119 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12121 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12123 /* (void) __mempcpy_chk () can be optimized into
12124 (void) __memcpy_chk (). */
12125 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12126 if (!fn)
12127 return NULL_TREE;
12129 return build_call_expr (fn, 4, dest, src, len, size);
12131 return NULL_TREE;
12134 else
12135 maxlen = len;
12137 if (tree_int_cst_lt (size, maxlen))
12138 return NULL_TREE;
12141 fn = NULL_TREE;
12142 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12143 mem{cpy,pcpy,move,set} is available. */
12144 switch (fcode)
12146 case BUILT_IN_MEMCPY_CHK:
12147 fn = built_in_decls[BUILT_IN_MEMCPY];
12148 break;
12149 case BUILT_IN_MEMPCPY_CHK:
12150 fn = built_in_decls[BUILT_IN_MEMPCPY];
12151 break;
12152 case BUILT_IN_MEMMOVE_CHK:
12153 fn = built_in_decls[BUILT_IN_MEMMOVE];
12154 break;
12155 case BUILT_IN_MEMSET_CHK:
12156 fn = built_in_decls[BUILT_IN_MEMSET];
12157 break;
12158 default:
12159 break;
12162 if (!fn)
12163 return NULL_TREE;
12165 return build_call_expr (fn, 3, dest, src, len);
12168 /* Fold a call to the __st[rp]cpy_chk builtin.
12169 DEST, SRC, and SIZE are the arguments to the call.
12170 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12171 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12172 strings passed as second argument. */
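/* For example, assuming DST points to an object whose size SIZE is known:

     __builtin___strcpy_chk (dst, "abc", SIZE)

   folds to strcpy (dst, "abc") when SIZE >= 4, while a source whose
   length folds to a non-constant expression is rewritten as
   __memcpy_chk (dst, src, len + 1, SIZE) so the runtime check is
   preserved.  */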
12174 tree
12175 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12176 tree maxlen, bool ignore,
12177 enum built_in_function fcode)
12179 tree len, fn;
12181 if (!validate_arg (dest, POINTER_TYPE)
12182 || !validate_arg (src, POINTER_TYPE)
12183 || !validate_arg (size, INTEGER_TYPE))
12184 return NULL_TREE;
12186 /* If SRC and DEST are the same (and not volatile), return DEST. */
12187 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12188 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12190 if (! host_integerp (size, 1))
12191 return NULL_TREE;
12193 if (! integer_all_onesp (size))
12195 len = c_strlen (src, 1);
12196 if (! len || ! host_integerp (len, 1))
12198 /* If LEN is not constant, try MAXLEN too.
12199 For MAXLEN only allow optimizing into non-_ocs function
12200 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12201 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12203 if (fcode == BUILT_IN_STPCPY_CHK)
12205 if (! ignore)
12206 return NULL_TREE;
12208 /* If return value of __stpcpy_chk is ignored,
12209 optimize into __strcpy_chk. */
12210 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12211 if (!fn)
12212 return NULL_TREE;
12214 return build_call_expr (fn, 3, dest, src, size);
12217 if (! len || TREE_SIDE_EFFECTS (len))
12218 return NULL_TREE;
12220 /* If c_strlen returned something, but not a constant,
12221 transform __strcpy_chk into __memcpy_chk. */
12222 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12223 if (!fn)
12224 return NULL_TREE;
12226 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12227 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12228 build_call_expr (fn, 4,
12229 dest, src, len, size));
12232 else
12233 maxlen = len;
12235 if (! tree_int_cst_lt (maxlen, size))
12236 return NULL_TREE;
12239 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12240 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12241 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12242 if (!fn)
12243 return NULL_TREE;
12245 return build_call_expr (fn, 2, dest, src);
12248 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12249 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12250 length passed as third argument. */
12252 tree
12253 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12254 tree maxlen)
12256 tree fn;
12258 if (!validate_arg (dest, POINTER_TYPE)
12259 || !validate_arg (src, POINTER_TYPE)
12260 || !validate_arg (len, INTEGER_TYPE)
12261 || !validate_arg (size, INTEGER_TYPE))
12262 return NULL_TREE;
12264 if (! host_integerp (size, 1))
12265 return NULL_TREE;
12267 if (! integer_all_onesp (size))
12269 if (! host_integerp (len, 1))
12271 /* If LEN is not constant, try MAXLEN too.
12272 For MAXLEN only allow optimizing into non-_ocs function
12273 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12274 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12275 return NULL_TREE;
12277 else
12278 maxlen = len;
12280 if (tree_int_cst_lt (size, maxlen))
12281 return NULL_TREE;
12284 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12285 fn = built_in_decls[BUILT_IN_STRNCPY];
12286 if (!fn)
12287 return NULL_TREE;
12289 return build_call_expr (fn, 3, dest, src, len);
12292 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12293 are the arguments to the call. */
12295 static tree
12296 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12298 tree fn;
12299 const char *p;
12301 if (!validate_arg (dest, POINTER_TYPE)
12302 || !validate_arg (src, POINTER_TYPE)
12303 || !validate_arg (size, INTEGER_TYPE))
12304 return NULL_TREE;
12306 p = c_getstr (src);
12307 /* If the SRC parameter is "", return DEST. */
12308 if (p && *p == '\0')
12309 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12311 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12312 return NULL_TREE;
12314 /* If __builtin_strcat_chk is used, assume strcat is available. */
12315 fn = built_in_decls[BUILT_IN_STRCAT];
12316 if (!fn)
12317 return NULL_TREE;
12319 return build_call_expr (fn, 2, dest, src);
12322 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12323 LEN, and SIZE. */
12325 static tree
12326 fold_builtin_strncat_chk (tree fndecl,
12327 tree dest, tree src, tree len, tree size)
12329 tree fn;
12330 const char *p;
12332 if (!validate_arg (dest, POINTER_TYPE)
12333 || !validate_arg (src, POINTER_TYPE)
12334 || !validate_arg (len, INTEGER_TYPE)
12335 || !validate_arg (size, INTEGER_TYPE))
12336 return NULL_TREE;
12338 p = c_getstr (src);
12339 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12340 if (p && *p == '\0')
12341 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12342 else if (integer_zerop (len))
12343 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12345 if (! host_integerp (size, 1))
12346 return NULL_TREE;
12348 if (! integer_all_onesp (size))
12350 tree src_len = c_strlen (src, 1);
12351 if (src_len
12352 && host_integerp (src_len, 1)
12353 && host_integerp (len, 1)
12354 && ! tree_int_cst_lt (len, src_len))
12356 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12357 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12358 if (!fn)
12359 return NULL_TREE;
12361 return build_call_expr (fn, 3, dest, src, size);
12363 return NULL_TREE;
12366 /* If __builtin_strncat_chk is used, assume strncat is available. */
12367 fn = built_in_decls[BUILT_IN_STRNCAT];
12368 if (!fn)
12369 return NULL_TREE;
12371 return build_call_expr (fn, 3, dest, src, len);
12374 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12375 a normal call should be emitted rather than expanding the function
12376 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12378 static tree
12379 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12381 tree dest, size, len, fn, fmt, flag;
12382 const char *fmt_str;
12383 int nargs = call_expr_nargs (exp);
12385 /* Verify the required arguments in the original call. */
12386 if (nargs < 4)
12387 return NULL_TREE;
12388 dest = CALL_EXPR_ARG (exp, 0);
12389 if (!validate_arg (dest, POINTER_TYPE))
12390 return NULL_TREE;
12391 flag = CALL_EXPR_ARG (exp, 1);
12392 if (!validate_arg (flag, INTEGER_TYPE))
12393 return NULL_TREE;
12394 size = CALL_EXPR_ARG (exp, 2);
12395 if (!validate_arg (size, INTEGER_TYPE))
12396 return NULL_TREE;
12397 fmt = CALL_EXPR_ARG (exp, 3);
12398 if (!validate_arg (fmt, POINTER_TYPE))
12399 return NULL_TREE;
12401 if (! host_integerp (size, 1))
12402 return NULL_TREE;
12404 len = NULL_TREE;
12406 if (!init_target_chars ())
12407 return NULL_TREE;
12409 /* Check whether the format is a literal string constant. */
12410 fmt_str = c_getstr (fmt);
12411 if (fmt_str != NULL)
12413 /* If the format doesn't contain % args or %%, we know the size. */
12414 if (strchr (fmt_str, target_percent) == 0)
12416 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12417 len = build_int_cstu (size_type_node, strlen (fmt_str));
12419 /* If the format is "%s" and first ... argument is a string literal,
12420 we know the size too. */
12421 else if (fcode == BUILT_IN_SPRINTF_CHK
12422 && strcmp (fmt_str, target_percent_s) == 0)
12424 tree arg;
12426 if (nargs == 5)
12428 arg = CALL_EXPR_ARG (exp, 4);
12429 if (validate_arg (arg, POINTER_TYPE))
12431 len = c_strlen (arg, 1);
12432 if (! len || ! host_integerp (len, 1))
12433 len = NULL_TREE;
12439 if (! integer_all_onesp (size))
12441 if (! len || ! tree_int_cst_lt (len, size))
12442 return NULL_TREE;
12445 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12446 or if format doesn't contain % chars or is "%s". */
12447 if (! integer_zerop (flag))
12449 if (fmt_str == NULL)
12450 return NULL_TREE;
12451 if (strchr (fmt_str, target_percent) != NULL
12452 && strcmp (fmt_str, target_percent_s))
12453 return NULL_TREE;
12456 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12457 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12458 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12459 if (!fn)
12460 return NULL_TREE;
12462 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12465 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
12466 a normal call should be emitted rather than expanding the function
12467 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12468 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12469 passed as second argument. */
12471 tree
12472 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12473 enum built_in_function fcode)
12475 tree dest, size, len, fn, fmt, flag;
12476 const char *fmt_str;
12478 /* Verify the required arguments in the original call. */
12479 if (call_expr_nargs (exp) < 5)
12480 return NULL_TREE;
12481 dest = CALL_EXPR_ARG (exp, 0);
12482 if (!validate_arg (dest, POINTER_TYPE))
12483 return NULL_TREE;
12484 len = CALL_EXPR_ARG (exp, 1);
12485 if (!validate_arg (len, INTEGER_TYPE))
12486 return NULL_TREE;
12487 flag = CALL_EXPR_ARG (exp, 2);
12488 if (!validate_arg (flag, INTEGER_TYPE))
12489 return NULL_TREE;
12490 size = CALL_EXPR_ARG (exp, 3);
12491 if (!validate_arg (size, INTEGER_TYPE))
12492 return NULL_TREE;
12493 fmt = CALL_EXPR_ARG (exp, 4);
12494 if (!validate_arg (fmt, POINTER_TYPE))
12495 return NULL_TREE;
12497 if (! host_integerp (size, 1))
12498 return NULL_TREE;
12500 if (! integer_all_onesp (size))
12502 if (! host_integerp (len, 1))
12504 /* If LEN is not constant, try MAXLEN too.
12505 For MAXLEN only allow optimizing into non-_ocs function
12506 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12507 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12508 return NULL_TREE;
12510 else
12511 maxlen = len;
12513 if (tree_int_cst_lt (size, maxlen))
12514 return NULL_TREE;
12517 if (!init_target_chars ())
12518 return NULL_TREE;
12520 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12521 or if format doesn't contain % chars or is "%s". */
12522 if (! integer_zerop (flag))
12524 fmt_str = c_getstr (fmt);
12525 if (fmt_str == NULL)
12526 return NULL_TREE;
12527 if (strchr (fmt_str, target_percent) != NULL
12528 && strcmp (fmt_str, target_percent_s))
12529 return NULL_TREE;
12532 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12533 available. */
12534 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12535 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12536 if (!fn)
12537 return NULL_TREE;
12539 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12542 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12543 FMT and ARG are the arguments to the call; we don't fold cases with
12544 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12546 Return NULL_TREE if no simplification was possible, otherwise return the
12547 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12548 code of the function to be simplified. */
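/* For example, when the return value is ignored:

     printf ("")          folds to the constant 0 (the call disappears)
     printf ("x")         becomes  putchar ('x')
     printf ("hi\n")      becomes  puts ("hi")
     printf ("%s\n", s)   becomes  puts (s)
     printf ("%c", c)     becomes  putchar (c)

   A plain "%s" without the newline needs its argument to be a string
   literal, and the va_list variants are only handled when the format
   contains no % at all.  */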
12550 static tree
12551 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12552 enum built_in_function fcode)
12554 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12555 const char *fmt_str = NULL;
12557 /* If the return value is used, don't do the transformation. */
12558 if (! ignore)
12559 return NULL_TREE;
12561 /* Verify the required arguments in the original call. */
12562 if (!validate_arg (fmt, POINTER_TYPE))
12563 return NULL_TREE;
12565 /* Check whether the format is a literal string constant. */
12566 fmt_str = c_getstr (fmt);
12567 if (fmt_str == NULL)
12568 return NULL_TREE;
12570 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12572 /* If we're using an unlocked function, assume the other
12573 unlocked functions exist explicitly. */
12574 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12575 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12577 else
12579 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12580 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12583 if (!init_target_chars ())
12584 return NULL_TREE;
12586 if (strcmp (fmt_str, target_percent_s) == 0
12587 || strchr (fmt_str, target_percent) == NULL)
12589 const char *str;
12591 if (strcmp (fmt_str, target_percent_s) == 0)
12593 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12594 return NULL_TREE;
12596 if (!arg || !validate_arg (arg, POINTER_TYPE))
12597 return NULL_TREE;
12599 str = c_getstr (arg);
12600 if (str == NULL)
12601 return NULL_TREE;
12603 else
12605 /* The format specifier doesn't contain any '%' characters. */
12606 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12607 && arg)
12608 return NULL_TREE;
12609 str = fmt_str;
12612 /* If the string was "", printf does nothing. */
12613 if (str[0] == '\0')
12614 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12616 /* If the string has length of 1, call putchar. */
12617 if (str[1] == '\0')
12619 /* Given printf("c"), (where c is any one character,)
12620 convert "c"[0] to an int and pass that to the replacement
12621 function. */
12622 newarg = build_int_cst (NULL_TREE, str[0]);
12623 if (fn_putchar)
12624 call = build_call_expr (fn_putchar, 1, newarg);
12626 else
12628 /* If the string was "string\n", call puts("string"). */
12629 size_t len = strlen (str);
12630 if ((unsigned char)str[len - 1] == target_newline)
12632 /* Create a NUL-terminated string that's one char shorter
12633 than the original, stripping off the trailing '\n'. */
12634 char *newstr = XALLOCAVEC (char, len);
12635 memcpy (newstr, str, len - 1);
12636 newstr[len - 1] = 0;
12638 newarg = build_string_literal (len, newstr);
12639 if (fn_puts)
12640 call = build_call_expr (fn_puts, 1, newarg);
12642 else
12643 /* We'd like to arrange to call fputs(string,stdout) here,
12644 but we need stdout and don't have a way to get it yet. */
12645 return NULL_TREE;
12649 /* The other optimizations can be done only on the non-va_list variants. */
12650 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12651 return NULL_TREE;
12653 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12654 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12656 if (!arg || !validate_arg (arg, POINTER_TYPE))
12657 return NULL_TREE;
12658 if (fn_puts)
12659 call = build_call_expr (fn_puts, 1, arg);
12662 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12663 else if (strcmp (fmt_str, target_percent_c) == 0)
12665 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12666 return NULL_TREE;
12667 if (fn_putchar)
12668 call = build_call_expr (fn_putchar, 1, arg);
12671 if (!call)
12672 return NULL_TREE;
12674 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12677 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
12678 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12679 more than 3 arguments, and ARG may be null in the 2-argument case.
12681 Return NULL_TREE if no simplification was possible, otherwise return the
12682 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12683 code of the function to be simplified. */
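/* For example, when the return value is ignored:

     fprintf (fp, "")       folds away (unless FP has side-effects)
     fprintf (fp, "hi")     becomes  fputs ("hi", fp)
     fprintf (fp, "%s", s)  becomes  fputs (s, fp)
     fprintf (fp, "%c", c)  becomes  fputc (c, fp)

   the va_list variants again being limited to formats without %.  */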
12685 static tree
12686 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12687 enum built_in_function fcode)
12689 tree fn_fputc, fn_fputs, call = NULL_TREE;
12690 const char *fmt_str = NULL;
12692 /* If the return value is used, don't do the transformation. */
12693 if (! ignore)
12694 return NULL_TREE;
12696 /* Verify the required arguments in the original call. */
12697 if (!validate_arg (fp, POINTER_TYPE))
12698 return NULL_TREE;
12699 if (!validate_arg (fmt, POINTER_TYPE))
12700 return NULL_TREE;
12702 /* Check whether the format is a literal string constant. */
12703 fmt_str = c_getstr (fmt);
12704 if (fmt_str == NULL)
12705 return NULL_TREE;
12707 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12709 /* If we're using an unlocked function, assume the other
12710 unlocked functions exist explicitly. */
12711 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12712 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12714 else
12716 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12717 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12720 if (!init_target_chars ())
12721 return NULL_TREE;
12723 /* If the format doesn't contain % args or %%, use strcpy. */
12724 if (strchr (fmt_str, target_percent) == NULL)
12726 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12727 && arg)
12728 return NULL_TREE;
12730 /* If the format specifier was "", fprintf does nothing. */
12731 if (fmt_str[0] == '\0')
12733 /* If FP has side-effects, just wait until gimplification is
12734 done. */
12735 if (TREE_SIDE_EFFECTS (fp))
12736 return NULL_TREE;
12738 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12741 /* When "string" doesn't contain %, replace all cases of
12742 fprintf (fp, string) with fputs (string, fp). The fputs
12743 builtin will take care of special cases like length == 1. */
12744 if (fn_fputs)
12745 call = build_call_expr (fn_fputs, 2, fmt, fp);
12748 /* The other optimizations can be done only on the non-va_list variants. */
12749 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12750 return NULL_TREE;
12752 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12753 else if (strcmp (fmt_str, target_percent_s) == 0)
12755 if (!arg || !validate_arg (arg, POINTER_TYPE))
12756 return NULL_TREE;
12757 if (fn_fputs)
12758 call = build_call_expr (fn_fputs, 2, arg, fp);
12761 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12762 else if (strcmp (fmt_str, target_percent_c) == 0)
12764 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12765 return NULL_TREE;
12766 if (fn_fputc)
12767 call = build_call_expr (fn_fputc, 2, arg, fp);
12770 if (!call)
12771 return NULL_TREE;
12772 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12775 /* Initialize format string characters in the target charset. */
12777 static bool
12778 init_target_chars (void)
12780 static bool init;
12781 if (!init)
12783 target_newline = lang_hooks.to_target_charset ('\n');
12784 target_percent = lang_hooks.to_target_charset ('%');
12785 target_c = lang_hooks.to_target_charset ('c');
12786 target_s = lang_hooks.to_target_charset ('s');
12787 if (target_newline == 0 || target_percent == 0 || target_c == 0
12788 || target_s == 0)
12789 return false;
12791 target_percent_c[0] = target_percent;
12792 target_percent_c[1] = target_c;
12793 target_percent_c[2] = '\0';
12795 target_percent_s[0] = target_percent;
12796 target_percent_s[1] = target_s;
12797 target_percent_s[2] = '\0';
12799 target_percent_s_newline[0] = target_percent;
12800 target_percent_s_newline[1] = target_s;
12801 target_percent_s_newline[2] = target_newline;
12802 target_percent_s_newline[3] = '\0';
12804 init = true;
12806 return true;
12809 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12810 and no overflow/underflow occurred. INEXACT is true if M was not
12811 exactly calculated. TYPE is the tree type for the result. This
12812 function assumes that you cleared the MPFR flags and then
12813 calculated M to see if anything subsequently set a flag prior to
12814 entering this function. Return NULL_TREE if any checks fail. */
12816 static tree
12817 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12819 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12820 overflow/underflow occurred. If -frounding-math, proceed iff the
12821 result of calling FUNC was exact. */
12822 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12823 && (!flag_rounding_math || !inexact))
12825 REAL_VALUE_TYPE rr;
12827 real_from_mpfr (&rr, m, type, GMP_RNDN);
12828 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12829 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12830 but the mpfr_t is not, then we underflowed in the
12831 conversion. */
12832 if (real_isfinite (&rr)
12833 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12835 REAL_VALUE_TYPE rmode;
12837 real_convert (&rmode, TYPE_MODE (type), &rr);
12838 /* Proceed iff the specified mode can hold the value. */
12839 if (real_identical (&rmode, &rr))
12840 return build_real (type, rmode);
12843 return NULL_TREE;
12846 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12847 FUNC on it and return the resulting value as a tree with type TYPE.
12848 If MIN and/or MAX are not NULL, then the supplied ARG must be
12849 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12850 acceptable values, otherwise they are not. The mpfr precision is
12851 set to the precision of TYPE. We assume that function FUNC returns
12852 zero if the result could be calculated exactly within the requested
12853 precision. */
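/* As a sketch, folding a hypothetical call such as

     double x = __builtin_sin (1.0);

   would reach this routine with FUNC == mpfr_sin, MIN/MAX == NULL and
   TYPE == double_type_node; the constant 1.0 is converted to an mpfr_t
   at the precision of double, mpfr_sin is evaluated, and the result is
   turned back into a REAL_CST by do_mpfr_ckconv.  */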
12855 static tree
12856 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12857 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12858 bool inclusive)
12860 tree result = NULL_TREE;
12862 STRIP_NOPS (arg);
12864 /* To proceed, MPFR must exactly represent the target floating point
12865 format, which only happens when the target base equals two. */
12866 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12867 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12869 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12871 if (real_isfinite (ra)
12872 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12873 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12875 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12876 const int prec = fmt->p;
12877 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12878 int inexact;
12879 mpfr_t m;
12881 mpfr_init2 (m, prec);
12882 mpfr_from_real (m, ra, GMP_RNDN);
12883 mpfr_clear_flags ();
12884 inexact = func (m, m, rnd);
12885 result = do_mpfr_ckconv (m, type, inexact);
12886 mpfr_clear (m);
12890 return result;
12893 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12894 FUNC on it and return the resulting value as a tree with type TYPE.
12895 The mpfr precision is set to the precision of TYPE. We assume that
12896 function FUNC returns zero if the result could be calculated
12897 exactly within the requested precision. */
12899 static tree
12900 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12901 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12903 tree result = NULL_TREE;
12905 STRIP_NOPS (arg1);
12906 STRIP_NOPS (arg2);
12908 /* To proceed, MPFR must exactly represent the target floating point
12909 format, which only happens when the target base equals two. */
12910 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12911 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12912 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12914 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12915 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12917 if (real_isfinite (ra1) && real_isfinite (ra2))
12919 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12920 const int prec = fmt->p;
12921 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12922 int inexact;
12923 mpfr_t m1, m2;
12925 mpfr_inits2 (prec, m1, m2, NULL);
12926 mpfr_from_real (m1, ra1, GMP_RNDN);
12927 mpfr_from_real (m2, ra2, GMP_RNDN);
12928 mpfr_clear_flags ();
12929 inexact = func (m1, m1, m2, rnd);
12930 result = do_mpfr_ckconv (m1, type, inexact);
12931 mpfr_clears (m1, m2, NULL);
12935 return result;
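/* Illustrative example (paraphrased from the two-argument math folders
   earlier in this file): any MPFR entry point with the
   (rop, op1, op2, rnd) signature can be passed directly, e.g.

     return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);

   for folding a constant atan2 call.  */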
12938 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12939 FUNC on it and return the resulting value as a tree with type TYPE.
12940 The mpfr precision is set to the precision of TYPE. We assume that
12941 function FUNC returns zero if the result could be calculated
12942 exactly within the requested precision. */
12944 static tree
12945 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12946 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12948 tree result = NULL_TREE;
12950 STRIP_NOPS (arg1);
12951 STRIP_NOPS (arg2);
12952 STRIP_NOPS (arg3);
12954 /* To proceed, MPFR must exactly represent the target floating point
12955 format, which only happens when the target base equals two. */
12956 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12957 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12958 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12959 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12961 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12962 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12963 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12965 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12967 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12968 const int prec = fmt->p;
12969 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12970 int inexact;
12971 mpfr_t m1, m2, m3;
12973 mpfr_inits2 (prec, m1, m2, m3, NULL);
12974 mpfr_from_real (m1, ra1, GMP_RNDN);
12975 mpfr_from_real (m2, ra2, GMP_RNDN);
12976 mpfr_from_real (m3, ra3, GMP_RNDN);
12977 mpfr_clear_flags ();
12978 inexact = func (m1, m1, m2, m3, rnd);
12979 result = do_mpfr_ckconv (m1, type, inexact);
12980 mpfr_clears (m1, m2, m3, NULL);
12984 return result;
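/* Illustrative example (paraphrased, not a verbatim quote of the
   caller): the fma folder can hand mpfr_fma straight through,

     return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);  */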
12987 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12988 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12989 If ARG_SINP and ARG_COSP are NULL then the result is returned
12990 as a complex value.
12991 The type is taken from the type of ARG and is used for setting the
12992 precision of the calculation and results. */
12994 static tree
12995 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12997 tree const type = TREE_TYPE (arg);
12998 tree result = NULL_TREE;
13000 STRIP_NOPS (arg);
13002 /* To proceed, MPFR must exactly represent the target floating point
13003 format, which only happens when the target base equals two. */
13004 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13005 && TREE_CODE (arg) == REAL_CST
13006 && !TREE_OVERFLOW (arg))
13008 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13010 if (real_isfinite (ra))
13012 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13013 const int prec = fmt->p;
13014 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13015 tree result_s, result_c;
13016 int inexact;
13017 mpfr_t m, ms, mc;
13019 mpfr_inits2 (prec, m, ms, mc, NULL);
13020 mpfr_from_real (m, ra, GMP_RNDN);
13021 mpfr_clear_flags ();
13022 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13023 result_s = do_mpfr_ckconv (ms, type, inexact);
13024 result_c = do_mpfr_ckconv (mc, type, inexact);
13025 mpfr_clears (m, ms, mc, NULL);
13026 if (result_s && result_c)
13028 /* If we are to return in a complex value do so. */
13029 if (!arg_sinp && !arg_cosp)
13030 return build_complex (build_complex_type (type),
13031 result_c, result_s);
13033 /* Dereference the sin/cos pointer arguments. */
13034 arg_sinp = build_fold_indirect_ref (arg_sinp);
13035 arg_cosp = build_fold_indirect_ref (arg_cosp);
13036 /* Proceed iff valid pointer types were passed in. */
13037 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13038 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13040 /* Set the values. */
13041 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13042 result_s);
13043 TREE_SIDE_EFFECTS (result_s) = 1;
13044 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13045 result_c);
13046 TREE_SIDE_EFFECTS (result_c) = 1;
13047 /* Combine the assignments into a compound expr. */
13048 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13049 result_s, result_c));
13054 return result;
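/* Illustrative examples (paraphrased): folding a sincos call passes the
   two output pointers through,

     return do_mpfr_sincos (arg0, arg1, arg2);

   whereas folding cexpi asks for the complex form by passing no
   pointers,

     return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);  */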
13057 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
13058 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13059 two-argument mpfr order N Bessel function FUNC on them and return
13060 the resulting value as a tree with type TYPE. The mpfr precision
13061 is set to the precision of TYPE. We assume that function FUNC
13062 returns zero if the result could be calculated exactly within the
13063 requested precision. */
13064 static tree
13065 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13066 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13067 const REAL_VALUE_TYPE *min, bool inclusive)
13069 tree result = NULL_TREE;
13071 STRIP_NOPS (arg1);
13072 STRIP_NOPS (arg2);
13074 /* To proceed, MPFR must exactly represent the target floating point
13075 format, which only happens when the target base equals two. */
13076 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13077 && host_integerp (arg1, 0)
13078 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13080 const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
13081 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13083 if (n == (long)n
13084 && real_isfinite (ra)
13085 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
13087 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13088 const int prec = fmt->p;
13089 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13090 int inexact;
13091 mpfr_t m;
13093 mpfr_init2 (m, prec);
13094 mpfr_from_real (m, ra, GMP_RNDN);
13095 mpfr_clear_flags ();
13096 inexact = func (m, n, m, rnd);
13097 result = do_mpfr_ckconv (m, type, inexact);
13098 mpfr_clear (m);
13102 return result;
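/* Illustrative examples (paraphrased): jn has no domain restriction,

     return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);

   while yn is only defined for a strictly positive argument, so its
   folder would pass an exclusive lower bound of zero,

     return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn, &dconst0, false);  */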
13105 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13106 the pointer *(ARG_QUO) and return the result. The type is taken
13107 from the type of ARG0 and is used for setting the precision of the
13108 calculation and results. */
13110 static tree
13111 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13113 tree const type = TREE_TYPE (arg0);
13114 tree result = NULL_TREE;
13116 STRIP_NOPS (arg0);
13117 STRIP_NOPS (arg1);
13119 /* To proceed, MPFR must exactly represent the target floating point
13120 format, which only happens when the target base equals two. */
13121 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13122 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13123 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13125 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13126 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13128 if (real_isfinite (ra0) && real_isfinite (ra1))
13130 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13131 const int prec = fmt->p;
13132 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13133 tree result_rem;
13134 long integer_quo;
13135 mpfr_t m0, m1;
13137 mpfr_inits2 (prec, m0, m1, NULL);
13138 mpfr_from_real (m0, ra0, GMP_RNDN);
13139 mpfr_from_real (m1, ra1, GMP_RNDN);
13140 mpfr_clear_flags ();
13141 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13142 /* Remquo is independent of the rounding mode, so pass
13143 inexact=0 to do_mpfr_ckconv(). */
13144 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13145 mpfr_clears (m0, m1, NULL);
13146 if (result_rem)
13148 /* MPFR calculates quo in the host's long so it may
13149 return more bits in quo than the target int can hold
13150 if sizeof(host long) > sizeof(target int). This can
13151 happen even for native compilers in LP64 mode. In
13152 these cases, reduce the quo value modulo the largest number
13153 that the target int can hold, leaving one bit for the
13154 sign. */
13155 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13156 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13158 /* Dereference the quo pointer argument. */
13159 arg_quo = build_fold_indirect_ref (arg_quo);
13160 /* Proceed iff a valid pointer type was passed in. */
13161 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13163 /* Set the value. */
13164 tree result_quo = fold_build2 (MODIFY_EXPR,
13165 TREE_TYPE (arg_quo), arg_quo,
13166 build_int_cst (NULL, integer_quo));
13167 TREE_SIDE_EFFECTS (result_quo) = 1;
13168 /* Combine the quo assignment with the rem. */
13169 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13170 result_quo, result_rem));
13175 return result;
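/* Illustrative effect (hypothetical variable names): when both
   arguments of remquo are constants, the folded result is a
   COMPOUND_EXPR of the form

     (*arg_quo = <integer quotient>, <remainder constant>)

   so the store through the quo pointer is preserved while the call
   itself disappears.  For example remquo (5.0, 3.0, &q) folds to
   roughly (*q = 2, -1.0), since the quotient rounded to nearest is 2.  */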
13178 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13179 resulting value as a tree with type TYPE. The mpfr precision is
13180 set to the precision of TYPE. We assume that this mpfr function
13181 returns zero if the result could be calculated exactly within the
13182 requested precision. In addition, the integer pointer represented
13183 by ARG_SG will be dereferenced and set to the appropriate signgam
13184 (-1,1) value. */
13186 static tree
13187 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13189 tree result = NULL_TREE;
13191 STRIP_NOPS (arg);
13193 /* To proceed, MPFR must exactly represent the target floating point
13194 format, which only happens when the target base equals two. Also
13195 verify ARG is a constant and that ARG_SG is an int pointer. */
13196 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13197 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13198 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13199 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13201 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13203 /* In addition to being finite (not NaN or Inf), the argument
13204 cannot be zero or a negative integer. */
13205 if (real_isfinite (ra)
13206 && ra->cl != rvc_zero
13207 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
13209 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13210 const int prec = fmt->p;
13211 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13212 int inexact, sg;
13213 mpfr_t m;
13214 tree result_lg;
13216 mpfr_init2 (m, prec);
13217 mpfr_from_real (m, ra, GMP_RNDN);
13218 mpfr_clear_flags ();
13219 inexact = mpfr_lgamma (m, &sg, m, rnd);
13220 result_lg = do_mpfr_ckconv (m, type, inexact);
13221 mpfr_clear (m);
13222 if (result_lg)
13224 tree result_sg;
13226 /* Dereference the arg_sg pointer argument. */
13227 arg_sg = build_fold_indirect_ref (arg_sg);
13228 /* Assign the signgam value into *arg_sg. */
13229 result_sg = fold_build2 (MODIFY_EXPR,
13230 TREE_TYPE (arg_sg), arg_sg,
13231 build_int_cst (NULL, sg));
13232 TREE_SIDE_EFFECTS (result_sg) = 1;
13233 /* Combine the signgam assignment with the lgamma result. */
13234 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13235 result_sg, result_lg));
13240 return result;
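/* Illustrative effect (hypothetical variable name): folding
   lgamma_r (x, &signgam_var) for a constant x yields a COMPOUND_EXPR
   along the lines of

     (signgam_var = <1 or -1>, <lgamma constant>)

   so the signgam store survives even though the library call is
   folded away.  */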
13242 #endif
13244 /* FIXME tuples.
13245 The functions below provide an alternate interface for folding
13246 builtin function calls presented as GIMPLE_CALL statements rather
13247 than as CALL_EXPRs. The folded result is still expressed as a
13248 tree. There is too much code duplication in the handling of
13249 varargs functions, and a more intrusive re-factoring would permit
13250 better sharing of code between the tree and statement-based
13251 versions of these functions. */
13253 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13254 along with N new arguments specified as the "..." parameters. SKIP
13255 is the number of arguments in STMT to be omitted. This function is used
13256 to do varargs-to-varargs transformations. */
13258 static tree
13259 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13261 int oldnargs = gimple_call_num_args (stmt);
13262 int nargs = oldnargs - skip + n;
13263 tree fntype = TREE_TYPE (fndecl);
13264 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13265 tree *buffer;
13266 int i, j;
13267 va_list ap;
13269 buffer = XALLOCAVEC (tree, nargs);
13270 va_start (ap, n);
13271 for (i = 0; i < n; i++)
13272 buffer[i] = va_arg (ap, tree);
13273 va_end (ap);
13274 for (j = skip; j < oldnargs; j++, i++)
13275 buffer[i] = gimple_call_arg (stmt, j);
13277 return fold (build_call_array (TREE_TYPE (fntype), fn, nargs, buffer));
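/* Illustrative use, matching the *_chk folders below: to turn a
   __sprintf_chk call into a plain sprintf call, skip the original
   dest/flag/size/fmt arguments and supply dest and fmt explicitly,

     gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);

   any remaining "..." arguments of STMT are carried over unchanged.  */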
13280 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13281 a normal call should be emitted rather than expanding the function
13282 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13284 static tree
13285 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13287 tree dest, size, len, fn, fmt, flag;
13288 const char *fmt_str;
13289 int nargs = gimple_call_num_args (stmt);
13291 /* Verify the required arguments in the original call. */
13292 if (nargs < 4)
13293 return NULL_TREE;
13294 dest = gimple_call_arg (stmt, 0);
13295 if (!validate_arg (dest, POINTER_TYPE))
13296 return NULL_TREE;
13297 flag = gimple_call_arg (stmt, 1);
13298 if (!validate_arg (flag, INTEGER_TYPE))
13299 return NULL_TREE;
13300 size = gimple_call_arg (stmt, 2);
13301 if (!validate_arg (size, INTEGER_TYPE))
13302 return NULL_TREE;
13303 fmt = gimple_call_arg (stmt, 3);
13304 if (!validate_arg (fmt, POINTER_TYPE))
13305 return NULL_TREE;
13307 if (! host_integerp (size, 1))
13308 return NULL_TREE;
13310 len = NULL_TREE;
13312 if (!init_target_chars ())
13313 return NULL_TREE;
13315 /* Check whether the format is a literal string constant. */
13316 fmt_str = c_getstr (fmt);
13317 if (fmt_str != NULL)
13319 /* If the format doesn't contain % args or %%, we know the size. */
13320 if (strchr (fmt_str, target_percent) == 0)
13322 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13323 len = build_int_cstu (size_type_node, strlen (fmt_str));
13325 /* If the format is "%s" and the first ... argument is a string literal,
13326 we know the size too. */
13327 else if (fcode == BUILT_IN_SPRINTF_CHK
13328 && strcmp (fmt_str, target_percent_s) == 0)
13330 tree arg;
13332 if (nargs == 5)
13334 arg = gimple_call_arg (stmt, 4);
13335 if (validate_arg (arg, POINTER_TYPE))
13337 len = c_strlen (arg, 1);
13338 if (! len || ! host_integerp (len, 1))
13339 len = NULL_TREE;
13345 if (! integer_all_onesp (size))
13347 if (! len || ! tree_int_cst_lt (len, size))
13348 return NULL_TREE;
13351 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13352 or if format doesn't contain % chars or is "%s". */
13353 if (! integer_zerop (flag))
13355 if (fmt_str == NULL)
13356 return NULL_TREE;
13357 if (strchr (fmt_str, target_percent) != NULL
13358 && strcmp (fmt_str, target_percent_s))
13359 return NULL_TREE;
13362 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13363 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13364 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13365 if (!fn)
13366 return NULL_TREE;
13368 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
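/* Illustrative source-level effect (hypothetical buffer and sizes):
   a fortified C library header typically expands sprintf into roughly

     char buf[16];
     __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 0), "hi");

   and because the format contains no percent characters and its length
   is known to be smaller than the object size, the call above is
   rewritten into the equivalent of sprintf (buf, "hi").  */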
13371 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13372 a normal call should be emitted rather than expanding the function
13373 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13374 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13375 passed as second argument. */
13377 tree
13378 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13379 enum built_in_function fcode)
13381 tree dest, size, len, fn, fmt, flag;
13382 const char *fmt_str;
13384 /* Verify the required arguments in the original call. */
13385 if (gimple_call_num_args (stmt) < 5)
13386 return NULL_TREE;
13387 dest = gimple_call_arg (stmt, 0);
13388 if (!validate_arg (dest, POINTER_TYPE))
13389 return NULL_TREE;
13390 len = gimple_call_arg (stmt, 1);
13391 if (!validate_arg (len, INTEGER_TYPE))
13392 return NULL_TREE;
13393 flag = gimple_call_arg (stmt, 2);
13394 if (!validate_arg (flag, INTEGER_TYPE))
13395 return NULL_TREE;
13396 size = gimple_call_arg (stmt, 3);
13397 if (!validate_arg (size, INTEGER_TYPE))
13398 return NULL_TREE;
13399 fmt = gimple_call_arg (stmt, 4);
13400 if (!validate_arg (fmt, POINTER_TYPE))
13401 return NULL_TREE;
13403 if (! host_integerp (size, 1))
13404 return NULL_TREE;
13406 if (! integer_all_onesp (size))
13408 if (! host_integerp (len, 1))
13410 /* If LEN is not constant, try MAXLEN too.
13411 For MAXLEN only allow optimizing into non-_ocs function
13412 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13413 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13414 return NULL_TREE;
13416 else
13417 maxlen = len;
13419 if (tree_int_cst_lt (size, maxlen))
13420 return NULL_TREE;
13423 if (!init_target_chars ())
13424 return NULL_TREE;
13426 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13427 or if format doesn't contain % chars or is "%s". */
13428 if (! integer_zerop (flag))
13430 fmt_str = c_getstr (fmt);
13431 if (fmt_str == NULL)
13432 return NULL_TREE;
13433 if (strchr (fmt_str, target_percent) != NULL
13434 && strcmp (fmt_str, target_percent_s))
13435 return NULL_TREE;
13438 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13439 available. */
13440 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13441 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13442 if (!fn)
13443 return NULL_TREE;
13445 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
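/* Illustrative effect (hypothetical values): a checked call such as

     __builtin___snprintf_chk (buf, 8, 0, __builtin_object_size (buf, 0), "%d", i);

   can be rewritten into the equivalent of snprintf (buf, 8, "%d", i)
   once SIZE is known to be at least the constant length 8; when the
   length argument is not constant, the caller may supply MAXLEN as an
   upper bound instead.  */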
13448 /* Builtins with folding operations that operate on "..." arguments
13449 need special handling; we need to store the arguments in a convenient
13450 data structure before attempting any folding. Fortunately there are
13451 only a few builtins that fall into this category. FNDECL is the
13452 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13453 result of the function call is ignored. */
13455 static tree
13456 gimple_fold_builtin_varargs (tree fndecl, gimple stmt, bool ignore ATTRIBUTE_UNUSED)
13458 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13459 tree ret = NULL_TREE;
13461 switch (fcode)
13463 case BUILT_IN_SPRINTF_CHK:
13464 case BUILT_IN_VSPRINTF_CHK:
13465 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13466 break;
13468 case BUILT_IN_SNPRINTF_CHK:
13469 case BUILT_IN_VSNPRINTF_CHK:
13470 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13472 default:
13473 break;
13475 if (ret)
13477 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13478 TREE_NO_WARNING (ret) = 1;
13479 return ret;
13481 return NULL_TREE;
13484 /* A wrapper function for builtin folding that prevents warnings for
13485 "statement without effect" and the like, caused by removing the
13486 call node earlier than the warning is generated. */
13488 tree
13489 fold_call_stmt (gimple stmt, bool ignore)
13491 tree ret = NULL_TREE;
13492 tree fndecl = gimple_call_fndecl (stmt);
13493 if (fndecl
13494 && TREE_CODE (fndecl) == FUNCTION_DECL
13495 && DECL_BUILT_IN (fndecl)
13496 && !gimple_call_va_arg_pack_p (stmt))
13498 int nargs = gimple_call_num_args (stmt);
13500 /* FIXME: Don't use a list in this interface. */
13501 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13503 tree arglist = NULL_TREE;
13504 int i;
13505 for (i = nargs - 1; i >= 0; i--)
13506 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13507 return targetm.fold_builtin (fndecl, arglist, ignore);
13509 else
13511 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13513 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13514 int i;
13515 for (i = 0; i < nargs; i++)
13516 args[i] = gimple_call_arg (stmt, i);
13517 ret = fold_builtin_n (fndecl, args, nargs, ignore);
13519 if (!ret)
13520 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13521 if (ret)
13523 /* Propagate location information from original call to
13524 expansion of builtin. Otherwise things like
13525 maybe_emit_chk_warning, that operate on the expansion
13526 of a builtin, will use the wrong location information. */
13527 if (gimple_has_location (stmt))
13529 tree realret = ret;
13530 if (TREE_CODE (ret) == NOP_EXPR)
13531 realret = TREE_OPERAND (ret, 0);
13532 if (CAN_HAVE_LOCATION_P (realret)
13533 && !EXPR_HAS_LOCATION (realret))
13534 SET_EXPR_LOCATION (realret, gimple_location (stmt));
13535 return realret;
13537 return ret;
13541 return NULL_TREE;
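/* Illustrative use from a GIMPLE pass (a sketch, assuming a statement
   STMT already known to be a call):

     tree folded = fold_call_stmt (stmt, gimple_call_lhs (stmt) == NULL_TREE);
     if (folded)
       {
         ... replace the uses of the call's result with FOLDED ...
       }

   The IGNORE flag tells the folder that the call's value is unused,
   which permits foldings that only need to preserve side effects.  */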