gcc/builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
56 #endif
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names[(int) END_BUILTINS] =
64 {
65 #include "builtins.def"
66 };
67 #undef DEF_BUILTIN
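/* For illustration only (not an actual entry from builtins.def): an entry such
   as DEF_BUILTIN (BUILT_IN_ALLOCA, "alloca", ...) expands here, via the #X
   stringification above, to the string "BUILT_IN_ALLOCA", so built_in_names[]
   holds the spelling of each enumerator in enum built_in_function.  */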
69 /* Set up an array of _DECL trees and make sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 An entry may be NULL_TREE when implicit use of the builtin is invalid (for
74 instance, the runtime is not required to implement the call in all cases). */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
85 #endif
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
132 static rtx expand_builtin_bzero (tree);
133 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_alloca (tree, rtx);
139 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static rtx expand_builtin_fputs (tree, rtx, bool);
142 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
143 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_expect (tree, tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (tree);
151 static tree fold_builtin_inf (tree, int);
152 static tree fold_builtin_nan (tree, tree, int);
153 static tree rewrite_call_expr (tree, int, tree, int, ...);
154 static bool validate_arg (const_tree, enum tree_code code);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (tree, tree);
157 static bool readonly_data_expr (tree);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_sqrt (tree, tree);
161 static tree fold_builtin_cbrt (tree, tree);
162 static tree fold_builtin_pow (tree, tree, tree, tree);
163 static tree fold_builtin_powi (tree, tree, tree, tree);
164 static tree fold_builtin_cos (tree, tree, tree);
165 static tree fold_builtin_cosh (tree, tree, tree);
166 static tree fold_builtin_tan (tree, tree);
167 static tree fold_builtin_trunc (tree, tree);
168 static tree fold_builtin_floor (tree, tree);
169 static tree fold_builtin_ceil (tree, tree);
170 static tree fold_builtin_round (tree, tree);
171 static tree fold_builtin_int_roundingfn (tree, tree);
172 static tree fold_builtin_bitop (tree, tree);
173 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
174 static tree fold_builtin_strchr (tree, tree, tree);
175 static tree fold_builtin_memchr (tree, tree, tree, tree);
176 static tree fold_builtin_memcmp (tree, tree, tree);
177 static tree fold_builtin_strcmp (tree, tree);
178 static tree fold_builtin_strncmp (tree, tree, tree);
179 static tree fold_builtin_signbit (tree, tree);
180 static tree fold_builtin_copysign (tree, tree, tree, tree);
181 static tree fold_builtin_isascii (tree);
182 static tree fold_builtin_toascii (tree);
183 static tree fold_builtin_isdigit (tree);
184 static tree fold_builtin_fabs (tree, tree);
185 static tree fold_builtin_abs (tree, tree);
186 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
187 enum tree_code);
188 static tree fold_builtin_n (tree, tree *, int, bool);
189 static tree fold_builtin_0 (tree, bool);
190 static tree fold_builtin_1 (tree, tree, bool);
191 static tree fold_builtin_2 (tree, tree, tree, bool);
192 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
193 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
194 static tree fold_builtin_varargs (tree, tree, bool);
196 static tree fold_builtin_strpbrk (tree, tree, tree);
197 static tree fold_builtin_strstr (tree, tree, tree);
198 static tree fold_builtin_strrchr (tree, tree, tree);
199 static tree fold_builtin_strcat (tree, tree);
200 static tree fold_builtin_strncat (tree, tree, tree);
201 static tree fold_builtin_strspn (tree, tree);
202 static tree fold_builtin_strcspn (tree, tree);
203 static tree fold_builtin_sprintf (tree, tree, tree, int);
205 static rtx expand_builtin_object_size (tree);
206 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
207 enum built_in_function);
208 static void maybe_emit_chk_warning (tree, enum built_in_function);
209 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
210 static tree fold_builtin_object_size (tree, tree);
211 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
212 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
213 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
214 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
215 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
216 enum built_in_function);
217 static bool init_target_chars (void);
219 static unsigned HOST_WIDE_INT target_newline;
220 static unsigned HOST_WIDE_INT target_percent;
221 static unsigned HOST_WIDE_INT target_c;
222 static unsigned HOST_WIDE_INT target_s;
223 static char target_percent_c[3];
224 static char target_percent_s[3];
225 static char target_percent_s_newline[4];
226 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_arg2 (tree, tree, tree,
229 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
230 static tree do_mpfr_arg3 (tree, tree, tree, tree,
231 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
232 static tree do_mpfr_sincos (tree, tree, tree);
233 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
234 static tree do_mpfr_bessel_n (tree, tree, tree,
235 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
236 const REAL_VALUE_TYPE *, bool);
237 static tree do_mpfr_remquo (tree, tree, tree);
238 static tree do_mpfr_lgamma_r (tree, tree, tree);
239 #endif
241 /* Return true if NODE should be considered for inline expansion regardless
242 of the optimization level. This means whenever a function is invoked with
243 its "internal" name, which normally contains the prefix "__builtin". */
245 static bool called_as_built_in (tree node)
247 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
248 if (strncmp (name, "__builtin_", 10) == 0)
249 return true;
250 if (strncmp (name, "__sync_", 7) == 0)
251 return true;
252 return false;
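/* For example, a call spelled "__builtin_memcpy" or "__sync_fetch_and_add"
   is considered built in by the prefix tests above, while a plain "memcpy"
   is not.  */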
255 /* Return the alignment in bits of EXP, a pointer valued expression.
256 But don't return more than MAX_ALIGN no matter what.
257 The alignment returned is, by default, the alignment of the thing that
258 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
260 Otherwise, look at the expression to see if we can do better, i.e., if the
261 expression is actually pointing at an object whose alignment is tighter. */
263 unsigned int
264 get_pointer_alignment (tree exp, unsigned int max_align)
266 unsigned int align, inner;
268 /* We rely on TER to compute accurate alignment information. */
269 if (!(optimize && flag_tree_ter))
270 return 0;
272 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
273 return 0;
275 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
276 align = MIN (align, max_align);
278 while (1)
280 switch (TREE_CODE (exp))
282 CASE_CONVERT:
283 exp = TREE_OPERAND (exp, 0);
284 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
285 return align;
287 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
288 align = MIN (inner, max_align);
289 break;
291 case POINTER_PLUS_EXPR:
292 /* If sum of pointer + int, restrict our maximum alignment to that
293 imposed by the integer. If not, we can't do any better than
294 ALIGN. */
295 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
296 return align;
298 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
299 & (max_align / BITS_PER_UNIT - 1))
300 != 0)
301 max_align >>= 1;
303 exp = TREE_OPERAND (exp, 0);
304 break;
306 case ADDR_EXPR:
307 /* See what we are pointing at and look at its alignment. */
308 exp = TREE_OPERAND (exp, 0);
309 inner = max_align;
310 if (handled_component_p (exp))
312 HOST_WIDE_INT bitsize, bitpos;
313 tree offset;
314 enum machine_mode mode;
315 int unsignedp, volatilep;
317 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
318 &mode, &unsignedp, &volatilep, true);
319 if (bitpos)
320 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
321 if (offset && TREE_CODE (offset) == PLUS_EXPR
322 && host_integerp (TREE_OPERAND (offset, 1), 1))
324 /* Any overflow in calculating offset_bits won't change
325 the alignment. */
326 unsigned offset_bits
327 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
328 * BITS_PER_UNIT);
330 if (offset_bits)
331 inner = MIN (inner, (offset_bits & -offset_bits));
332 offset = TREE_OPERAND (offset, 0);
334 if (offset && TREE_CODE (offset) == MULT_EXPR
335 && host_integerp (TREE_OPERAND (offset, 1), 1))
337 /* Any overflow in calculating offset_factor won't change
338 the alignment. */
339 unsigned offset_factor
340 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
341 * BITS_PER_UNIT);
343 if (offset_factor)
344 inner = MIN (inner, (offset_factor & -offset_factor));
346 else if (offset)
347 inner = MIN (inner, BITS_PER_UNIT);
349 if (DECL_P (exp))
350 align = MIN (inner, DECL_ALIGN (exp));
351 #ifdef CONSTANT_ALIGNMENT
352 else if (CONSTANT_CLASS_P (exp))
353 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
354 #endif
355 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
356 || TREE_CODE (exp) == INDIRECT_REF)
357 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
358 else
359 align = MIN (align, inner);
360 return MIN (align, max_align);
362 default:
363 return align;
368 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
369 way, because it could contain a zero byte in the middle.
370 TREE_STRING_LENGTH is the size of the character array, not the string.
372 ONLY_VALUE should be nonzero if the result is not going to be emitted
373 into the instruction stream and zero if it is going to be expanded.
374 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
375 is returned, otherwise NULL, since
376 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
377 evaluate the side-effects.
379 The value returned is of type `ssizetype'.
381 Unfortunately, string_constant can't access the values of const char
382 arrays with initializers, so neither can we do so here. */
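/* Illustrative cases (assuming the argument folds to a string constant):
   c_strlen ("hello", 1) yields ssize_int (5); for "foo\0bar" with an offset
   that is not a compile-time constant the function returns NULL_TREE, because
   the embedded zero byte makes the length depend on the unknown offset.  */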
384 tree
385 c_strlen (tree src, int only_value)
387 tree offset_node;
388 HOST_WIDE_INT offset;
389 int max;
390 const char *ptr;
392 STRIP_NOPS (src);
393 if (TREE_CODE (src) == COND_EXPR
394 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
396 tree len1, len2;
398 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
399 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
400 if (tree_int_cst_equal (len1, len2))
401 return len1;
404 if (TREE_CODE (src) == COMPOUND_EXPR
405 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
406 return c_strlen (TREE_OPERAND (src, 1), only_value);
408 src = string_constant (src, &offset_node);
409 if (src == 0)
410 return NULL_TREE;
412 max = TREE_STRING_LENGTH (src) - 1;
413 ptr = TREE_STRING_POINTER (src);
415 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
417 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
418 compute the offset to the following null if we don't know where to
419 start searching for it. */
420 int i;
422 for (i = 0; i < max; i++)
423 if (ptr[i] == 0)
424 return NULL_TREE;
426 /* We don't know the starting offset, but we do know that the string
427 has no internal zero bytes. We can assume that the offset falls
428 within the bounds of the string; otherwise, the programmer deserves
429 what he gets. Subtract the offset from the length of the string,
430 and return that. This would perhaps not be valid if we were dealing
431 with named arrays in addition to literal string constants. */
433 return size_diffop (size_int (max), offset_node);
436 /* We have a known offset into the string. Start searching there for
437 a null character if we can represent it as a single HOST_WIDE_INT. */
438 if (offset_node == 0)
439 offset = 0;
440 else if (! host_integerp (offset_node, 0))
441 offset = -1;
442 else
443 offset = tree_low_cst (offset_node, 0);
445 /* If the offset is known to be out of bounds, warn, and call strlen at
446 runtime. */
447 if (offset < 0 || offset > max)
449 /* Suppress multiple warnings for propagated constant strings. */
450 if (! TREE_NO_WARNING (src))
452 warning (0, "offset outside bounds of constant string");
453 TREE_NO_WARNING (src) = 1;
455 return NULL_TREE;
458 /* Use strlen to search for the first zero byte. Since any strings
459 constructed with build_string will have nulls appended, we win even
460 if we get handed something like (char[4])"abcd".
462 Since OFFSET is our starting index into the string, no further
463 calculation is needed. */
464 return ssize_int (strlen (ptr + offset));
467 /* Return a char pointer for a C string if it is a string constant
468 or sum of string constant and integer constant. */
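/* For instance (illustrative only), c_getstr applied to the expression
   "hello" + 2 returns a pointer to the tail string "llo", while a
   non-constant or out-of-range offset makes it return 0.  */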
470 static const char *
471 c_getstr (tree src)
473 tree offset_node;
475 src = string_constant (src, &offset_node);
476 if (src == 0)
477 return 0;
479 if (offset_node == 0)
480 return TREE_STRING_POINTER (src);
481 else if (!host_integerp (offset_node, 1)
482 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
483 return 0;
485 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
488 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
489 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
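/* Worked example (illustrative, assuming a little-endian target where
   neither BYTES_BIG_ENDIAN nor WORDS_BIG_ENDIAN is set): reading "abcd"
   in SImode places 'a' (0x61) in the low-order byte and yields the
   CONST_INT 0x64636261.  */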
491 static rtx
492 c_readstr (const char *str, enum machine_mode mode)
494 HOST_WIDE_INT c[2];
495 HOST_WIDE_INT ch;
496 unsigned int i, j;
498 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
500 c[0] = 0;
501 c[1] = 0;
502 ch = 1;
503 for (i = 0; i < GET_MODE_SIZE (mode); i++)
505 j = i;
506 if (WORDS_BIG_ENDIAN)
507 j = GET_MODE_SIZE (mode) - i - 1;
508 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
509 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
510 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
511 j *= BITS_PER_UNIT;
512 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
514 if (ch)
515 ch = (unsigned char) str[i];
516 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
518 return immed_double_const (c[0], c[1], mode);
521 /* Cast a target constant CST to target CHAR and if that value fits into
522 host char type, return zero and put that value into variable pointed to by
523 P. */
525 static int
526 target_char_cast (tree cst, char *p)
528 unsigned HOST_WIDE_INT val, hostval;
530 if (!host_integerp (cst, 1)
531 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
532 return 1;
534 val = tree_low_cst (cst, 1);
535 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
536 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
538 hostval = val;
539 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
540 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
542 if (val != hostval)
543 return 1;
545 *p = hostval;
546 return 0;
549 /* Similar to save_expr, but assumes that arbitrary code is not executed
550 in between the multiple evaluations. In particular, we assume that a
551 non-addressable local variable will not be modified. */
553 static tree
554 builtin_save_expr (tree exp)
556 if (TREE_ADDRESSABLE (exp) == 0
557 && (TREE_CODE (exp) == PARM_DECL
558 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
559 return exp;
561 return save_expr (exp);
564 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
565 times to get the address of either a higher stack frame, or a return
566 address located within it (depending on FNDECL_CODE). */
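/* For example, __builtin_return_address (1) follows the dynamic chain one
   frame up from the current frame and reads the return address saved there,
   subject to the target macros used below.  */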
568 static rtx
569 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
571 int i;
573 #ifdef INITIAL_FRAME_ADDRESS_RTX
574 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
575 #else
576 rtx tem;
578 /* For a zero count with __builtin_return_address, we don't care what
579 frame address we return, because target-specific definitions will
580 override us. Therefore frame pointer elimination is OK, and using
581 the soft frame pointer is OK.
583 For a nonzero count, or a zero count with __builtin_frame_address,
584 we require a stable offset from the current frame pointer to the
585 previous one, so we must use the hard frame pointer, and
586 we must disable frame pointer elimination. */
587 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
588 tem = frame_pointer_rtx;
589 else
591 tem = hard_frame_pointer_rtx;
593 /* Tell reload not to eliminate the frame pointer. */
594 crtl->accesses_prior_frames = 1;
596 #endif
598 /* Some machines need special handling before we can access
599 arbitrary frames. For example, on the SPARC, we must first flush
600 all register windows to the stack. */
601 #ifdef SETUP_FRAME_ADDRESSES
602 if (count > 0)
603 SETUP_FRAME_ADDRESSES ();
604 #endif
606 /* On the SPARC, the return address is not in the frame, it is in a
607 register. There is no way to access it off of the current frame
608 pointer, but it can be accessed off the previous frame pointer by
609 reading the value from the register window save area. */
610 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
611 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
612 count--;
613 #endif
615 /* Scan back COUNT frames to the specified frame. */
616 for (i = 0; i < count; i++)
618 /* Assume the dynamic chain pointer is in the word that the
619 frame address points to, unless otherwise specified. */
620 #ifdef DYNAMIC_CHAIN_ADDRESS
621 tem = DYNAMIC_CHAIN_ADDRESS (tem);
622 #endif
623 tem = memory_address (Pmode, tem);
624 tem = gen_frame_mem (Pmode, tem);
625 tem = copy_to_reg (tem);
628 /* For __builtin_frame_address, return what we've got. But, on
629 the SPARC for example, we may have to add a bias. */
630 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
631 #ifdef FRAME_ADDR_RTX
632 return FRAME_ADDR_RTX (tem);
633 #else
634 return tem;
635 #endif
637 /* For __builtin_return_address, get the return address from that frame. */
638 #ifdef RETURN_ADDR_RTX
639 tem = RETURN_ADDR_RTX (count, tem);
640 #else
641 tem = memory_address (Pmode,
642 plus_constant (tem, GET_MODE_SIZE (Pmode)));
643 tem = gen_frame_mem (Pmode, tem);
644 #endif
645 return tem;
648 /* Alias set used for setjmp buffer. */
649 static alias_set_type setjmp_alias_set = -1;
651 /* Construct the leading half of a __builtin_setjmp call. Control will
652 return to RECEIVER_LABEL. This is also called directly by the SJLJ
653 exception handling code. */
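/* As laid out by the code below, word 0 of the buffer receives the frame
   pointer value, word 1 the address of RECEIVER_LABEL, and the remainder
   (from offset 2 * GET_MODE_SIZE (Pmode)) is used as the stack save area.  */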
655 void
656 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
658 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
659 rtx stack_save;
660 rtx mem;
662 if (setjmp_alias_set == -1)
663 setjmp_alias_set = new_alias_set ();
665 buf_addr = convert_memory_address (Pmode, buf_addr);
667 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
669 /* We store the frame pointer and the address of receiver_label in
670 the buffer and use the rest of it for the stack save area, which
671 is machine-dependent. */
673 mem = gen_rtx_MEM (Pmode, buf_addr);
674 set_mem_alias_set (mem, setjmp_alias_set);
675 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
677 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
678 set_mem_alias_set (mem, setjmp_alias_set);
680 emit_move_insn (validize_mem (mem),
681 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
683 stack_save = gen_rtx_MEM (sa_mode,
684 plus_constant (buf_addr,
685 2 * GET_MODE_SIZE (Pmode)));
686 set_mem_alias_set (stack_save, setjmp_alias_set);
687 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
689 /* If there is further processing to do, do it. */
690 #ifdef HAVE_builtin_setjmp_setup
691 if (HAVE_builtin_setjmp_setup)
692 emit_insn (gen_builtin_setjmp_setup (buf_addr));
693 #endif
695 /* Tell optimize_save_area_alloca that extra work is going to
696 need to go on during alloca. */
697 cfun->calls_setjmp = 1;
699 /* We have a nonlocal label. */
700 cfun->has_nonlocal_label = 1;
703 /* Construct the trailing part of a __builtin_setjmp call. This is
704 also called directly by the SJLJ exception handling code. */
706 void
707 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
709 /* Clobber the FP when we get here, so we have to make sure it's
710 marked as used by this function. */
711 emit_use (hard_frame_pointer_rtx);
713 /* Mark the static chain as clobbered here so life information
714 doesn't get messed up for it. */
715 emit_clobber (static_chain_rtx);
717 /* Now put in the code to restore the frame pointer, and argument
718 pointer, if needed. */
719 #ifdef HAVE_nonlocal_goto
720 if (! HAVE_nonlocal_goto)
721 #endif
723 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
724 /* This might change the hard frame pointer in ways that aren't
725 apparent to early optimization passes, so force a clobber. */
726 emit_clobber (hard_frame_pointer_rtx);
729 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
730 if (fixed_regs[ARG_POINTER_REGNUM])
732 #ifdef ELIMINABLE_REGS
733 size_t i;
734 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
736 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
737 if (elim_regs[i].from == ARG_POINTER_REGNUM
738 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
739 break;
741 if (i == ARRAY_SIZE (elim_regs))
742 #endif
744 /* Now restore our arg pointer from the address at which it
745 was saved in our stack frame. */
746 emit_move_insn (virtual_incoming_args_rtx,
747 copy_to_reg (get_arg_pointer_save_area ()));
750 #endif
752 #ifdef HAVE_builtin_setjmp_receiver
753 if (HAVE_builtin_setjmp_receiver)
754 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
755 else
756 #endif
757 #ifdef HAVE_nonlocal_goto_receiver
758 if (HAVE_nonlocal_goto_receiver)
759 emit_insn (gen_nonlocal_goto_receiver ());
760 else
761 #endif
762 { /* Nothing */ }
764 /* We must not allow the code we just generated to be reordered by
765 scheduling. Specifically, the update of the frame pointer must
766 happen immediately, not later. */
767 emit_insn (gen_blockage ());
770 /* __builtin_longjmp is passed a pointer to an array of five words (not
771 all will be used on all machines). It operates similarly to the C
772 library function of the same name, but is more efficient. Much of
773 the code below is copied from the handling of non-local gotos. */
775 static void
776 expand_builtin_longjmp (rtx buf_addr, rtx value)
778 rtx fp, lab, stack, insn, last;
779 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
781 if (setjmp_alias_set == -1)
782 setjmp_alias_set = new_alias_set ();
784 buf_addr = convert_memory_address (Pmode, buf_addr);
786 buf_addr = force_reg (Pmode, buf_addr);
788 /* We used to store value in static_chain_rtx, but that fails if pointers
789 are smaller than integers. We instead require that the user must pass
790 a second argument of 1, because that is what builtin_setjmp will
791 return. This also makes EH slightly more efficient, since we are no
792 longer copying around a value that we don't care about. */
793 gcc_assert (value == const1_rtx);
795 last = get_last_insn ();
796 #ifdef HAVE_builtin_longjmp
797 if (HAVE_builtin_longjmp)
798 emit_insn (gen_builtin_longjmp (buf_addr));
799 else
800 #endif
802 fp = gen_rtx_MEM (Pmode, buf_addr);
803 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
804 GET_MODE_SIZE (Pmode)));
806 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
807 2 * GET_MODE_SIZE (Pmode)));
808 set_mem_alias_set (fp, setjmp_alias_set);
809 set_mem_alias_set (lab, setjmp_alias_set);
810 set_mem_alias_set (stack, setjmp_alias_set);
812 /* Pick up FP, label, and SP from the block and jump. This code is
813 from expand_goto in stmt.c; see there for detailed comments. */
814 #ifdef HAVE_nonlocal_goto
815 if (HAVE_nonlocal_goto)
816 /* We have to pass a value to the nonlocal_goto pattern that will
817 get copied into the static_chain pointer, but it does not matter
818 what that value is, because builtin_setjmp does not use it. */
819 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
820 else
821 #endif
823 lab = copy_to_reg (lab);
825 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
826 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
828 emit_move_insn (hard_frame_pointer_rtx, fp);
829 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
831 emit_use (hard_frame_pointer_rtx);
832 emit_use (stack_pointer_rtx);
833 emit_indirect_jump (lab);
837 /* Search backwards and mark the jump insn as a non-local goto.
838 Note that this precludes the use of __builtin_longjmp to a
839 __builtin_setjmp target in the same function. However, we've
840 already cautioned the user that these functions are for
841 internal exception handling use only. */
842 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
844 gcc_assert (insn != last);
846 if (JUMP_P (insn))
848 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
849 break;
851 else if (CALL_P (insn))
852 break;
856 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
857 and the address of the save area. */
859 static rtx
860 expand_builtin_nonlocal_goto (tree exp)
862 tree t_label, t_save_area;
863 rtx r_label, r_save_area, r_fp, r_sp, insn;
865 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
866 return NULL_RTX;
868 t_label = CALL_EXPR_ARG (exp, 0);
869 t_save_area = CALL_EXPR_ARG (exp, 1);
871 r_label = expand_normal (t_label);
872 r_label = convert_memory_address (Pmode, r_label);
873 r_save_area = expand_normal (t_save_area);
874 r_save_area = convert_memory_address (Pmode, r_save_area);
875 /* Copy the address of the save location to a register just in case it was based
876 on the frame pointer. */
877 r_save_area = copy_to_reg (r_save_area);
878 r_fp = gen_rtx_MEM (Pmode, r_save_area);
879 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
880 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
882 crtl->has_nonlocal_goto = 1;
884 #ifdef HAVE_nonlocal_goto
885 /* ??? We no longer need to pass the static chain value, afaik. */
886 if (HAVE_nonlocal_goto)
887 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
888 else
889 #endif
891 r_label = copy_to_reg (r_label);
893 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
894 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
896 /* Restore frame pointer for containing function.
897 This sets the actual hard register used for the frame pointer
898 to the location of the function's incoming static chain info.
899 The non-local goto handler will then adjust it to contain the
900 proper value and reload the argument pointer, if needed. */
901 emit_move_insn (hard_frame_pointer_rtx, r_fp);
902 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
904 /* USE of hard_frame_pointer_rtx added for consistency;
905 not clear if really needed. */
906 emit_use (hard_frame_pointer_rtx);
907 emit_use (stack_pointer_rtx);
909 /* If the architecture is using a GP register, we must
910 conservatively assume that the target function makes use of it.
911 The prologue of functions with nonlocal gotos must therefore
912 initialize the GP register to the appropriate value, and we
913 must then make sure that this value is live at the point
914 of the jump. (Note that this doesn't necessarily apply
915 to targets with a nonlocal_goto pattern; they are free
916 to implement it in their own way. Note also that this is
917 a no-op if the GP register is a global invariant.) */
918 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
919 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
920 emit_use (pic_offset_table_rtx);
922 emit_indirect_jump (r_label);
925 /* Search backwards to the jump insn and mark it as a
926 non-local goto. */
927 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
929 if (JUMP_P (insn))
931 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
932 break;
934 else if (CALL_P (insn))
935 break;
938 return const0_rtx;
941 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
942 (not all will be used on all machines) that was passed to __builtin_setjmp.
943 It updates the stack pointer in that block to correspond to the current
944 stack pointer. */
946 static void
947 expand_builtin_update_setjmp_buf (rtx buf_addr)
949 enum machine_mode sa_mode = Pmode;
950 rtx stack_save;
953 #ifdef HAVE_save_stack_nonlocal
954 if (HAVE_save_stack_nonlocal)
955 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
956 #endif
957 #ifdef STACK_SAVEAREA_MODE
958 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
959 #endif
961 stack_save
962 = gen_rtx_MEM (sa_mode,
963 memory_address
964 (sa_mode,
965 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
967 #ifdef HAVE_setjmp
968 if (HAVE_setjmp)
969 emit_insn (gen_setjmp ());
970 #endif
972 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
975 /* Expand a call to __builtin_prefetch. For a target that does not support
976 data prefetch, evaluate the memory address argument in case it has side
977 effects. */
979 static void
980 expand_builtin_prefetch (tree exp)
982 tree arg0, arg1, arg2;
983 int nargs;
984 rtx op0, op1, op2;
986 if (!validate_arglist (exp, POINTER_TYPE, 0))
987 return;
989 arg0 = CALL_EXPR_ARG (exp, 0);
991 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
992 zero (read) and argument 2 (locality) defaults to 3 (high degree of
993 locality). */
994 nargs = call_expr_nargs (exp);
995 if (nargs > 1)
996 arg1 = CALL_EXPR_ARG (exp, 1);
997 else
998 arg1 = integer_zero_node;
999 if (nargs > 2)
1000 arg2 = CALL_EXPR_ARG (exp, 2);
1001 else
1002 arg2 = build_int_cst (NULL_TREE, 3);
1004 /* Argument 0 is an address. */
1005 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1007 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1008 if (TREE_CODE (arg1) != INTEGER_CST)
1010 error ("second argument to %<__builtin_prefetch%> must be a constant");
1011 arg1 = integer_zero_node;
1013 op1 = expand_normal (arg1);
1014 /* Argument 1 must be either zero or one. */
1015 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1017 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1018 " using zero");
1019 op1 = const0_rtx;
1022 /* Argument 2 (locality) must be a compile-time constant int. */
1023 if (TREE_CODE (arg2) != INTEGER_CST)
1025 error ("third argument to %<__builtin_prefetch%> must be a constant");
1026 arg2 = integer_zero_node;
1028 op2 = expand_normal (arg2);
1029 /* Argument 2 must be 0, 1, 2, or 3. */
1030 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1032 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1033 op2 = const0_rtx;
1036 #ifdef HAVE_prefetch
1037 if (HAVE_prefetch)
1039 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1040 (op0,
1041 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1042 || (GET_MODE (op0) != Pmode))
1044 op0 = convert_memory_address (Pmode, op0);
1045 op0 = force_reg (Pmode, op0);
1047 emit_insn (gen_prefetch (op0, op1, op2));
1049 #endif
1051 /* Don't do anything with direct references to volatile memory, but
1052 generate code to handle other side effects. */
1053 if (!MEM_P (op0) && side_effects_p (op0))
1054 emit_insn (op0);
1057 /* Get a MEM rtx for expression EXP which is the address of an operand
1058 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1059 the maximum length of the block of memory that might be accessed or
1060 NULL if unknown. */
1062 static rtx
1063 get_memory_rtx (tree exp, tree len)
1065 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1066 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1068 /* Get an expression we can use to find the attributes to assign to MEM.
1069 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1070 we can. First remove any nops. */
1071 while (CONVERT_EXPR_P (exp)
1072 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1073 exp = TREE_OPERAND (exp, 0);
1075 if (TREE_CODE (exp) == ADDR_EXPR)
1076 exp = TREE_OPERAND (exp, 0);
1077 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1078 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1079 else
1080 exp = NULL;
1082 /* Honor attributes derived from exp, except for the alias set
1083 (as builtin stringops may alias with anything) and the size
1084 (as stringops may access multiple array elements). */
1085 if (exp)
1087 set_mem_attributes (mem, exp, 0);
1089 /* Allow the string and memory builtins to overflow from one
1090 field into another, see http://gcc.gnu.org/PR23561.
1091 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1092 memory accessed by the string or memory builtin will fit
1093 within the field. */
1094 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1096 tree mem_expr = MEM_EXPR (mem);
1097 HOST_WIDE_INT offset = -1, length = -1;
1098 tree inner = exp;
1100 while (TREE_CODE (inner) == ARRAY_REF
1101 || CONVERT_EXPR_P (inner)
1102 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1103 || TREE_CODE (inner) == SAVE_EXPR)
1104 inner = TREE_OPERAND (inner, 0);
1106 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1108 if (MEM_OFFSET (mem)
1109 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1110 offset = INTVAL (MEM_OFFSET (mem));
1112 if (offset >= 0 && len && host_integerp (len, 0))
1113 length = tree_low_cst (len, 0);
1115 while (TREE_CODE (inner) == COMPONENT_REF)
1117 tree field = TREE_OPERAND (inner, 1);
1118 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1119 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1121 /* Bitfields are generally not byte-addressable. */
1122 gcc_assert (!DECL_BIT_FIELD (field)
1123 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1124 % BITS_PER_UNIT) == 0
1125 && host_integerp (DECL_SIZE (field), 0)
1126 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1127 % BITS_PER_UNIT) == 0));
1129 /* If we can prove that the memory starting at XEXP (mem, 0) and
1130 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1131 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1132 fields without DECL_SIZE_UNIT like flexible array members. */
1133 if (length >= 0
1134 && DECL_SIZE_UNIT (field)
1135 && host_integerp (DECL_SIZE_UNIT (field), 0))
1137 HOST_WIDE_INT size
1138 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1139 if (offset <= size
1140 && length <= size
1141 && offset + length <= size)
1142 break;
1145 if (offset >= 0
1146 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1147 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1148 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1149 / BITS_PER_UNIT;
1150 else
1152 offset = -1;
1153 length = -1;
1156 mem_expr = TREE_OPERAND (mem_expr, 0);
1157 inner = TREE_OPERAND (inner, 0);
1160 if (mem_expr == NULL)
1161 offset = -1;
1162 if (mem_expr != MEM_EXPR (mem))
1164 set_mem_expr (mem, mem_expr);
1165 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1168 set_mem_alias_set (mem, 0);
1169 set_mem_size (mem, NULL_RTX);
1172 return mem;
1175 /* Built-in functions to perform an untyped call and return. */
1177 /* For each register that may be used for calling a function, this
1178 gives a mode used to copy the register's value. VOIDmode indicates
1179 the register is not used for calling a function. If the machine
1180 has register windows, this gives only the outbound registers.
1181 INCOMING_REGNO gives the corresponding inbound register. */
1182 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1184 /* For each register that may be used for returning values, this gives
1185 a mode used to copy the register's value. VOIDmode indicates the
1186 register is not used for returning values. If the machine has
1187 register windows, this gives only the outbound registers.
1188 INCOMING_REGNO gives the corresponding inbound register. */
1189 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1191 /* For each register that may be used for calling a function, this
1192 gives the offset of that register into the block returned by
1193 __builtin_apply_args. 0 indicates that the register is not
1194 used for calling a function. */
1195 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1197 /* Return the size required for the block returned by __builtin_apply_args,
1198 and initialize apply_args_mode. */
1200 static int
1201 apply_args_size (void)
1203 static int size = -1;
1204 int align;
1205 unsigned int regno;
1206 enum machine_mode mode;
1208 /* The values computed by this function never change. */
1209 if (size < 0)
1211 /* The first value is the incoming arg-pointer. */
1212 size = GET_MODE_SIZE (Pmode);
1214 /* The second value is the structure value address unless this is
1215 passed as an "invisible" first argument. */
1216 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1217 size += GET_MODE_SIZE (Pmode);
1219 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1220 if (FUNCTION_ARG_REGNO_P (regno))
1222 mode = reg_raw_mode[regno];
1224 gcc_assert (mode != VOIDmode);
1226 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1227 if (size % align != 0)
1228 size = CEIL (size, align) * align;
1229 apply_args_reg_offset[regno] = size;
1230 size += GET_MODE_SIZE (mode);
1231 apply_args_mode[regno] = mode;
1233 else
1235 apply_args_mode[regno] = VOIDmode;
1236 apply_args_reg_offset[regno] = 0;
1239 return size;
1242 /* Return the size required for the block returned by __builtin_apply,
1243 and initialize apply_result_mode. */
1245 static int
1246 apply_result_size (void)
1248 static int size = -1;
1249 int align, regno;
1250 enum machine_mode mode;
1252 /* The values computed by this function never change. */
1253 if (size < 0)
1255 size = 0;
1257 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1258 if (FUNCTION_VALUE_REGNO_P (regno))
1260 mode = reg_raw_mode[regno];
1262 gcc_assert (mode != VOIDmode);
1264 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1265 if (size % align != 0)
1266 size = CEIL (size, align) * align;
1267 size += GET_MODE_SIZE (mode);
1268 apply_result_mode[regno] = mode;
1270 else
1271 apply_result_mode[regno] = VOIDmode;
1273 /* Allow targets that use untyped_call and untyped_return to override
1274 the size so that machine-specific information can be stored here. */
1275 #ifdef APPLY_RESULT_SIZE
1276 size = APPLY_RESULT_SIZE;
1277 #endif
1279 return size;
1282 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1283 /* Create a vector describing the result block RESULT. If SAVEP is true,
1284 the result block is used to save the values; otherwise it is used to
1285 restore the values. */
1287 static rtx
1288 result_vector (int savep, rtx result)
1290 int regno, size, align, nelts;
1291 enum machine_mode mode;
1292 rtx reg, mem;
1293 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1295 size = nelts = 0;
1296 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1297 if ((mode = apply_result_mode[regno]) != VOIDmode)
1299 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1300 if (size % align != 0)
1301 size = CEIL (size, align) * align;
1302 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1303 mem = adjust_address (result, mode, size);
1304 savevec[nelts++] = (savep
1305 ? gen_rtx_SET (VOIDmode, mem, reg)
1306 : gen_rtx_SET (VOIDmode, reg, mem));
1307 size += GET_MODE_SIZE (mode);
1309 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1311 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1313 /* Save the state required to perform an untyped call with the same
1314 arguments as were passed to the current function. */
1316 static rtx
1317 expand_builtin_apply_args_1 (void)
1319 rtx registers, tem;
1320 int size, align, regno;
1321 enum machine_mode mode;
1322 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1324 /* Create a block where the arg-pointer, structure value address,
1325 and argument registers can be saved. */
1326 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1328 /* Walk past the arg-pointer and structure value address. */
1329 size = GET_MODE_SIZE (Pmode);
1330 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1331 size += GET_MODE_SIZE (Pmode);
1333 /* Save each register used in calling a function to the block. */
1334 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1335 if ((mode = apply_args_mode[regno]) != VOIDmode)
1337 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1338 if (size % align != 0)
1339 size = CEIL (size, align) * align;
1341 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1343 emit_move_insn (adjust_address (registers, mode, size), tem);
1344 size += GET_MODE_SIZE (mode);
1347 /* Save the arg pointer to the block. */
1348 tem = copy_to_reg (virtual_incoming_args_rtx);
1349 #ifdef STACK_GROWS_DOWNWARD
1350 /* We need the pointer as the caller actually passed them to us, not
1351 as we might have pretended they were passed. Make sure it's a valid
1352 operand, as emit_move_insn isn't expected to handle a PLUS. */
1353 tem
1354 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1355 NULL_RTX);
1356 #endif
1357 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1359 size = GET_MODE_SIZE (Pmode);
1361 /* Save the structure value address unless this is passed as an
1362 "invisible" first argument. */
1363 if (struct_incoming_value)
1365 emit_move_insn (adjust_address (registers, Pmode, size),
1366 copy_to_reg (struct_incoming_value));
1367 size += GET_MODE_SIZE (Pmode);
1370 /* Return the address of the block. */
1371 return copy_addr_to_reg (XEXP (registers, 0));
1374 /* __builtin_apply_args returns a block of memory allocated on
1375 the stack into which is stored the arg pointer, structure
1376 value address, static chain, and all the registers that might
1377 possibly be used in performing a function call. The code is
1378 moved to the start of the function so the incoming values are
1379 saved. */
1381 static rtx
1382 expand_builtin_apply_args (void)
1384 /* Don't do __builtin_apply_args more than once in a function.
1385 Save the result of the first call and reuse it. */
1386 if (apply_args_value != 0)
1387 return apply_args_value;
1389 /* When this function is called, it means that registers must be
1390 saved on entry to this function. So we migrate the
1391 call to the first insn of this function. */
1392 rtx temp;
1393 rtx seq;
1395 start_sequence ();
1396 temp = expand_builtin_apply_args_1 ();
1397 seq = get_insns ();
1398 end_sequence ();
1400 apply_args_value = temp;
1402 /* Put the insns after the NOTE that starts the function.
1403 If this is inside a start_sequence, make the outer-level insn
1404 chain current, so the code is placed at the start of the
1405 function. */
1406 push_topmost_sequence ();
1407 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1408 pop_topmost_sequence ();
1409 return temp;
1413 /* Perform an untyped call and save the state required to perform an
1414 untyped return of whatever value was returned by the given function. */
1416 static rtx
1417 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1419 int size, align, regno;
1420 enum machine_mode mode;
1421 rtx incoming_args, result, reg, dest, src, call_insn;
1422 rtx old_stack_level = 0;
1423 rtx call_fusage = 0;
1424 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1426 arguments = convert_memory_address (Pmode, arguments);
1428 /* Create a block where the return registers can be saved. */
1429 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1431 /* Fetch the arg pointer from the ARGUMENTS block. */
1432 incoming_args = gen_reg_rtx (Pmode);
1433 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1434 #ifndef STACK_GROWS_DOWNWARD
1435 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1436 incoming_args, 0, OPTAB_LIB_WIDEN);
1437 #endif
1439 /* Push a new argument block and copy the arguments. Do not allow
1440 the (potential) memcpy call below to interfere with our stack
1441 manipulations. */
1442 do_pending_stack_adjust ();
1443 NO_DEFER_POP;
1445 /* Save the stack with nonlocal if available. */
1446 #ifdef HAVE_save_stack_nonlocal
1447 if (HAVE_save_stack_nonlocal)
1448 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1449 else
1450 #endif
1451 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1453 /* Allocate a block of memory onto the stack and copy the memory
1454 arguments to the outgoing arguments address. */
1455 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1456 dest = virtual_outgoing_args_rtx;
1457 #ifndef STACK_GROWS_DOWNWARD
1458 if (GET_CODE (argsize) == CONST_INT)
1459 dest = plus_constant (dest, -INTVAL (argsize));
1460 else
1461 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1462 #endif
1463 dest = gen_rtx_MEM (BLKmode, dest);
1464 set_mem_align (dest, PARM_BOUNDARY);
1465 src = gen_rtx_MEM (BLKmode, incoming_args);
1466 set_mem_align (src, PARM_BOUNDARY);
1467 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1469 /* Refer to the argument block. */
1470 apply_args_size ();
1471 arguments = gen_rtx_MEM (BLKmode, arguments);
1472 set_mem_align (arguments, PARM_BOUNDARY);
1474 /* Walk past the arg-pointer and structure value address. */
1475 size = GET_MODE_SIZE (Pmode);
1476 if (struct_value)
1477 size += GET_MODE_SIZE (Pmode);
1479 /* Restore each of the registers previously saved. Make USE insns
1480 for each of these registers for use in making the call. */
1481 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1482 if ((mode = apply_args_mode[regno]) != VOIDmode)
1484 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1485 if (size % align != 0)
1486 size = CEIL (size, align) * align;
1487 reg = gen_rtx_REG (mode, regno);
1488 emit_move_insn (reg, adjust_address (arguments, mode, size));
1489 use_reg (&call_fusage, reg);
1490 size += GET_MODE_SIZE (mode);
1493 /* Restore the structure value address unless this is passed as an
1494 "invisible" first argument. */
1495 size = GET_MODE_SIZE (Pmode);
1496 if (struct_value)
1498 rtx value = gen_reg_rtx (Pmode);
1499 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1500 emit_move_insn (struct_value, value);
1501 if (REG_P (struct_value))
1502 use_reg (&call_fusage, struct_value);
1503 size += GET_MODE_SIZE (Pmode);
1506 /* All arguments and registers used for the call are set up by now! */
1507 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1509 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1510 and we don't want to load it into a register as an optimization,
1511 because prepare_call_address already did it if it should be done. */
1512 if (GET_CODE (function) != SYMBOL_REF)
1513 function = memory_address (FUNCTION_MODE, function);
1515 /* Generate the actual call instruction and save the return value. */
1516 #ifdef HAVE_untyped_call
1517 if (HAVE_untyped_call)
1518 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1519 result, result_vector (1, result)));
1520 else
1521 #endif
1522 #ifdef HAVE_call_value
1523 if (HAVE_call_value)
1525 rtx valreg = 0;
1527 /* Locate the unique return register. It is not possible to
1528 express a call that sets more than one return register using
1529 call_value; use untyped_call for that. In fact, untyped_call
1530 only needs to save the return registers in the given block. */
1531 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1532 if ((mode = apply_result_mode[regno]) != VOIDmode)
1534 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1536 valreg = gen_rtx_REG (mode, regno);
1539 emit_call_insn (GEN_CALL_VALUE (valreg,
1540 gen_rtx_MEM (FUNCTION_MODE, function),
1541 const0_rtx, NULL_RTX, const0_rtx));
1543 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1545 else
1546 #endif
1547 gcc_unreachable ();
1549 /* Find the CALL insn we just emitted, and attach the register usage
1550 information. */
1551 call_insn = last_call_insn ();
1552 add_function_usage_to (call_insn, call_fusage);
1554 /* Restore the stack. */
1555 #ifdef HAVE_save_stack_nonlocal
1556 if (HAVE_save_stack_nonlocal)
1557 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1558 else
1559 #endif
1560 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1562 OK_DEFER_POP;
1564 /* Return the address of the result block. */
1565 result = copy_addr_to_reg (XEXP (result, 0));
1566 return convert_memory_address (ptr_mode, result);
1569 /* Perform an untyped return. */
1571 static void
1572 expand_builtin_return (rtx result)
1574 int size, align, regno;
1575 enum machine_mode mode;
1576 rtx reg;
1577 rtx call_fusage = 0;
1579 result = convert_memory_address (Pmode, result);
1581 apply_result_size ();
1582 result = gen_rtx_MEM (BLKmode, result);
1584 #ifdef HAVE_untyped_return
1585 if (HAVE_untyped_return)
1587 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1588 emit_barrier ();
1589 return;
1591 #endif
1593 /* Restore the return value and note that each value is used. */
1594 size = 0;
1595 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1596 if ((mode = apply_result_mode[regno]) != VOIDmode)
1598 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1599 if (size % align != 0)
1600 size = CEIL (size, align) * align;
1601 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1602 emit_move_insn (reg, adjust_address (result, mode, size));
1604 push_to_sequence (call_fusage);
1605 emit_use (reg);
1606 call_fusage = get_insns ();
1607 end_sequence ();
1608 size += GET_MODE_SIZE (mode);
1611 /* Put the USE insns before the return. */
1612 emit_insn (call_fusage);
1614 /* Return whatever values were restored by jumping directly to the end
1615 of the function. */
1616 expand_naked_return ();
1619 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1621 static enum type_class
1622 type_to_class (tree type)
1624 switch (TREE_CODE (type))
1626 case VOID_TYPE: return void_type_class;
1627 case INTEGER_TYPE: return integer_type_class;
1628 case ENUMERAL_TYPE: return enumeral_type_class;
1629 case BOOLEAN_TYPE: return boolean_type_class;
1630 case POINTER_TYPE: return pointer_type_class;
1631 case REFERENCE_TYPE: return reference_type_class;
1632 case OFFSET_TYPE: return offset_type_class;
1633 case REAL_TYPE: return real_type_class;
1634 case COMPLEX_TYPE: return complex_type_class;
1635 case FUNCTION_TYPE: return function_type_class;
1636 case METHOD_TYPE: return method_type_class;
1637 case RECORD_TYPE: return record_type_class;
1638 case UNION_TYPE:
1639 case QUAL_UNION_TYPE: return union_type_class;
1640 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1641 ? string_type_class : array_type_class);
1642 case LANG_TYPE: return lang_type_class;
1643 default: return no_type_class;
1647 /* Expand a call EXP to __builtin_classify_type. */
1649 static rtx
1650 expand_builtin_classify_type (tree exp)
1652 if (call_expr_nargs (exp))
1653 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1654 return GEN_INT (no_type_class);
1657 /* This helper macro, meant to be used in mathfn_built_in below,
1658 determines which among a set of three builtin math functions is
1659 appropriate for a given type mode. The `F' and `L' cases are
1660 automatically generated from the `double' case. */
1661 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1662 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1663 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1664 fcodel = BUILT_IN_MATHFN##L ; break;
1665 /* Similar to above, but appends _R after any F/L suffix. */
1666 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1667 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1668 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1669 fcodel = BUILT_IN_MATHFN##L_R ; break;
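/* As a rough illustration of the macro above, CASE_MATHFN (BUILT_IN_SIN)
   expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so one case in mathfn_built_in_1 covers the double, float and long
   double variants of a function.  */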
1671 /* Return the mathematical function equivalent to FN but operating directly
1672 on TYPE, if available. If IMPLICIT is true, find the function in
1673 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1674 can't do the conversion, return zero. */
1676 static tree
1677 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1679 tree const *const fn_arr
1680 = implicit ? implicit_built_in_decls : built_in_decls;
1681 enum built_in_function fcode, fcodef, fcodel;
1683 switch (fn)
1685 CASE_MATHFN (BUILT_IN_ACOS)
1686 CASE_MATHFN (BUILT_IN_ACOSH)
1687 CASE_MATHFN (BUILT_IN_ASIN)
1688 CASE_MATHFN (BUILT_IN_ASINH)
1689 CASE_MATHFN (BUILT_IN_ATAN)
1690 CASE_MATHFN (BUILT_IN_ATAN2)
1691 CASE_MATHFN (BUILT_IN_ATANH)
1692 CASE_MATHFN (BUILT_IN_CBRT)
1693 CASE_MATHFN (BUILT_IN_CEIL)
1694 CASE_MATHFN (BUILT_IN_CEXPI)
1695 CASE_MATHFN (BUILT_IN_COPYSIGN)
1696 CASE_MATHFN (BUILT_IN_COS)
1697 CASE_MATHFN (BUILT_IN_COSH)
1698 CASE_MATHFN (BUILT_IN_DREM)
1699 CASE_MATHFN (BUILT_IN_ERF)
1700 CASE_MATHFN (BUILT_IN_ERFC)
1701 CASE_MATHFN (BUILT_IN_EXP)
1702 CASE_MATHFN (BUILT_IN_EXP10)
1703 CASE_MATHFN (BUILT_IN_EXP2)
1704 CASE_MATHFN (BUILT_IN_EXPM1)
1705 CASE_MATHFN (BUILT_IN_FABS)
1706 CASE_MATHFN (BUILT_IN_FDIM)
1707 CASE_MATHFN (BUILT_IN_FLOOR)
1708 CASE_MATHFN (BUILT_IN_FMA)
1709 CASE_MATHFN (BUILT_IN_FMAX)
1710 CASE_MATHFN (BUILT_IN_FMIN)
1711 CASE_MATHFN (BUILT_IN_FMOD)
1712 CASE_MATHFN (BUILT_IN_FREXP)
1713 CASE_MATHFN (BUILT_IN_GAMMA)
1714 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1715 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1716 CASE_MATHFN (BUILT_IN_HYPOT)
1717 CASE_MATHFN (BUILT_IN_ILOGB)
1718 CASE_MATHFN (BUILT_IN_INF)
1719 CASE_MATHFN (BUILT_IN_ISINF)
1720 CASE_MATHFN (BUILT_IN_J0)
1721 CASE_MATHFN (BUILT_IN_J1)
1722 CASE_MATHFN (BUILT_IN_JN)
1723 CASE_MATHFN (BUILT_IN_LCEIL)
1724 CASE_MATHFN (BUILT_IN_LDEXP)
1725 CASE_MATHFN (BUILT_IN_LFLOOR)
1726 CASE_MATHFN (BUILT_IN_LGAMMA)
1727 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1728 CASE_MATHFN (BUILT_IN_LLCEIL)
1729 CASE_MATHFN (BUILT_IN_LLFLOOR)
1730 CASE_MATHFN (BUILT_IN_LLRINT)
1731 CASE_MATHFN (BUILT_IN_LLROUND)
1732 CASE_MATHFN (BUILT_IN_LOG)
1733 CASE_MATHFN (BUILT_IN_LOG10)
1734 CASE_MATHFN (BUILT_IN_LOG1P)
1735 CASE_MATHFN (BUILT_IN_LOG2)
1736 CASE_MATHFN (BUILT_IN_LOGB)
1737 CASE_MATHFN (BUILT_IN_LRINT)
1738 CASE_MATHFN (BUILT_IN_LROUND)
1739 CASE_MATHFN (BUILT_IN_MODF)
1740 CASE_MATHFN (BUILT_IN_NAN)
1741 CASE_MATHFN (BUILT_IN_NANS)
1742 CASE_MATHFN (BUILT_IN_NEARBYINT)
1743 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1744 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1745 CASE_MATHFN (BUILT_IN_POW)
1746 CASE_MATHFN (BUILT_IN_POWI)
1747 CASE_MATHFN (BUILT_IN_POW10)
1748 CASE_MATHFN (BUILT_IN_REMAINDER)
1749 CASE_MATHFN (BUILT_IN_REMQUO)
1750 CASE_MATHFN (BUILT_IN_RINT)
1751 CASE_MATHFN (BUILT_IN_ROUND)
1752 CASE_MATHFN (BUILT_IN_SCALB)
1753 CASE_MATHFN (BUILT_IN_SCALBLN)
1754 CASE_MATHFN (BUILT_IN_SCALBN)
1755 CASE_MATHFN (BUILT_IN_SIGNBIT)
1756 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1757 CASE_MATHFN (BUILT_IN_SIN)
1758 CASE_MATHFN (BUILT_IN_SINCOS)
1759 CASE_MATHFN (BUILT_IN_SINH)
1760 CASE_MATHFN (BUILT_IN_SQRT)
1761 CASE_MATHFN (BUILT_IN_TAN)
1762 CASE_MATHFN (BUILT_IN_TANH)
1763 CASE_MATHFN (BUILT_IN_TGAMMA)
1764 CASE_MATHFN (BUILT_IN_TRUNC)
1765 CASE_MATHFN (BUILT_IN_Y0)
1766 CASE_MATHFN (BUILT_IN_Y1)
1767 CASE_MATHFN (BUILT_IN_YN)
1769 default:
1770 return NULL_TREE;
1773 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1774 return fn_arr[fcode];
1775 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1776 return fn_arr[fcodef];
1777 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1778 return fn_arr[fcodel];
1779 else
1780 return NULL_TREE;
1783 /* Like mathfn_built_in_1(), but always use the implicit array. */
1785 tree
1786 mathfn_built_in (tree type, enum built_in_function fn)
1788 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1791 /* If errno must be maintained, expand the RTL to check if the result,
1792 TARGET, of a built-in function call, EXP, is NaN, and if so set
1793 errno to EDOM. */
1795 static void
1796 expand_errno_check (tree exp, rtx target)
1798 rtx lab = gen_label_rtx ();
1800 /* Test the result; if it is NaN, set errno=EDOM because
1801 the argument was not in the domain. */
1802 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1803 0, lab);
1805 #ifdef TARGET_EDOM
1806 /* If this built-in doesn't throw an exception, set errno directly. */
1807 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1809 #ifdef GEN_ERRNO_RTX
1810 rtx errno_rtx = GEN_ERRNO_RTX;
1811 #else
1812 rtx errno_rtx
1813 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1814 #endif
1815 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1816 emit_label (lab);
1817 return;
1819 #endif
1821 /* Make sure the library call isn't expanded as a tail call. */
1822 CALL_EXPR_TAILCALL (exp) = 0;
1824 /* We can't set errno=EDOM directly; let the library call do it.
1825 Pop the arguments right away in case the call gets deleted. */
1826 NO_DEFER_POP;
1827 expand_call (exp, target, 0);
1828 OK_DEFER_POP;
1829 emit_label (lab);
1832 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1833 Return NULL_RTX if a normal call should be emitted rather than expanding
1834 the function in-line. EXP is the expression that is a call to the builtin
1835 function; if convenient, the result should be placed in TARGET.
1836 SUBTARGET may be used as the target for computing one of EXP's operands. */
1838 static rtx
1839 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1841 optab builtin_optab;
1842 rtx op0, insns, before_call;
1843 tree fndecl = get_callee_fndecl (exp);
1844 enum machine_mode mode;
1845 bool errno_set = false;
1846 tree arg;
1848 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1849 return NULL_RTX;
1851 arg = CALL_EXPR_ARG (exp, 0);
1853 switch (DECL_FUNCTION_CODE (fndecl))
1855 CASE_FLT_FN (BUILT_IN_SQRT):
1856 errno_set = ! tree_expr_nonnegative_p (arg);
1857 builtin_optab = sqrt_optab;
1858 break;
1859 CASE_FLT_FN (BUILT_IN_EXP):
1860 errno_set = true; builtin_optab = exp_optab; break;
1861 CASE_FLT_FN (BUILT_IN_EXP10):
1862 CASE_FLT_FN (BUILT_IN_POW10):
1863 errno_set = true; builtin_optab = exp10_optab; break;
1864 CASE_FLT_FN (BUILT_IN_EXP2):
1865 errno_set = true; builtin_optab = exp2_optab; break;
1866 CASE_FLT_FN (BUILT_IN_EXPM1):
1867 errno_set = true; builtin_optab = expm1_optab; break;
1868 CASE_FLT_FN (BUILT_IN_LOGB):
1869 errno_set = true; builtin_optab = logb_optab; break;
1870 CASE_FLT_FN (BUILT_IN_LOG):
1871 errno_set = true; builtin_optab = log_optab; break;
1872 CASE_FLT_FN (BUILT_IN_LOG10):
1873 errno_set = true; builtin_optab = log10_optab; break;
1874 CASE_FLT_FN (BUILT_IN_LOG2):
1875 errno_set = true; builtin_optab = log2_optab; break;
1876 CASE_FLT_FN (BUILT_IN_LOG1P):
1877 errno_set = true; builtin_optab = log1p_optab; break;
1878 CASE_FLT_FN (BUILT_IN_ASIN):
1879 builtin_optab = asin_optab; break;
1880 CASE_FLT_FN (BUILT_IN_ACOS):
1881 builtin_optab = acos_optab; break;
1882 CASE_FLT_FN (BUILT_IN_TAN):
1883 builtin_optab = tan_optab; break;
1884 CASE_FLT_FN (BUILT_IN_ATAN):
1885 builtin_optab = atan_optab; break;
1886 CASE_FLT_FN (BUILT_IN_FLOOR):
1887 builtin_optab = floor_optab; break;
1888 CASE_FLT_FN (BUILT_IN_CEIL):
1889 builtin_optab = ceil_optab; break;
1890 CASE_FLT_FN (BUILT_IN_TRUNC):
1891 builtin_optab = btrunc_optab; break;
1892 CASE_FLT_FN (BUILT_IN_ROUND):
1893 builtin_optab = round_optab; break;
1894 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1895 builtin_optab = nearbyint_optab;
1896 if (flag_trapping_math)
1897 break;
1898 /* Else fall through and expand as rint. */
1899 CASE_FLT_FN (BUILT_IN_RINT):
1900 builtin_optab = rint_optab; break;
1901 default:
1902 gcc_unreachable ();
1905 /* Make a suitable register to place result in. */
1906 mode = TYPE_MODE (TREE_TYPE (exp));
1908 if (! flag_errno_math || ! HONOR_NANS (mode))
1909 errno_set = false;
1911 /* Before working hard, check whether the instruction is available. */
1912 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1914 target = gen_reg_rtx (mode);
1916 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1917 need to expand the argument again. This way, we will not perform
1918 side-effects more than once. */
1919 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1921 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1923 start_sequence ();
1925 /* Compute into TARGET.
1926 Set TARGET to wherever the result comes back. */
1927 target = expand_unop (mode, builtin_optab, op0, target, 0);
1929 if (target != 0)
1931 if (errno_set)
1932 expand_errno_check (exp, target);
1934 /* Output the entire sequence. */
1935 insns = get_insns ();
1936 end_sequence ();
1937 emit_insn (insns);
1938 return target;
1941 /* If we were unable to expand via the builtin, stop the sequence
1942 (without outputting the insns) and call the library function
1943 with the stabilized argument list. */
1944 end_sequence ();
1947 before_call = get_last_insn ();
1949 return expand_call (exp, target, target == const0_rtx);
1952 /* Expand a call to the builtin binary math functions (pow and atan2).
1953 Return NULL_RTX if a normal call should be emitted rather than expanding the
1954 function in-line. EXP is the expression that is a call to the builtin
1955 function; if convenient, the result should be placed in TARGET.
1956 SUBTARGET may be used as the target for computing one of EXP's
1957 operands. */
1959 static rtx
1960 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1962 optab builtin_optab;
1963 rtx op0, op1, insns;
1964 int op1_type = REAL_TYPE;
1965 tree fndecl = get_callee_fndecl (exp);
1966 tree arg0, arg1;
1967 enum machine_mode mode;
1968 bool errno_set = true;
1970 switch (DECL_FUNCTION_CODE (fndecl))
1972 CASE_FLT_FN (BUILT_IN_SCALBN):
1973 CASE_FLT_FN (BUILT_IN_SCALBLN):
1974 CASE_FLT_FN (BUILT_IN_LDEXP):
1975 op1_type = INTEGER_TYPE;
1976 default:
1977 break;
1980 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
1981 return NULL_RTX;
1983 arg0 = CALL_EXPR_ARG (exp, 0);
1984 arg1 = CALL_EXPR_ARG (exp, 1);
1986 switch (DECL_FUNCTION_CODE (fndecl))
1988 CASE_FLT_FN (BUILT_IN_POW):
1989 builtin_optab = pow_optab; break;
1990 CASE_FLT_FN (BUILT_IN_ATAN2):
1991 builtin_optab = atan2_optab; break;
1992 CASE_FLT_FN (BUILT_IN_SCALB):
1993 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
1994 return 0;
1995 builtin_optab = scalb_optab; break;
1996 CASE_FLT_FN (BUILT_IN_SCALBN):
1997 CASE_FLT_FN (BUILT_IN_SCALBLN):
1998 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
1999 return 0;
2000 /* Fall through... */
2001 CASE_FLT_FN (BUILT_IN_LDEXP):
2002 builtin_optab = ldexp_optab; break;
2003 CASE_FLT_FN (BUILT_IN_FMOD):
2004 builtin_optab = fmod_optab; break;
2005 CASE_FLT_FN (BUILT_IN_REMAINDER):
2006 CASE_FLT_FN (BUILT_IN_DREM):
2007 builtin_optab = remainder_optab; break;
2008 default:
2009 gcc_unreachable ();
2012 /* Make a suitable register to place result in. */
2013 mode = TYPE_MODE (TREE_TYPE (exp));
2015 /* Before working hard, check whether the instruction is available. */
2016 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2017 return NULL_RTX;
2019 target = gen_reg_rtx (mode);
2021 if (! flag_errno_math || ! HONOR_NANS (mode))
2022 errno_set = false;
2024 /* Always stabilize the argument list. */
2025 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2026 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2028 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2029 op1 = expand_normal (arg1);
2031 start_sequence ();
2033 /* Compute into TARGET.
2034 Set TARGET to wherever the result comes back. */
2035 target = expand_binop (mode, builtin_optab, op0, op1,
2036 target, 0, OPTAB_DIRECT);
2038 /* If we were unable to expand via the builtin, stop the sequence
2039 (without outputting the insns) and call the library function
2040 with the stabilized argument list. */
2041 if (target == 0)
2043 end_sequence ();
2044 return expand_call (exp, target, target == const0_rtx);
2047 if (errno_set)
2048 expand_errno_check (exp, target);
2050 /* Output the entire sequence. */
2051 insns = get_insns ();
2052 end_sequence ();
2053 emit_insn (insns);
2055 return target;
2058 /* Expand a call to the builtin sin and cos math functions.
2059 Return NULL_RTX if a normal call should be emitted rather than expanding the
2060 function in-line. EXP is the expression that is a call to the builtin
2061 function; if convenient, the result should be placed in TARGET.
2062 SUBTARGET may be used as the target for computing one of EXP's
2063 operands. */
2065 static rtx
2066 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2068 optab builtin_optab;
2069 rtx op0, insns;
2070 tree fndecl = get_callee_fndecl (exp);
2071 enum machine_mode mode;
2072 tree arg;
2074 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2075 return NULL_RTX;
2077 arg = CALL_EXPR_ARG (exp, 0);
2079 switch (DECL_FUNCTION_CODE (fndecl))
2081 CASE_FLT_FN (BUILT_IN_SIN):
2082 CASE_FLT_FN (BUILT_IN_COS):
2083 builtin_optab = sincos_optab; break;
2084 default:
2085 gcc_unreachable ();
2088 /* Make a suitable register to place result in. */
2089 mode = TYPE_MODE (TREE_TYPE (exp));
2091 /* Check if the sincos insn is available; otherwise fall back
2092 to the sin or cos insn. */
2093 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2094 switch (DECL_FUNCTION_CODE (fndecl))
2096 CASE_FLT_FN (BUILT_IN_SIN):
2097 builtin_optab = sin_optab; break;
2098 CASE_FLT_FN (BUILT_IN_COS):
2099 builtin_optab = cos_optab; break;
2100 default:
2101 gcc_unreachable ();
2104 /* Before working hard, check whether the instruction is available. */
2105 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2107 target = gen_reg_rtx (mode);
2109 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2110 need to expand the argument again. This way, we will not perform
2111 side-effects more than once. */
2112 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2114 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2116 start_sequence ();
2118 /* Compute into TARGET.
2119 Set TARGET to wherever the result comes back. */
2120 if (builtin_optab == sincos_optab)
2122 int result;
2124 switch (DECL_FUNCTION_CODE (fndecl))
2126 CASE_FLT_FN (BUILT_IN_SIN):
2127 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2128 break;
2129 CASE_FLT_FN (BUILT_IN_COS):
2130 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2131 break;
2132 default:
2133 gcc_unreachable ();
2135 gcc_assert (result);
2137 else
2139 target = expand_unop (mode, builtin_optab, op0, target, 0);
2142 if (target != 0)
2144 /* Output the entire sequence. */
2145 insns = get_insns ();
2146 end_sequence ();
2147 emit_insn (insns);
2148 return target;
2151 /* If we were unable to expand via the builtin, stop the sequence
2152 (without outputting the insns) and call the library function
2153 with the stabilized argument list. */
2154 end_sequence ();
2157 target = expand_call (exp, target, target == const0_rtx);
2159 return target;
2162 /* Expand a call to one of the builtin math functions that operate on
2163 a floating point argument and output an integer result (ilogb, isinf,
2164 isnan, etc).
2165 Return 0 if a normal call should be emitted rather than expanding the
2166 function in-line. EXP is the expression that is a call to the builtin
2167 function; if convenient, the result should be placed in TARGET.
2168 SUBTARGET may be used as the target for computing one of EXP's operands. */
2170 static rtx
2171 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2173 optab builtin_optab = 0;
2174 enum insn_code icode = CODE_FOR_nothing;
2175 rtx op0;
2176 tree fndecl = get_callee_fndecl (exp);
2177 enum machine_mode mode;
2178 bool errno_set = false;
2179 tree arg;
2181 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2182 return NULL_RTX;
2184 arg = CALL_EXPR_ARG (exp, 0);
2186 switch (DECL_FUNCTION_CODE (fndecl))
2188 CASE_FLT_FN (BUILT_IN_ILOGB):
2189 errno_set = true; builtin_optab = ilogb_optab; break;
2190 CASE_FLT_FN (BUILT_IN_ISINF):
2191 builtin_optab = isinf_optab; break;
2192 case BUILT_IN_ISNORMAL:
2193 case BUILT_IN_ISFINITE:
2194 CASE_FLT_FN (BUILT_IN_FINITE):
2195 /* These builtins have no optabs (yet). */
2196 break;
2197 default:
2198 gcc_unreachable ();
2201 /* There's no easy way to detect the case we need to set EDOM. */
2202 if (flag_errno_math && errno_set)
2203 return NULL_RTX;
2205 /* Optab mode depends on the mode of the input argument. */
2206 mode = TYPE_MODE (TREE_TYPE (arg));
2208 if (builtin_optab)
2209 icode = optab_handler (builtin_optab, mode)->insn_code;
2211 /* Before working hard, check whether the instruction is available. */
2212 if (icode != CODE_FOR_nothing)
2214 /* Make a suitable register to place result in. */
2215 if (!target
2216 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2217 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2219 gcc_assert (insn_data[icode].operand[0].predicate
2220 (target, GET_MODE (target)));
2222 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2223 need to expand the argument again. This way, we will not perform
2224 side-effects more than once. */
2225 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2227 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2229 if (mode != GET_MODE (op0))
2230 op0 = convert_to_mode (mode, op0, 0);
2232 /* Compute into TARGET.
2233 Set TARGET to wherever the result comes back. */
2234 emit_unop_insn (icode, target, op0, UNKNOWN);
2235 return target;
2238 /* If there is no optab, try generic code. */
2239 switch (DECL_FUNCTION_CODE (fndecl))
2241 tree result;
2243 CASE_FLT_FN (BUILT_IN_ISINF):
2245 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2246 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2247 tree const type = TREE_TYPE (arg);
2248 REAL_VALUE_TYPE r;
2249 char buf[128];
2251 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2252 real_from_string (&r, buf);
2253 result = build_call_expr (isgr_fn, 2,
2254 fold_build1 (ABS_EXPR, type, arg),
2255 build_real (type, r));
2256 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2258 CASE_FLT_FN (BUILT_IN_FINITE):
2259 case BUILT_IN_ISFINITE:
2261 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2262 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2263 tree const type = TREE_TYPE (arg);
2264 REAL_VALUE_TYPE r;
2265 char buf[128];
2267 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2268 real_from_string (&r, buf);
2269 result = build_call_expr (isle_fn, 2,
2270 fold_build1 (ABS_EXPR, type, arg),
2271 build_real (type, r));
2272 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2274 case BUILT_IN_ISNORMAL:
2276 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2277 islessequal(fabs(x),DBL_MAX). */
2278 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2279 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2280 tree const type = TREE_TYPE (arg);
2281 REAL_VALUE_TYPE rmax, rmin;
2282 char buf[128];
2284 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2285 real_from_string (&rmax, buf);
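/* Note: rmax built above is the largest finite value in MODE (from
   get_max_float); the string built below, 0x1p(emin - 1), is the
   smallest normalized value in MODE, e.g. DBL_MIN when MODE is the
   IEEE double mode.  */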
2286 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2287 real_from_string (&rmin, buf);
2288 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2289 result = build_call_expr (isle_fn, 2, arg,
2290 build_real (type, rmax));
2291 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2292 build_call_expr (isge_fn, 2, arg,
2293 build_real (type, rmin)));
2294 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2296 default:
2297 break;
2300 target = expand_call (exp, target, target == const0_rtx);
2302 return target;
2305 /* Expand a call to the builtin sincos math function.
2306 Return NULL_RTX if a normal call should be emitted rather than expanding the
2307 function in-line. EXP is the expression that is a call to the builtin
2308 function. */
2310 static rtx
2311 expand_builtin_sincos (tree exp)
2313 rtx op0, op1, op2, target1, target2;
2314 enum machine_mode mode;
2315 tree arg, sinp, cosp;
2316 int result;
2318 if (!validate_arglist (exp, REAL_TYPE,
2319 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2320 return NULL_RTX;
2322 arg = CALL_EXPR_ARG (exp, 0);
2323 sinp = CALL_EXPR_ARG (exp, 1);
2324 cosp = CALL_EXPR_ARG (exp, 2);
2326 /* Make a suitable register to place result in. */
2327 mode = TYPE_MODE (TREE_TYPE (arg));
2329 /* Check if sincos insn is available, otherwise emit the call. */
2330 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2331 return NULL_RTX;
2333 target1 = gen_reg_rtx (mode);
2334 target2 = gen_reg_rtx (mode);
2336 op0 = expand_normal (arg);
2337 op1 = expand_normal (build_fold_indirect_ref (sinp));
2338 op2 = expand_normal (build_fold_indirect_ref (cosp));
2340 /* Compute into target1 and target2.
2341 Set TARGET to wherever the result comes back. */
2342 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2343 gcc_assert (result);
2345 /* Move target1 and target2 to the memory locations indicated
2346 by op1 and op2. */
2347 emit_move_insn (op1, target1);
2348 emit_move_insn (op2, target2);
2350 return const0_rtx;
2353 /* Expand a call to the internal cexpi builtin to the sincos math function.
2354 EXP is the expression that is a call to the builtin function; if convenient,
2355 the result should be placed in TARGET. SUBTARGET may be used as the target
2356 for computing one of EXP's operands. */
2358 static rtx
2359 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2361 tree fndecl = get_callee_fndecl (exp);
2362 tree arg, type;
2363 enum machine_mode mode;
2364 rtx op0, op1, op2;
2366 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2367 return NULL_RTX;
2369 arg = CALL_EXPR_ARG (exp, 0);
2370 type = TREE_TYPE (arg);
2371 mode = TYPE_MODE (TREE_TYPE (arg));
2373 /* Try expanding via a sincos optab; fall back to emitting a libcall
2374 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2375 is only generated when either of them is available. */
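/* Illustration: __builtin_cexpi (x) computes cos (x) + i*sin (x), i.e.
   cexp (x*i). Hence the sincos path below fills two values and combines
   them with a COMPLEX_EXPR, while the last-resort path calls cexp on
   the complex argument 0 + x*i.  */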
2376 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2378 op1 = gen_reg_rtx (mode);
2379 op2 = gen_reg_rtx (mode);
2381 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2383 /* Compute into op1 and op2. */
2384 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2386 else if (TARGET_HAS_SINCOS)
2388 tree call, fn = NULL_TREE;
2389 tree top1, top2;
2390 rtx op1a, op2a;
2392 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2393 fn = built_in_decls[BUILT_IN_SINCOSF];
2394 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2395 fn = built_in_decls[BUILT_IN_SINCOS];
2396 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2397 fn = built_in_decls[BUILT_IN_SINCOSL];
2398 else
2399 gcc_unreachable ();
2401 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2402 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2403 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2404 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2405 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2406 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2408 /* Make sure not to fold the sincos call again. */
2409 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2410 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2411 call, 3, arg, top1, top2));
2413 else
2415 tree call, fn = NULL_TREE, narg;
2416 tree ctype = build_complex_type (type);
2418 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2419 fn = built_in_decls[BUILT_IN_CEXPF];
2420 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2421 fn = built_in_decls[BUILT_IN_CEXP];
2422 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2423 fn = built_in_decls[BUILT_IN_CEXPL];
2424 else
2425 gcc_unreachable ();
2427 /* If we don't have a decl for cexp, create one. This is the
2428 friendliest fallback if the user calls __builtin_cexpi
2429 on a target without full C99 function support. */
2430 if (fn == NULL_TREE)
2432 tree fntype;
2433 const char *name = NULL;
2435 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2436 name = "cexpf";
2437 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2438 name = "cexp";
2439 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2440 name = "cexpl";
2442 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2443 fn = build_fn_decl (name, fntype);
2446 narg = fold_build2 (COMPLEX_EXPR, ctype,
2447 build_real (type, dconst0), arg);
2449 /* Make sure not to fold the cexp call again. */
2450 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2451 return expand_expr (build_call_nary (ctype, call, 1, narg),
2452 target, VOIDmode, EXPAND_NORMAL);
2455 /* Now build the proper return type. */
2456 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2457 make_tree (TREE_TYPE (arg), op2),
2458 make_tree (TREE_TYPE (arg), op1)),
2459 target, VOIDmode, EXPAND_NORMAL);
2462 /* Expand a call to one of the builtin rounding functions gcc defines
2463 as an extension (lfloor and lceil). As these are gcc extensions we
2464 do not need to worry about setting errno to EDOM.
2465 If expanding via optab fails, lower expression to (int)(floor(x)).
2466 EXP is the expression that is a call to the builtin function;
2467 if convenient, the result should be placed in TARGET. */
2469 static rtx
2470 expand_builtin_int_roundingfn (tree exp, rtx target)
2472 convert_optab builtin_optab;
2473 rtx op0, insns, tmp;
2474 tree fndecl = get_callee_fndecl (exp);
2475 enum built_in_function fallback_fn;
2476 tree fallback_fndecl;
2477 enum machine_mode mode;
2478 tree arg;
2480 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2481 gcc_unreachable ();
2483 arg = CALL_EXPR_ARG (exp, 0);
2485 switch (DECL_FUNCTION_CODE (fndecl))
2487 CASE_FLT_FN (BUILT_IN_LCEIL):
2488 CASE_FLT_FN (BUILT_IN_LLCEIL):
2489 builtin_optab = lceil_optab;
2490 fallback_fn = BUILT_IN_CEIL;
2491 break;
2493 CASE_FLT_FN (BUILT_IN_LFLOOR):
2494 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2495 builtin_optab = lfloor_optab;
2496 fallback_fn = BUILT_IN_FLOOR;
2497 break;
2499 default:
2500 gcc_unreachable ();
2503 /* Make a suitable register to place result in. */
2504 mode = TYPE_MODE (TREE_TYPE (exp));
2506 target = gen_reg_rtx (mode);
2508 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2509 need to expand the argument again. This way, we will not perform
2510 side-effects more than once. */
2511 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2513 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2515 start_sequence ();
2517 /* Compute into TARGET. */
2518 if (expand_sfix_optab (target, op0, builtin_optab))
2520 /* Output the entire sequence. */
2521 insns = get_insns ();
2522 end_sequence ();
2523 emit_insn (insns);
2524 return target;
2527 /* If we were unable to expand via the builtin, stop the sequence
2528 (without outputting the insns). */
2529 end_sequence ();
2531 /* Fall back to floating point rounding optab. */
2532 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2534 /* For non-C99 targets we may end up without a fallback fndecl here
2535 if the user called __builtin_lfloor directly. In this case emit
2536 a call to the floor/ceil variants nevertheless. This should result
2537 in the best user experience on targets lacking full C99 support. */
2538 if (fallback_fndecl == NULL_TREE)
2540 tree fntype;
2541 const char *name = NULL;
2543 switch (DECL_FUNCTION_CODE (fndecl))
2545 case BUILT_IN_LCEIL:
2546 case BUILT_IN_LLCEIL:
2547 name = "ceil";
2548 break;
2549 case BUILT_IN_LCEILF:
2550 case BUILT_IN_LLCEILF:
2551 name = "ceilf";
2552 break;
2553 case BUILT_IN_LCEILL:
2554 case BUILT_IN_LLCEILL:
2555 name = "ceill";
2556 break;
2557 case BUILT_IN_LFLOOR:
2558 case BUILT_IN_LLFLOOR:
2559 name = "floor";
2560 break;
2561 case BUILT_IN_LFLOORF:
2562 case BUILT_IN_LLFLOORF:
2563 name = "floorf";
2564 break;
2565 case BUILT_IN_LFLOORL:
2566 case BUILT_IN_LLFLOORL:
2567 name = "floorl";
2568 break;
2569 default:
2570 gcc_unreachable ();
2573 fntype = build_function_type_list (TREE_TYPE (arg),
2574 TREE_TYPE (arg), NULL_TREE);
2575 fallback_fndecl = build_fn_decl (name, fntype);
2578 exp = build_call_expr (fallback_fndecl, 1, arg);
2580 tmp = expand_normal (exp);
2582 /* Truncate the result of floating point optab to integer
2583 via expand_fix (). */
2584 target = gen_reg_rtx (mode);
2585 expand_fix (target, tmp, 0);
2587 return target;
2590 /* Expand a call to one of the builtin math functions doing integer
2591 conversion (lrint).
2592 Return 0 if a normal call should be emitted rather than expanding the
2593 function in-line. EXP is the expression that is a call to the builtin
2594 function; if convenient, the result should be placed in TARGET. */
2596 static rtx
2597 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2599 convert_optab builtin_optab;
2600 rtx op0, insns;
2601 tree fndecl = get_callee_fndecl (exp);
2602 tree arg;
2603 enum machine_mode mode;
2605 /* There's no easy way to detect the case we need to set EDOM. */
2606 if (flag_errno_math)
2607 return NULL_RTX;
2609 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2610 gcc_unreachable ();
2612 arg = CALL_EXPR_ARG (exp, 0);
2614 switch (DECL_FUNCTION_CODE (fndecl))
2616 CASE_FLT_FN (BUILT_IN_LRINT):
2617 CASE_FLT_FN (BUILT_IN_LLRINT):
2618 builtin_optab = lrint_optab; break;
2619 CASE_FLT_FN (BUILT_IN_LROUND):
2620 CASE_FLT_FN (BUILT_IN_LLROUND):
2621 builtin_optab = lround_optab; break;
2622 default:
2623 gcc_unreachable ();
2626 /* Make a suitable register to place result in. */
2627 mode = TYPE_MODE (TREE_TYPE (exp));
2629 target = gen_reg_rtx (mode);
2631 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2632 need to expand the argument again. This way, we will not perform
2633 side-effects more than once. */
2634 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2636 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2638 start_sequence ();
2640 if (expand_sfix_optab (target, op0, builtin_optab))
2642 /* Output the entire sequence. */
2643 insns = get_insns ();
2644 end_sequence ();
2645 emit_insn (insns);
2646 return target;
2649 /* If we were unable to expand via the builtin, stop the sequence
2650 (without outputting the insns) and call the library function
2651 with the stabilized argument list. */
2652 end_sequence ();
2654 target = expand_call (exp, target, target == const0_rtx);
2656 return target;
2659 /* To evaluate powi(x,n), the floating point value x raised to the
2660 constant integer exponent n, we use a hybrid algorithm that
2661 combines the "window method" with look-up tables. For an
2662 introduction to exponentiation algorithms and "addition chains",
2663 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2664 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2665 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2666 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2668 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2669 multiplications to inline before calling the system library's pow
2670 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2671 so this default never requires calling pow, powf or powl. */
2673 #ifndef POWI_MAX_MULTS
2674 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2675 #endif
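/* For instance, on a host with a 64-bit HOST_WIDE_INT the default bound
   above is 2*64 - 2 == 126 multiplications.  */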
2677 /* The size of the "optimal power tree" lookup table. All
2678 exponents less than this value are simply looked up in the
2679 powi_table below. This threshold is also used to size the
2680 cache of pseudo registers that hold intermediate results. */
2681 #define POWI_TABLE_SIZE 256
2683 /* The size, in bits of the window, used in the "window method"
2684 exponentiation algorithm. This is equivalent to a radix of
2685 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2686 #define POWI_WINDOW_SIZE 3
2688 /* The following table is an efficient representation of an
2689 "optimal power tree". For each value, i, the corresponding
2690 value, j, in the table states that an optimal evaluation
2691 sequence for calculating pow(x,i) can be found by evaluating
2692 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2693 100 integers is given in Knuth's "Seminumerical algorithms". */
2695 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2697 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2698 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2699 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2700 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2701 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2702 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2703 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2704 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2705 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2706 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2707 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2708 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2709 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2710 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2711 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2712 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2713 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2714 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2715 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2716 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2717 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2718 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2719 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2720 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2721 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2722 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2723 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2724 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2725 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2726 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2727 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2728 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
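/* Reading the table above as an example: powi_table[15] == 9, so x**15
   is evaluated as x**9 * x**6; recursing, x**9 = x**6 * x**3,
   x**6 = x**3 * x**3, x**3 = x**2 * x and x**2 = x * x, i.e. five
   multiplications in total.  */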
2732 /* Return the number of multiplications required to calculate
2733 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2734 subroutine of powi_cost. CACHE is an array indicating
2735 which exponents have already been calculated. */
2737 static int
2738 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2740 /* If we've already calculated this exponent, then this evaluation
2741 doesn't require any additional multiplications. */
2742 if (cache[n])
2743 return 0;
2745 cache[n] = true;
2746 return powi_lookup_cost (n - powi_table[n], cache)
2747 + powi_lookup_cost (powi_table[n], cache) + 1;
2750 /* Return the number of multiplications required to calculate
2751 powi(x,n) for an arbitrary x, given the exponent N. This
2752 function needs to be kept in sync with expand_powi below. */
2754 static int
2755 powi_cost (HOST_WIDE_INT n)
2757 bool cache[POWI_TABLE_SIZE];
2758 unsigned HOST_WIDE_INT digit;
2759 unsigned HOST_WIDE_INT val;
2760 int result;
2762 if (n == 0)
2763 return 0;
2765 /* Ignore the reciprocal when calculating the cost. */
2766 val = (n < 0) ? -n : n;
2768 /* Initialize the exponent cache. */
2769 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2770 cache[1] = true;
2772 result = 0;
2774 while (val >= POWI_TABLE_SIZE)
2776 if (val & 1)
2778 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2779 result += powi_lookup_cost (digit, cache)
2780 + POWI_WINDOW_SIZE + 1;
2781 val >>= POWI_WINDOW_SIZE;
2783 else
2785 val >>= 1;
2786 result++;
2790 return result + powi_lookup_cost (val, cache);
2793 /* Recursive subroutine of expand_powi. This function takes the array,
2794 CACHE, of already calculated exponents and an exponent N and returns
2795 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2797 static rtx
2798 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2800 unsigned HOST_WIDE_INT digit;
2801 rtx target, result;
2802 rtx op0, op1;
2804 if (n < POWI_TABLE_SIZE)
2806 if (cache[n])
2807 return cache[n];
2809 target = gen_reg_rtx (mode);
2810 cache[n] = target;
2812 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2813 op1 = expand_powi_1 (mode, powi_table[n], cache);
2815 else if (n & 1)
2817 target = gen_reg_rtx (mode);
2818 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2819 op0 = expand_powi_1 (mode, n - digit, cache);
2820 op1 = expand_powi_1 (mode, digit, cache);
2822 else
2824 target = gen_reg_rtx (mode);
2825 op0 = expand_powi_1 (mode, n >> 1, cache);
2826 op1 = op0;
2829 result = expand_mult (mode, op0, op1, target, 0);
2830 if (result != target)
2831 emit_move_insn (target, result);
2832 return target;
2835 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2836 floating point operand in mode MODE, and N is the exponent. This
2837 function needs to be kept in sync with powi_cost above. */
2839 static rtx
2840 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2842 unsigned HOST_WIDE_INT val;
2843 rtx cache[POWI_TABLE_SIZE];
2844 rtx result;
2846 if (n == 0)
2847 return CONST1_RTX (mode);
2849 val = (n < 0) ? -n : n;
2851 memset (cache, 0, sizeof (cache));
2852 cache[1] = x;
2854 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2856 /* If the original exponent was negative, reciprocate the result. */
2857 if (n < 0)
2858 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2859 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2861 return result;
2864 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2865 a normal call should be emitted rather than expanding the function
2866 in-line. EXP is the expression that is a call to the builtin
2867 function; if convenient, the result should be placed in TARGET. */
2869 static rtx
2870 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2872 tree arg0, arg1;
2873 tree fn, narg0;
2874 tree type = TREE_TYPE (exp);
2875 REAL_VALUE_TYPE cint, c, c2;
2876 HOST_WIDE_INT n;
2877 rtx op, op2;
2878 enum machine_mode mode = TYPE_MODE (type);
2880 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2881 return NULL_RTX;
2883 arg0 = CALL_EXPR_ARG (exp, 0);
2884 arg1 = CALL_EXPR_ARG (exp, 1);
2886 if (TREE_CODE (arg1) != REAL_CST
2887 || TREE_OVERFLOW (arg1))
2888 return expand_builtin_mathfn_2 (exp, target, subtarget);
2890 /* Handle constant exponents. */
2892 /* For integer valued exponents we can expand to an optimal multiplication
2893 sequence using expand_powi. */
2894 c = TREE_REAL_CST (arg1);
2895 n = real_to_integer (&c);
2896 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2897 if (real_identical (&c, &cint)
2898 && ((n >= -1 && n <= 2)
2899 || (flag_unsafe_math_optimizations
2900 && !optimize_size
2901 && powi_cost (n) <= POWI_MAX_MULTS)))
2903 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2904 if (n != 1)
2906 op = force_reg (mode, op);
2907 op = expand_powi (op, mode, n);
2909 return op;
2912 narg0 = builtin_save_expr (arg0);
2914 /* If the exponent is not integer valued, check if it is half of an integer.
2915 In this case we can expand to sqrt (x) * x**(n/2). */
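/* For example, pow (x, 0.5) reduces to sqrt (x), and, when the
   unsafe-math checks below allow it, pow (x, 2.5) becomes
   sqrt (x) * x * x.  */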
2916 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2917 if (fn != NULL_TREE)
2919 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2920 n = real_to_integer (&c2);
2921 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2922 if (real_identical (&c2, &cint)
2923 && ((flag_unsafe_math_optimizations
2924 && !optimize_size
2925 && powi_cost (n/2) <= POWI_MAX_MULTS)
2926 || n == 1))
2928 tree call_expr = build_call_expr (fn, 1, narg0);
2929 /* Use expand_expr in case the newly built call expression
2930 was folded to a non-call. */
2931 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2932 if (n != 1)
2934 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2935 op2 = force_reg (mode, op2);
2936 op2 = expand_powi (op2, mode, abs (n / 2));
2937 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2938 0, OPTAB_LIB_WIDEN);
2939 /* If the original exponent was negative, reciprocate the
2940 result. */
2941 if (n < 0)
2942 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2943 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2945 return op;
2949 /* Check whether the exponent is a third of an integer. In this case
2950 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2951 different from pow (x, 1./3.) due to rounding and behavior
2952 with negative x, we need to constrain this transformation to
2953 unsafe math and either positive x or finite math. */
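/* For instance, pow (x, 1./3.) becomes cbrt (x), and pow (x, 5./3.)
   becomes cbrt (x) * cbrt (x) * x, subject to the checks below.  */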
2954 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2955 if (fn != NULL_TREE
2956 && flag_unsafe_math_optimizations
2957 && (tree_expr_nonnegative_p (arg0)
2958 || !HONOR_NANS (mode)))
2960 REAL_VALUE_TYPE dconst3;
2961 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
2962 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2963 real_round (&c2, mode, &c2);
2964 n = real_to_integer (&c2);
2965 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2966 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
2967 real_convert (&c2, mode, &c2);
2968 if (real_identical (&c2, &c)
2969 && ((!optimize_size
2970 && powi_cost (n/3) <= POWI_MAX_MULTS)
2971 || n == 1))
2973 tree call_expr = build_call_expr (fn, 1, narg0);
2974 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2975 if (abs (n) % 3 == 2)
2976 op = expand_simple_binop (mode, MULT, op, op, op,
2977 0, OPTAB_LIB_WIDEN);
2978 if (n != 1)
2980 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2981 op2 = force_reg (mode, op2);
2982 op2 = expand_powi (op2, mode, abs (n / 3));
2983 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2984 0, OPTAB_LIB_WIDEN);
2985 /* If the original exponent was negative, reciprocate the
2986 result. */
2987 if (n < 0)
2988 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2989 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2991 return op;
2995 /* Fall back to optab expansion. */
2996 return expand_builtin_mathfn_2 (exp, target, subtarget);
2999 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3000 a normal call should be emitted rather than expanding the function
3001 in-line. EXP is the expression that is a call to the builtin
3002 function; if convenient, the result should be placed in TARGET. */
3004 static rtx
3005 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3007 tree arg0, arg1;
3008 rtx op0, op1;
3009 enum machine_mode mode;
3010 enum machine_mode mode2;
3012 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3013 return NULL_RTX;
3015 arg0 = CALL_EXPR_ARG (exp, 0);
3016 arg1 = CALL_EXPR_ARG (exp, 1);
3017 mode = TYPE_MODE (TREE_TYPE (exp));
3019 /* Handle constant power. */
3021 if (TREE_CODE (arg1) == INTEGER_CST
3022 && !TREE_OVERFLOW (arg1))
3024 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3026 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3027 Otherwise, check the number of multiplications required. */
3028 if ((TREE_INT_CST_HIGH (arg1) == 0
3029 || TREE_INT_CST_HIGH (arg1) == -1)
3030 && ((n >= -1 && n <= 2)
3031 || (! optimize_size
3032 && powi_cost (n) <= POWI_MAX_MULTS)))
3034 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3035 op0 = force_reg (mode, op0);
3036 return expand_powi (op0, mode, n);
3040 /* Emit a libcall to libgcc. */
3042 /* Mode of the 2nd argument must match that of an int. */
3043 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3045 if (target == NULL_RTX)
3046 target = gen_reg_rtx (mode);
3048 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3049 if (GET_MODE (op0) != mode)
3050 op0 = convert_to_mode (mode, op0, 0);
3051 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3052 if (GET_MODE (op1) != mode2)
3053 op1 = convert_to_mode (mode2, op1, 0);
3055 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3056 target, LCT_CONST, mode, 2,
3057 op0, mode, op1, mode2);
3059 return target;
3062 /* Expand expression EXP, which is a call to the strlen builtin. Return
3063 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3064 try to get the result in TARGET, if convenient. */
3066 static rtx
3067 expand_builtin_strlen (tree exp, rtx target,
3068 enum machine_mode target_mode)
3070 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3071 return NULL_RTX;
3072 else
3074 rtx pat;
3075 tree len;
3076 tree src = CALL_EXPR_ARG (exp, 0);
3077 rtx result, src_reg, char_rtx, before_strlen;
3078 enum machine_mode insn_mode = target_mode, char_mode;
3079 enum insn_code icode = CODE_FOR_nothing;
3080 int align;
3082 /* If the length can be computed at compile-time, return it. */
3083 len = c_strlen (src, 0);
3084 if (len)
3085 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3087 /* If the length can be computed at compile-time and is a constant
3088 integer, but there are side-effects in src, evaluate
3089 src for side-effects, then return len.
3090 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3091 can be optimized into: i++; x = 3; */
3092 len = c_strlen (src, 1);
3093 if (len && TREE_CODE (len) == INTEGER_CST)
3095 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3096 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3099 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3101 /* If SRC is not a pointer type, don't do this operation inline. */
3102 if (align == 0)
3103 return NULL_RTX;
3105 /* Bail out if we can't compute strlen in the right mode. */
3106 while (insn_mode != VOIDmode)
3108 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3109 if (icode != CODE_FOR_nothing)
3110 break;
3112 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3114 if (insn_mode == VOIDmode)
3115 return NULL_RTX;
3117 /* Make a place to write the result of the instruction. */
3118 result = target;
3119 if (! (result != 0
3120 && REG_P (result)
3121 && GET_MODE (result) == insn_mode
3122 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3123 result = gen_reg_rtx (insn_mode);
3125 /* Make a place to hold the source address. We will not expand
3126 the actual source until we are sure that the expansion will
3127 not fail -- there are trees that cannot be expanded twice. */
3128 src_reg = gen_reg_rtx (Pmode);
3130 /* Mark the beginning of the strlen sequence so we can emit the
3131 source operand later. */
3132 before_strlen = get_last_insn ();
3134 char_rtx = const0_rtx;
3135 char_mode = insn_data[(int) icode].operand[2].mode;
3136 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3137 char_mode))
3138 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3140 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3141 char_rtx, GEN_INT (align));
3142 if (! pat)
3143 return NULL_RTX;
3144 emit_insn (pat);
3146 /* Now that we are assured of success, expand the source. */
3147 start_sequence ();
3148 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3149 if (pat != src_reg)
3150 emit_move_insn (src_reg, pat);
3151 pat = get_insns ();
3152 end_sequence ();
3154 if (before_strlen)
3155 emit_insn_after (pat, before_strlen);
3156 else
3157 emit_insn_before (pat, get_insns ());
3159 /* Return the value in the proper mode for this function. */
3160 if (GET_MODE (result) == target_mode)
3161 target = result;
3162 else if (target != 0)
3163 convert_move (target, result, 0);
3164 else
3165 target = convert_to_mode (target_mode, result, 0);
3167 return target;
3171 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed; the
3172 caller should emit a normal call, otherwise try to get the result
3173 in TARGET, if convenient (and in mode MODE if that's convenient). */
3175 static rtx
3176 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3178 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3180 tree type = TREE_TYPE (exp);
3181 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3182 CALL_EXPR_ARG (exp, 1), type);
3183 if (result)
3184 return expand_expr (result, target, mode, EXPAND_NORMAL);
3186 return NULL_RTX;
3189 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed; the
3190 caller should emit a normal call, otherwise try to get the result
3191 in TARGET, if convenient (and in mode MODE if that's convenient). */
3193 static rtx
3194 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3196 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3198 tree type = TREE_TYPE (exp);
3199 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3200 CALL_EXPR_ARG (exp, 1), type);
3201 if (result)
3202 return expand_expr (result, target, mode, EXPAND_NORMAL);
3204 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3206 return NULL_RTX;
3209 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed; the
3210 caller should emit a normal call, otherwise try to get the result
3211 in TARGET, if convenient (and in mode MODE if that's convenient). */
3213 static rtx
3214 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3216 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3218 tree type = TREE_TYPE (exp);
3219 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3220 CALL_EXPR_ARG (exp, 1), type);
3221 if (result)
3222 return expand_expr (result, target, mode, EXPAND_NORMAL);
3224 return NULL_RTX;
3227 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed; the
3228 caller should emit a normal call, otherwise try to get the result
3229 in TARGET, if convenient (and in mode MODE if that's convenient). */
3231 static rtx
3232 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3234 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3236 tree type = TREE_TYPE (exp);
3237 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3238 CALL_EXPR_ARG (exp, 1), type);
3239 if (result)
3240 return expand_expr (result, target, mode, EXPAND_NORMAL);
3242 return NULL_RTX;
3245 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3246 bytes from constant string DATA + OFFSET and return it as target
3247 constant. */
3249 static rtx
3250 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3251 enum machine_mode mode)
3253 const char *str = (const char *) data;
3255 gcc_assert (offset >= 0
3256 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3257 <= strlen (str) + 1));
3259 return c_readstr (str + offset, mode);
3262 /* Expand a call EXP to the memcpy builtin.
3263 Return NULL_RTX if we failed; the caller should emit a normal call,
3264 otherwise try to get the result in TARGET, if convenient (and in
3265 mode MODE if that's convenient). */
3267 static rtx
3268 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3270 tree fndecl = get_callee_fndecl (exp);
3272 if (!validate_arglist (exp,
3273 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3274 return NULL_RTX;
3275 else
3277 tree dest = CALL_EXPR_ARG (exp, 0);
3278 tree src = CALL_EXPR_ARG (exp, 1);
3279 tree len = CALL_EXPR_ARG (exp, 2);
3280 const char *src_str;
3281 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3282 unsigned int dest_align
3283 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3284 rtx dest_mem, src_mem, dest_addr, len_rtx;
3285 tree result = fold_builtin_memory_op (dest, src, len,
3286 TREE_TYPE (TREE_TYPE (fndecl)),
3287 false, /*endp=*/0);
3288 HOST_WIDE_INT expected_size = -1;
3289 unsigned int expected_align = 0;
3290 tree_ann_common_t ann;
3292 if (result)
3294 while (TREE_CODE (result) == COMPOUND_EXPR)
3296 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3297 EXPAND_NORMAL);
3298 result = TREE_OPERAND (result, 1);
3300 return expand_expr (result, target, mode, EXPAND_NORMAL);
3303 /* If DEST is not a pointer type, call the normal function. */
3304 if (dest_align == 0)
3305 return NULL_RTX;
3307 /* If SRC is not a pointer type, don't do this
3308 operation in-line. */
3309 if (src_align == 0)
3310 return NULL_RTX;
3312 ann = tree_common_ann (exp);
3313 if (ann)
3314 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3316 if (expected_align < dest_align)
3317 expected_align = dest_align;
3318 dest_mem = get_memory_rtx (dest, len);
3319 set_mem_align (dest_mem, dest_align);
3320 len_rtx = expand_normal (len);
3321 src_str = c_getstr (src);
3323 /* If SRC is a string constant and block move would be done
3324 by pieces, we can avoid loading the string from memory
3325 and only store the computed constants. */
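/* For instance, with sufficient alignment on a 32-bit little-endian
   target, copying 4 bytes from the string constant "abc" can be emitted
   as a single SImode store of the immediate 0x00636261 instead of a
   load from memory (illustrative; can_store_by_pieces makes the actual
   decision).  */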
3326 if (src_str
3327 && GET_CODE (len_rtx) == CONST_INT
3328 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3329 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3330 CONST_CAST (char *, src_str),
3331 dest_align, false))
3333 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3334 builtin_memcpy_read_str,
3335 CONST_CAST (char *, src_str),
3336 dest_align, false, 0);
3337 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3338 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3339 return dest_mem;
3342 src_mem = get_memory_rtx (src, len);
3343 set_mem_align (src_mem, src_align);
3345 /* Copy word part most expediently. */
3346 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3347 CALL_EXPR_TAILCALL (exp)
3348 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3349 expected_align, expected_size);
3351 if (dest_addr == 0)
3353 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3354 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3356 return dest_addr;
3360 /* Expand a call EXP to the mempcpy builtin.
3361 Return NULL_RTX if we failed; the caller should emit a normal call,
3362 otherwise try to get the result in TARGET, if convenient (and in
3363 mode MODE if that's convenient). If ENDP is 0 return the
3364 destination pointer, if ENDP is 1 return the end pointer ala
3365 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3366 stpcpy. */
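/* Concretely, for a copy of LEN bytes into DEST the returned value is
   DEST when ENDP is 0, DEST + LEN when ENDP is 1, and DEST + LEN - 1
   when ENDP is 2.  */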
3368 static rtx
3369 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3371 if (!validate_arglist (exp,
3372 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3373 return NULL_RTX;
3374 else
3376 tree dest = CALL_EXPR_ARG (exp, 0);
3377 tree src = CALL_EXPR_ARG (exp, 1);
3378 tree len = CALL_EXPR_ARG (exp, 2);
3379 return expand_builtin_mempcpy_args (dest, src, len,
3380 TREE_TYPE (exp),
3381 target, mode, /*endp=*/ 1);
3385 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3386 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3387 so that this can also be called without constructing an actual CALL_EXPR.
3388 TYPE is the return type of the call. The other arguments and return value
3389 are the same as for expand_builtin_mempcpy. */
3391 static rtx
3392 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3393 rtx target, enum machine_mode mode, int endp)
3395 /* If return value is ignored, transform mempcpy into memcpy. */
3396 if (target == const0_rtx)
3398 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3400 if (!fn)
3401 return NULL_RTX;
3403 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3404 target, mode, EXPAND_NORMAL);
3406 else
3408 const char *src_str;
3409 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3410 unsigned int dest_align
3411 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3412 rtx dest_mem, src_mem, len_rtx;
3413 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3415 if (result)
3417 while (TREE_CODE (result) == COMPOUND_EXPR)
3419 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3420 EXPAND_NORMAL);
3421 result = TREE_OPERAND (result, 1);
3423 return expand_expr (result, target, mode, EXPAND_NORMAL);
3426 /* If either SRC or DEST is not a pointer type, don't do this
3427 operation in-line. */
3428 if (dest_align == 0 || src_align == 0)
3429 return NULL_RTX;
3431 /* If LEN is not constant, call the normal function. */
3432 if (! host_integerp (len, 1))
3433 return NULL_RTX;
3435 len_rtx = expand_normal (len);
3436 src_str = c_getstr (src);
3438 /* If SRC is a string constant and block move would be done
3439 by pieces, we can avoid loading the string from memory
3440 and only store the computed constants. */
3441 if (src_str
3442 && GET_CODE (len_rtx) == CONST_INT
3443 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3444 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3445 CONST_CAST (char *, src_str),
3446 dest_align, false))
3448 dest_mem = get_memory_rtx (dest, len);
3449 set_mem_align (dest_mem, dest_align);
3450 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3451 builtin_memcpy_read_str,
3452 CONST_CAST (char *, src_str),
3453 dest_align, false, endp);
3454 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3455 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3456 return dest_mem;
3459 if (GET_CODE (len_rtx) == CONST_INT
3460 && can_move_by_pieces (INTVAL (len_rtx),
3461 MIN (dest_align, src_align)))
3463 dest_mem = get_memory_rtx (dest, len);
3464 set_mem_align (dest_mem, dest_align);
3465 src_mem = get_memory_rtx (src, len);
3466 set_mem_align (src_mem, src_align);
3467 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3468 MIN (dest_align, src_align), endp);
3469 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3470 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3471 return dest_mem;
3474 return NULL_RTX;
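/* Illustrative sketch, not part of the original source: the mempcpy
   semantics the transformations above rely on.  mempcpy copies like memcpy
   but returns the end pointer; when that return value is ignored, memcpy is
   an exact replacement.  Hypothetical example function; assumes a GNU-style
   mempcpy.  */
#if 0  /* example only; never compiled */
#include <string.h>

static char *
example_mempcpy (char *dst, const char *src, size_t n)
{
  /* mempcpy (dst, src, n) is equivalent to ...  */
  memcpy (dst, src, n);
  return dst + n;   /* ... ENDP == 1: pointer one past the last byte.  */
}
#endif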
3478 /* Expand expression EXP, which is a call to the memmove builtin. Return
3479 NULL_RTX if we failed; the caller should emit a normal call. */
3481 static rtx
3482 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3484 if (!validate_arglist (exp,
3485 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3486 return NULL_RTX;
3487 else
3489 tree dest = CALL_EXPR_ARG (exp, 0);
3490 tree src = CALL_EXPR_ARG (exp, 1);
3491 tree len = CALL_EXPR_ARG (exp, 2);
3492 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3493 target, mode, ignore);
3497 /* Helper function to do the actual work for expand_builtin_memmove. The
3498 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3499 so that this can also be called without constructing an actual CALL_EXPR.
3500 TYPE is the return type of the call. The other arguments and return value
3501 are the same as for expand_builtin_memmove. */
3503 static rtx
3504 expand_builtin_memmove_args (tree dest, tree src, tree len,
3505 tree type, rtx target, enum machine_mode mode,
3506 int ignore)
3508 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3510 if (result)
3512 STRIP_TYPE_NOPS (result);
3513 while (TREE_CODE (result) == COMPOUND_EXPR)
3515 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3516 EXPAND_NORMAL);
3517 result = TREE_OPERAND (result, 1);
3519 return expand_expr (result, target, mode, EXPAND_NORMAL);
3522 /* Otherwise, call the normal function. */
3523 return NULL_RTX;
3526 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3527 NULL_RTX if we failed; the caller should emit a normal call. */
3529 static rtx
3530 expand_builtin_bcopy (tree exp, int ignore)
3532 tree type = TREE_TYPE (exp);
3533 tree src, dest, size;
3535 if (!validate_arglist (exp,
3536 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3537 return NULL_RTX;
3539 src = CALL_EXPR_ARG (exp, 0);
3540 dest = CALL_EXPR_ARG (exp, 1);
3541 size = CALL_EXPR_ARG (exp, 2);
3543 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3544 This is done this way so that if it isn't expanded inline, we fall
3545 back to calling bcopy instead of memmove. */
3546 return expand_builtin_memmove_args (dest, src,
3547 fold_convert (sizetype, size),
3548 type, const0_rtx, VOIDmode,
3549 ignore);
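/* Illustrative sketch, not part of the original source: the argument
   rewrite performed just above.  bcopy takes (src, dst, len) while memmove
   takes (dst, src, len), so the first two arguments are swapped and the
   length is converted to size_t.  Hypothetical example function.  */
#if 0  /* example only; never compiled */
#include <string.h>

static void
example_bcopy (const void *src, void *dst, int len)
{
  /* bcopy (src, dst, len) behaves like ...  */
  memmove (dst, src, (size_t) len);
}
#endif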
3552 #ifndef HAVE_movstr
3553 # define HAVE_movstr 0
3554 # define CODE_FOR_movstr CODE_FOR_nothing
3555 #endif
3557 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3558 we failed; the caller should emit a normal call, otherwise try to
3559 get the result in TARGET, if convenient. If ENDP is 0 return the
3560 destination pointer, if ENDP is 1 return the end pointer ala
3561 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3562 stpcpy. */
3564 static rtx
3565 expand_movstr (tree dest, tree src, rtx target, int endp)
3567 rtx end;
3568 rtx dest_mem;
3569 rtx src_mem;
3570 rtx insn;
3571 const struct insn_data * data;
3573 if (!HAVE_movstr)
3574 return NULL_RTX;
3576 dest_mem = get_memory_rtx (dest, NULL);
3577 src_mem = get_memory_rtx (src, NULL);
3578 if (!endp)
3580 target = force_reg (Pmode, XEXP (dest_mem, 0));
3581 dest_mem = replace_equiv_address (dest_mem, target);
3582 end = gen_reg_rtx (Pmode);
3584 else
3586 if (target == 0 || target == const0_rtx)
3588 end = gen_reg_rtx (Pmode);
3589 if (target == 0)
3590 target = end;
3592 else
3593 end = target;
3596 data = insn_data + CODE_FOR_movstr;
3598 if (data->operand[0].mode != VOIDmode)
3599 end = gen_lowpart (data->operand[0].mode, end);
3601 insn = data->genfun (end, dest_mem, src_mem);
3603 gcc_assert (insn);
3605 emit_insn (insn);
3607 /* movstr is supposed to set end to the address of the NUL
3608 terminator. If the caller requested a mempcpy-like return value,
3609 adjust it. */
3610 if (endp == 1 && target != const0_rtx)
3612 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3613 emit_move_insn (target, force_operand (tem, NULL_RTX));
3616 return target;
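/* Illustrative sketch, not part of the original source: the three ENDP
   return conventions handled above, expressed at the source level.  For a
   strcpy-style expansion the destination pointer is returned, for a
   mempcpy-style expansion the pointer one past the copied NUL, and for a
   stpcpy-style expansion the pointer to the NUL itself.  Hypothetical
   example function.  */
#if 0  /* example only; never compiled */
#include <string.h>

static void
example_endp_conventions (char *dst, const char *src)
{
  size_t n = strlen (src);
  char *p0 = strcpy (dst, src);   /* ENDP == 0: p0 == dst.  */
  char *p2 = dst + n;             /* ENDP == 2: address of the NUL.  */
  char *p1 = p2 + 1;              /* ENDP == 1: one past the NUL.  */
  (void) p0; (void) p1;
}
#endif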
3619 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3620 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3621 try to get the result in TARGET, if convenient (and in mode MODE if that's
3622 convenient). */
3624 static rtx
3625 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3627 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3629 tree dest = CALL_EXPR_ARG (exp, 0);
3630 tree src = CALL_EXPR_ARG (exp, 1);
3631 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3633 return NULL_RTX;
3636 /* Helper function to do the actual work for expand_builtin_strcpy. The
3637 arguments to the builtin_strcpy call DEST and SRC are broken out
3638 so that this can also be called without constructing an actual CALL_EXPR.
3639 The other arguments and return value are the same as for
3640 expand_builtin_strcpy. */
3642 static rtx
3643 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3644 rtx target, enum machine_mode mode)
3646 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3647 if (result)
3648 return expand_expr (result, target, mode, EXPAND_NORMAL);
3649 return expand_movstr (dest, src, target, /*endp=*/0);
3653 /* Expand a call EXP to the stpcpy builtin.
3654 Return NULL_RTX if we failed; the caller should emit a normal call,
3655 otherwise try to get the result in TARGET, if convenient (and in
3656 mode MODE if that's convenient). */
3658 static rtx
3659 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3661 tree dst, src;
3663 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3664 return NULL_RTX;
3666 dst = CALL_EXPR_ARG (exp, 0);
3667 src = CALL_EXPR_ARG (exp, 1);
3669 /* If return value is ignored, transform stpcpy into strcpy. */
3670 if (target == const0_rtx)
3672 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3673 if (!fn)
3674 return NULL_RTX;
3676 return expand_expr (build_call_expr (fn, 2, dst, src),
3677 target, mode, EXPAND_NORMAL);
3679 else
3681 tree len, lenp1;
3682 rtx ret;
3684 /* Ensure we get an actual string whose length can be evaluated at
3685 compile-time, not an expression containing a string. This is
3686 because the latter will potentially produce pessimized code
3687 when used to produce the return value. */
3688 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3689 return expand_movstr (dst, src, target, /*endp=*/2);
3691 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3692 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3693 target, mode, /*endp=*/2);
3695 if (ret)
3696 return ret;
3698 if (TREE_CODE (len) == INTEGER_CST)
3700 rtx len_rtx = expand_normal (len);
3702 if (GET_CODE (len_rtx) == CONST_INT)
3704 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3705 dst, src, target, mode);
3707 if (ret)
3709 if (! target)
3711 if (mode != VOIDmode)
3712 target = gen_reg_rtx (mode);
3713 else
3714 target = gen_reg_rtx (GET_MODE (ret));
3716 if (GET_MODE (target) != GET_MODE (ret))
3717 ret = gen_lowpart (GET_MODE (target), ret);
3719 ret = plus_constant (ret, INTVAL (len_rtx));
3720 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3721 gcc_assert (ret);
3723 return target;
3728 return expand_movstr (dst, src, target, /*endp=*/2);
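/* Illustrative sketch, not part of the original source: the identities used
   above.  When the source length is a known constant, stpcpy (dst, src)
   copies strlen (src) + 1 bytes and returns dst + strlen (src); when its
   result is ignored it degenerates to strcpy.  Hypothetical example
   function.  */
#if 0  /* example only; never compiled */
#include <string.h>

static char *
example_stpcpy (char *dst, const char *src)
{
  size_t len = strlen (src);   /* known at compile time in the case above.  */
  memcpy (dst, src, len + 1);  /* mempcpy-style copy of len + 1 bytes.  */
  return dst + len;            /* stpcpy's end-minus-one pointer.  */
}
#endif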
3732 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3733 bytes from constant string DATA + OFFSET and return it as target
3734 constant. */
3737 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3738 enum machine_mode mode)
3740 const char *str = (const char *) data;
3742 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3743 return const0_rtx;
3745 return c_readstr (str + offset, mode);
3748 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3749 NULL_RTX if we failed; the caller should emit a normal call. */
3751 static rtx
3752 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3754 tree fndecl = get_callee_fndecl (exp);
3756 if (validate_arglist (exp,
3757 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3759 tree dest = CALL_EXPR_ARG (exp, 0);
3760 tree src = CALL_EXPR_ARG (exp, 1);
3761 tree len = CALL_EXPR_ARG (exp, 2);
3762 tree slen = c_strlen (src, 1);
3763 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3765 if (result)
3767 while (TREE_CODE (result) == COMPOUND_EXPR)
3769 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3770 EXPAND_NORMAL);
3771 result = TREE_OPERAND (result, 1);
3773 return expand_expr (result, target, mode, EXPAND_NORMAL);
3776 /* We must be passed constant LEN and SRC parameters. */
3777 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3778 return NULL_RTX;
3780 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3782 /* We're required to pad with trailing zeros if the requested
3783 len is greater than strlen(s2)+1. In that case try to
3784 use store_by_pieces; if it fails, punt. */
3785 if (tree_int_cst_lt (slen, len))
3787 unsigned int dest_align
3788 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3789 const char *p = c_getstr (src);
3790 rtx dest_mem;
3792 if (!p || dest_align == 0 || !host_integerp (len, 1)
3793 || !can_store_by_pieces (tree_low_cst (len, 1),
3794 builtin_strncpy_read_str,
3795 CONST_CAST (char *, p),
3796 dest_align, false))
3797 return NULL_RTX;
3799 dest_mem = get_memory_rtx (dest, len);
3800 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3801 builtin_strncpy_read_str,
3802 CONST_CAST (char *, p), dest_align, false, 0);
3803 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3804 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3805 return dest_mem;
3808 return NULL_RTX;
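/* Illustrative sketch, not part of the original source: the padding rule
   handled above.  When LEN exceeds strlen (SRC), strncpy must fill the
   remainder of the destination with '\0' bytes, which is why the whole
   result can be emitted with store_by_pieces when SRC is a known literal.
   Hypothetical example function.  */
#if 0  /* example only; never compiled */
#include <string.h>

static void
example_strncpy_padding (char dst[8])
{
  strncpy (dst, "ab", 8);
  /* Equivalent stores: 'a', 'b', then six '\0' padding bytes,
     i.e. dst == { 'a', 'b', 0, 0, 0, 0, 0, 0 }.  */
}
#endif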
3811 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3812 bytes from constant string DATA + OFFSET and return it as target
3813 constant. */
3816 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3817 enum machine_mode mode)
3819 const char *c = (const char *) data;
3820 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3822 memset (p, *c, GET_MODE_SIZE (mode));
3824 return c_readstr (p, mode);
3827 /* Callback routine for store_by_pieces. Return the RTL of a register
3828 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3829 char value given in the RTL register data. For example, if mode is
3830 4 bytes wide, return the RTL for 0x01010101*data. */
3832 static rtx
3833 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3834 enum machine_mode mode)
3836 rtx target, coeff;
3837 size_t size;
3838 char *p;
3840 size = GET_MODE_SIZE (mode);
3841 if (size == 1)
3842 return (rtx) data;
3844 p = XALLOCAVEC (char, size);
3845 memset (p, 1, size);
3846 coeff = c_readstr (p, mode);
3848 target = convert_to_mode (mode, (rtx) data, 1);
3849 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3850 return force_reg (mode, target);
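/* Illustrative sketch, not part of the original source: the byte-replication
   trick used above, shown for a 4-byte mode.  Multiplying the zero-extended
   byte by 0x01010101 copies it into every byte of the word.  Hypothetical
   example function.  */
#if 0  /* example only; never compiled */
#include <stdint.h>

static uint32_t
example_replicate_byte (unsigned char c)
{
  return (uint32_t) c * 0x01010101u;   /* e.g. 0xAB -> 0xABABABAB.  */
}
#endif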
3853 /* Expand expression EXP, which is a call to the memset builtin. Return
3854 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3855 try to get the result in TARGET, if convenient (and in mode MODE if that's
3856 convenient). */
3858 static rtx
3859 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3861 if (!validate_arglist (exp,
3862 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3863 return NULL_RTX;
3864 else
3866 tree dest = CALL_EXPR_ARG (exp, 0);
3867 tree val = CALL_EXPR_ARG (exp, 1);
3868 tree len = CALL_EXPR_ARG (exp, 2);
3869 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3873 /* Helper function to do the actual work for expand_builtin_memset. The
3874 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3875 so that this can also be called without constructing an actual CALL_EXPR.
3876 The other arguments and return value are the same as for
3877 expand_builtin_memset. */
3879 static rtx
3880 expand_builtin_memset_args (tree dest, tree val, tree len,
3881 rtx target, enum machine_mode mode, tree orig_exp)
3883 tree fndecl, fn;
3884 enum built_in_function fcode;
3885 char c;
3886 unsigned int dest_align;
3887 rtx dest_mem, dest_addr, len_rtx;
3888 HOST_WIDE_INT expected_size = -1;
3889 unsigned int expected_align = 0;
3890 tree_ann_common_t ann;
3892 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3894 /* If DEST is not a pointer type, don't do this operation in-line. */
3895 if (dest_align == 0)
3896 return NULL_RTX;
3898 ann = tree_common_ann (orig_exp);
3899 if (ann)
3900 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3902 if (expected_align < dest_align)
3903 expected_align = dest_align;
3905 /* If the LEN parameter is zero, return DEST. */
3906 if (integer_zerop (len))
3908 /* Evaluate and ignore VAL in case it has side-effects. */
3909 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3910 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3913 /* Stabilize the arguments in case we fail. */
3914 dest = builtin_save_expr (dest);
3915 val = builtin_save_expr (val);
3916 len = builtin_save_expr (len);
3918 len_rtx = expand_normal (len);
3919 dest_mem = get_memory_rtx (dest, len);
3921 if (TREE_CODE (val) != INTEGER_CST)
3923 rtx val_rtx;
3925 val_rtx = expand_normal (val);
3926 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3927 val_rtx, 0);
3929 /* Assume that we can memset by pieces if we can store
3930 the coefficients by pieces (in the required modes).
3931 We can't pass builtin_memset_gen_str as that emits RTL. */
3932 c = 1;
3933 if (host_integerp (len, 1)
3934 && can_store_by_pieces (tree_low_cst (len, 1),
3935 builtin_memset_read_str, &c, dest_align,
3936 true))
3938 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3939 val_rtx);
3940 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3941 builtin_memset_gen_str, val_rtx, dest_align,
3942 true, 0);
3944 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3945 dest_align, expected_align,
3946 expected_size))
3947 goto do_libcall;
3949 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3950 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3951 return dest_mem;
3954 if (target_char_cast (val, &c))
3955 goto do_libcall;
3957 if (c)
3959 if (host_integerp (len, 1)
3960 && can_store_by_pieces (tree_low_cst (len, 1),
3961 builtin_memset_read_str, &c, dest_align,
3962 true))
3963 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3964 builtin_memset_read_str, &c, dest_align, true, 0);
3965 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3966 dest_align, expected_align,
3967 expected_size))
3968 goto do_libcall;
3970 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3971 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3972 return dest_mem;
3975 set_mem_align (dest_mem, dest_align);
3976 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3977 CALL_EXPR_TAILCALL (orig_exp)
3978 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3979 expected_align, expected_size);
3981 if (dest_addr == 0)
3983 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3984 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3987 return dest_addr;
3989 do_libcall:
3990 fndecl = get_callee_fndecl (orig_exp);
3991 fcode = DECL_FUNCTION_CODE (fndecl);
3992 if (fcode == BUILT_IN_MEMSET)
3993 fn = build_call_expr (fndecl, 3, dest, val, len);
3994 else if (fcode == BUILT_IN_BZERO)
3995 fn = build_call_expr (fndecl, 2, dest, len);
3996 else
3997 gcc_unreachable ();
3998 if (TREE_CODE (fn) == CALL_EXPR)
3999 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4000 return expand_call (fn, target, target == const0_rtx);
4003 /* Expand expression EXP, which is a call to the bzero builtin. Return
4004 NULL_RTX if we failed; the caller should emit a normal call. */
4006 static rtx
4007 expand_builtin_bzero (tree exp)
4009 tree dest, size;
4011 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4012 return NULL_RTX;
4014 dest = CALL_EXPR_ARG (exp, 0);
4015 size = CALL_EXPR_ARG (exp, 1);
4017 /* New argument list transforming bzero(ptr x, int y) to
4018 memset(ptr x, int 0, size_t y). This is done this way
4019 so that if it isn't expanded inline, we fall back to
4020 calling bzero instead of memset. */
4022 return expand_builtin_memset_args (dest, integer_zero_node,
4023 fold_convert (sizetype, size),
4024 const0_rtx, VOIDmode, exp);
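/* Illustrative sketch, not part of the original source: the rewrite
   performed above.  bzero (ptr, n) clears n bytes, which is
   memset (ptr, 0, n) with the length converted to size_t.  Hypothetical
   example function.  */
#if 0  /* example only; never compiled */
#include <string.h>

static void
example_bzero (void *ptr, int n)
{
  memset (ptr, 0, (size_t) n);
}
#endif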
4027 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed; the
4028 caller should emit a normal call, otherwise try to get the result
4029 in TARGET, if convenient (and in mode MODE if that's convenient). */
4031 static rtx
4032 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4034 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4035 INTEGER_TYPE, VOID_TYPE))
4037 tree type = TREE_TYPE (exp);
4038 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4039 CALL_EXPR_ARG (exp, 1),
4040 CALL_EXPR_ARG (exp, 2), type);
4041 if (result)
4042 return expand_expr (result, target, mode, EXPAND_NORMAL);
4044 return NULL_RTX;
4047 /* Expand expression EXP, which is a call to the memcmp built-in function.
4048 Return NULL_RTX if we failed and the
4049 caller should emit a normal call, otherwise try to get the result in
4050 TARGET, if convenient (and in mode MODE, if that's convenient). */
4052 static rtx
4053 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4055 if (!validate_arglist (exp,
4056 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4057 return NULL_RTX;
4058 else
4060 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4061 CALL_EXPR_ARG (exp, 1),
4062 CALL_EXPR_ARG (exp, 2));
4063 if (result)
4064 return expand_expr (result, target, mode, EXPAND_NORMAL);
4067 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4069 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4070 rtx result;
4071 rtx insn;
4072 tree arg1 = CALL_EXPR_ARG (exp, 0);
4073 tree arg2 = CALL_EXPR_ARG (exp, 1);
4074 tree len = CALL_EXPR_ARG (exp, 2);
4076 int arg1_align
4077 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4078 int arg2_align
4079 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4080 enum machine_mode insn_mode;
4082 #ifdef HAVE_cmpmemsi
4083 if (HAVE_cmpmemsi)
4084 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4085 else
4086 #endif
4087 #ifdef HAVE_cmpstrnsi
4088 if (HAVE_cmpstrnsi)
4089 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4090 else
4091 #endif
4092 return NULL_RTX;
4094 /* If we don't have POINTER_TYPE, call the function. */
4095 if (arg1_align == 0 || arg2_align == 0)
4096 return NULL_RTX;
4098 /* Make a place to write the result of the instruction. */
4099 result = target;
4100 if (! (result != 0
4101 && REG_P (result) && GET_MODE (result) == insn_mode
4102 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4103 result = gen_reg_rtx (insn_mode);
4105 arg1_rtx = get_memory_rtx (arg1, len);
4106 arg2_rtx = get_memory_rtx (arg2, len);
4107 arg3_rtx = expand_normal (len);
4109 /* Set MEM_SIZE as appropriate. */
4110 if (GET_CODE (arg3_rtx) == CONST_INT)
4112 set_mem_size (arg1_rtx, arg3_rtx);
4113 set_mem_size (arg2_rtx, arg3_rtx);
4116 #ifdef HAVE_cmpmemsi
4117 if (HAVE_cmpmemsi)
4118 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4119 GEN_INT (MIN (arg1_align, arg2_align)));
4120 else
4121 #endif
4122 #ifdef HAVE_cmpstrnsi
4123 if (HAVE_cmpstrnsi)
4124 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4125 GEN_INT (MIN (arg1_align, arg2_align)));
4126 else
4127 #endif
4128 gcc_unreachable ();
4130 if (insn)
4131 emit_insn (insn);
4132 else
4133 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4134 TYPE_MODE (integer_type_node), 3,
4135 XEXP (arg1_rtx, 0), Pmode,
4136 XEXP (arg2_rtx, 0), Pmode,
4137 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4138 TYPE_UNSIGNED (sizetype)),
4139 TYPE_MODE (sizetype));
4141 /* Return the value in the proper mode for this function. */
4142 mode = TYPE_MODE (TREE_TYPE (exp));
4143 if (GET_MODE (result) == mode)
4144 return result;
4145 else if (target != 0)
4147 convert_move (target, result, 0);
4148 return target;
4150 else
4151 return convert_to_mode (mode, result, 0);
4153 #endif
4155 return NULL_RTX;
4158 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4159 if we failed; the caller should emit a normal call, otherwise try to get
4160 the result in TARGET, if convenient. */
4162 static rtx
4163 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4165 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4166 return NULL_RTX;
4167 else
4169 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4170 CALL_EXPR_ARG (exp, 1));
4171 if (result)
4172 return expand_expr (result, target, mode, EXPAND_NORMAL);
4175 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4176 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4177 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4179 rtx arg1_rtx, arg2_rtx;
4180 rtx result, insn = NULL_RTX;
4181 tree fndecl, fn;
4182 tree arg1 = CALL_EXPR_ARG (exp, 0);
4183 tree arg2 = CALL_EXPR_ARG (exp, 1);
4185 int arg1_align
4186 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4187 int arg2_align
4188 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4190 /* If we don't have POINTER_TYPE, call the function. */
4191 if (arg1_align == 0 || arg2_align == 0)
4192 return NULL_RTX;
4194 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4195 arg1 = builtin_save_expr (arg1);
4196 arg2 = builtin_save_expr (arg2);
4198 arg1_rtx = get_memory_rtx (arg1, NULL);
4199 arg2_rtx = get_memory_rtx (arg2, NULL);
4201 #ifdef HAVE_cmpstrsi
4202 /* Try to call cmpstrsi. */
4203 if (HAVE_cmpstrsi)
4205 enum machine_mode insn_mode
4206 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4208 /* Make a place to write the result of the instruction. */
4209 result = target;
4210 if (! (result != 0
4211 && REG_P (result) && GET_MODE (result) == insn_mode
4212 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4213 result = gen_reg_rtx (insn_mode);
4215 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4216 GEN_INT (MIN (arg1_align, arg2_align)));
4218 #endif
4219 #ifdef HAVE_cmpstrnsi
4220 /* Try to determine at least one length and call cmpstrnsi. */
4221 if (!insn && HAVE_cmpstrnsi)
4223 tree len;
4224 rtx arg3_rtx;
4226 enum machine_mode insn_mode
4227 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4228 tree len1 = c_strlen (arg1, 1);
4229 tree len2 = c_strlen (arg2, 1);
4231 if (len1)
4232 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4233 if (len2)
4234 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4236 /* If we don't have a constant length for the first, use the length
4237 of the second, if we know it. We don't require a constant for
4238 this case; some cost analysis could be done if both are available
4239 but neither is constant. For now, assume they're equally cheap,
4240 unless one has side effects. If both strings have constant lengths,
4241 use the smaller. */
4243 if (!len1)
4244 len = len2;
4245 else if (!len2)
4246 len = len1;
4247 else if (TREE_SIDE_EFFECTS (len1))
4248 len = len2;
4249 else if (TREE_SIDE_EFFECTS (len2))
4250 len = len1;
4251 else if (TREE_CODE (len1) != INTEGER_CST)
4252 len = len2;
4253 else if (TREE_CODE (len2) != INTEGER_CST)
4254 len = len1;
4255 else if (tree_int_cst_lt (len1, len2))
4256 len = len1;
4257 else
4258 len = len2;
4260 /* If both arguments have side effects, we cannot optimize. */
4261 if (!len || TREE_SIDE_EFFECTS (len))
4262 goto do_libcall;
4264 arg3_rtx = expand_normal (len);
4266 /* Make a place to write the result of the instruction. */
4267 result = target;
4268 if (! (result != 0
4269 && REG_P (result) && GET_MODE (result) == insn_mode
4270 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4271 result = gen_reg_rtx (insn_mode);
4273 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4274 GEN_INT (MIN (arg1_align, arg2_align)));
4276 #endif
4278 if (insn)
4280 emit_insn (insn);
4282 /* Return the value in the proper mode for this function. */
4283 mode = TYPE_MODE (TREE_TYPE (exp));
4284 if (GET_MODE (result) == mode)
4285 return result;
4286 if (target == 0)
4287 return convert_to_mode (mode, result, 0);
4288 convert_move (target, result, 0);
4289 return target;
4292 /* Expand the library call ourselves using a stabilized argument
4293 list to avoid re-evaluating the function's arguments twice. */
4294 #ifdef HAVE_cmpstrnsi
4295 do_libcall:
4296 #endif
4297 fndecl = get_callee_fndecl (exp);
4298 fn = build_call_expr (fndecl, 2, arg1, arg2);
4299 if (TREE_CODE (fn) == CALL_EXPR)
4300 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4301 return expand_call (fn, target, target == const0_rtx);
4303 #endif
4304 return NULL_RTX;
4307 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4308 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4309 the result in TARGET, if convenient. */
4311 static rtx
4312 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4314 if (!validate_arglist (exp,
4315 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4316 return NULL_RTX;
4317 else
4319 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4320 CALL_EXPR_ARG (exp, 1),
4321 CALL_EXPR_ARG (exp, 2));
4322 if (result)
4323 return expand_expr (result, target, mode, EXPAND_NORMAL);
4326 /* If c_strlen can determine an expression for one of the string
4327 lengths, and it doesn't have side effects, then emit cmpstrnsi
4328 using length MIN(strlen(string)+1, arg3). */
4329 #ifdef HAVE_cmpstrnsi
4330 if (HAVE_cmpstrnsi)
4332 tree len, len1, len2;
4333 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4334 rtx result, insn;
4335 tree fndecl, fn;
4336 tree arg1 = CALL_EXPR_ARG (exp, 0);
4337 tree arg2 = CALL_EXPR_ARG (exp, 1);
4338 tree arg3 = CALL_EXPR_ARG (exp, 2);
4340 int arg1_align
4341 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4342 int arg2_align
4343 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4344 enum machine_mode insn_mode
4345 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4347 len1 = c_strlen (arg1, 1);
4348 len2 = c_strlen (arg2, 1);
4350 if (len1)
4351 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4352 if (len2)
4353 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4355 /* If we don't have a constant length for the first, use the length
4356 of the second, if we know it. We don't require a constant for
4357 this case; some cost analysis could be done if both are available
4358 but neither is constant. For now, assume they're equally cheap,
4359 unless one has side effects. If both strings have constant lengths,
4360 use the smaller. */
4362 if (!len1)
4363 len = len2;
4364 else if (!len2)
4365 len = len1;
4366 else if (TREE_SIDE_EFFECTS (len1))
4367 len = len2;
4368 else if (TREE_SIDE_EFFECTS (len2))
4369 len = len1;
4370 else if (TREE_CODE (len1) != INTEGER_CST)
4371 len = len2;
4372 else if (TREE_CODE (len2) != INTEGER_CST)
4373 len = len1;
4374 else if (tree_int_cst_lt (len1, len2))
4375 len = len1;
4376 else
4377 len = len2;
4379 /* If both arguments have side effects, we cannot optimize. */
4380 if (!len || TREE_SIDE_EFFECTS (len))
4381 return NULL_RTX;
4383 /* The actual new length parameter is MIN(len,arg3). */
4384 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4385 fold_convert (TREE_TYPE (len), arg3));
4387 /* If we don't have POINTER_TYPE, call the function. */
4388 if (arg1_align == 0 || arg2_align == 0)
4389 return NULL_RTX;
4391 /* Make a place to write the result of the instruction. */
4392 result = target;
4393 if (! (result != 0
4394 && REG_P (result) && GET_MODE (result) == insn_mode
4395 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4396 result = gen_reg_rtx (insn_mode);
4398 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4399 arg1 = builtin_save_expr (arg1);
4400 arg2 = builtin_save_expr (arg2);
4401 len = builtin_save_expr (len);
4403 arg1_rtx = get_memory_rtx (arg1, len);
4404 arg2_rtx = get_memory_rtx (arg2, len);
4405 arg3_rtx = expand_normal (len);
4406 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4407 GEN_INT (MIN (arg1_align, arg2_align)));
4408 if (insn)
4410 emit_insn (insn);
4412 /* Return the value in the proper mode for this function. */
4413 mode = TYPE_MODE (TREE_TYPE (exp));
4414 if (GET_MODE (result) == mode)
4415 return result;
4416 if (target == 0)
4417 return convert_to_mode (mode, result, 0);
4418 convert_move (target, result, 0);
4419 return target;
4422 /* Expand the library call ourselves using a stabilized argument
4423 list to avoid re-evaluating the function's arguments twice. */
4424 fndecl = get_callee_fndecl (exp);
4425 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4426 if (TREE_CODE (fn) == CALL_EXPR)
4427 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4428 return expand_call (fn, target, target == const0_rtx);
4430 #endif
4431 return NULL_RTX;
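/* Illustrative sketch, not part of the original source: why
   MIN (strlen (s) + 1, n) is a safe length bound above.  strncmp never
   compares past the first NUL of either string, so once one operand's
   length is known the comparison can be capped there without changing the
   result.  Hypothetical example function.  */
#if 0  /* example only; never compiled */
#include <string.h>

static int
example_strncmp_bound (const char *s1, const char *s2, size_t n)
{
  size_t len1 = strlen (s1) + 1;        /* a known constant in the code above */
  size_t bound = len1 < n ? len1 : n;   /* MIN (strlen (s1) + 1, n) */
  return strncmp (s1, s2, bound);       /* same result as strncmp (s1, s2, n) */
}
#endif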
4434 /* Expand expression EXP, which is a call to the strcat builtin.
4435 Return NULL_RTX if we failed; the caller should emit a normal call,
4436 otherwise try to get the result in TARGET, if convenient. */
4438 static rtx
4439 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4441 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4442 return NULL_RTX;
4443 else
4445 tree dst = CALL_EXPR_ARG (exp, 0);
4446 tree src = CALL_EXPR_ARG (exp, 1);
4447 const char *p = c_getstr (src);
4449 /* If the string length is zero, return the dst parameter. */
4450 if (p && *p == '\0')
4451 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4453 if (!optimize_size)
4455 /* See if we can store by pieces into (dst + strlen(dst)). */
4456 tree newsrc, newdst,
4457 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4458 rtx insns;
4460 /* Stabilize the argument list. */
4461 newsrc = builtin_save_expr (src);
4462 dst = builtin_save_expr (dst);
4464 start_sequence ();
4466 /* Create strlen (dst). */
4467 newdst = build_call_expr (strlen_fn, 1, dst);
4468 /* Create (dst p+ strlen (dst)). */
4470 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4471 newdst = builtin_save_expr (newdst);
4473 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4475 end_sequence (); /* Stop sequence. */
4476 return NULL_RTX;
4479 /* Output the entire sequence. */
4480 insns = get_insns ();
4481 end_sequence ();
4482 emit_insn (insns);
4484 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4487 return NULL_RTX;
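/* Illustrative sketch, not part of the original source: the source-level
   shape of the strcat expansion above.  The copy is redirected to
   dst + strlen (dst) so the tail can be stored by pieces, and the original
   dst is the return value.  Hypothetical example function.  */
#if 0  /* example only; never compiled */
#include <string.h>

static char *
example_strcat (char *dst, const char *src)
{
  strcpy (dst + strlen (dst), src);   /* the (dst p+ strlen (dst)) above.  */
  return dst;                         /* strcat returns its first argument.  */
}
#endif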
4491 /* Expand expression EXP, which is a call to the strncat builtin.
4492 Return NULL_RTX if we failed; the caller should emit a normal call,
4493 otherwise try to get the result in TARGET, if convenient. */
4495 static rtx
4496 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4498 if (validate_arglist (exp,
4499 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4501 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4502 CALL_EXPR_ARG (exp, 1),
4503 CALL_EXPR_ARG (exp, 2));
4504 if (result)
4505 return expand_expr (result, target, mode, EXPAND_NORMAL);
4507 return NULL_RTX;
4510 /* Expand expression EXP, which is a call to the strspn builtin.
4511 Return NULL_RTX if we failed; the caller should emit a normal call,
4512 otherwise try to get the result in TARGET, if convenient. */
4514 static rtx
4515 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4517 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4519 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4520 CALL_EXPR_ARG (exp, 1));
4521 if (result)
4522 return expand_expr (result, target, mode, EXPAND_NORMAL);
4524 return NULL_RTX;
4527 /* Expand expression EXP, which is a call to the strcspn builtin.
4528 Return NULL_RTX if we failed; the caller should emit a normal call,
4529 otherwise try to get the result in TARGET, if convenient. */
4531 static rtx
4532 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4534 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4536 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4537 CALL_EXPR_ARG (exp, 1));
4538 if (result)
4539 return expand_expr (result, target, mode, EXPAND_NORMAL);
4541 return NULL_RTX;
4544 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4545 if that's convenient. */
4548 expand_builtin_saveregs (void)
4550 rtx val, seq;
4552 /* Don't do __builtin_saveregs more than once in a function.
4553 Save the result of the first call and reuse it. */
4554 if (saveregs_value != 0)
4555 return saveregs_value;
4557 /* When this function is called, it means that registers must be
4558 saved on entry to this function. So we migrate the call to the
4559 first insn of this function. */
4561 start_sequence ();
4563 /* Do whatever the machine needs done in this case. */
4564 val = targetm.calls.expand_builtin_saveregs ();
4566 seq = get_insns ();
4567 end_sequence ();
4569 saveregs_value = val;
4571 /* Put the insns after the NOTE that starts the function. If this
4572 is inside a start_sequence, make the outer-level insn chain current, so
4573 the code is placed at the start of the function. */
4574 push_topmost_sequence ();
4575 emit_insn_after (seq, entry_of_function ());
4576 pop_topmost_sequence ();
4578 return val;
4581 /* __builtin_args_info (N) returns word N of the arg space info
4582 for the current function. The number and meanings of words
4583 are controlled by the definition of CUMULATIVE_ARGS. */
4585 static rtx
4586 expand_builtin_args_info (tree exp)
4588 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4589 int *word_ptr = (int *) &crtl->args.info;
4591 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4593 if (call_expr_nargs (exp) != 0)
4595 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4596 error ("argument of %<__builtin_args_info%> must be constant");
4597 else
4599 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4601 if (wordnum < 0 || wordnum >= nwords)
4602 error ("argument of %<__builtin_args_info%> out of range");
4603 else
4604 return GEN_INT (word_ptr[wordnum]);
4607 else
4608 error ("missing argument in %<__builtin_args_info%>");
4610 return const0_rtx;
4613 /* Expand a call to __builtin_next_arg. */
4615 static rtx
4616 expand_builtin_next_arg (void)
4618 /* Checking arguments is already done in fold_builtin_next_arg
4619 that must be called before this function. */
4620 return expand_binop (ptr_mode, add_optab,
4621 crtl->args.internal_arg_pointer,
4622 crtl->args.arg_offset_rtx,
4623 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4626 /* Make it easier for the backends by protecting the valist argument
4627 from multiple evaluations. */
4629 static tree
4630 stabilize_va_list (tree valist, int needs_lvalue)
4632 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4634 gcc_assert (vatype != NULL_TREE);
4636 if (TREE_CODE (vatype) == ARRAY_TYPE)
4638 if (TREE_SIDE_EFFECTS (valist))
4639 valist = save_expr (valist);
4641 /* For this case, the backends will be expecting a pointer to
4642 vatype, but it's possible we've actually been given an array
4643 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4644 So fix it. */
4645 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4647 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4648 valist = build_fold_addr_expr_with_type (valist, p1);
4651 else
4653 tree pt;
4655 if (! needs_lvalue)
4657 if (! TREE_SIDE_EFFECTS (valist))
4658 return valist;
4660 pt = build_pointer_type (vatype);
4661 valist = fold_build1 (ADDR_EXPR, pt, valist);
4662 TREE_SIDE_EFFECTS (valist) = 1;
4665 if (TREE_SIDE_EFFECTS (valist))
4666 valist = save_expr (valist);
4667 valist = build_fold_indirect_ref (valist);
4670 return valist;
4673 /* The "standard" definition of va_list is void*. */
4675 tree
4676 std_build_builtin_va_list (void)
4678 return ptr_type_node;
4681 /* The "standard" abi va_list is va_list_type_node. */
4683 tree
4684 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4686 return va_list_type_node;
4689 /* The "standard" type of va_list is va_list_type_node. */
4691 tree
4692 std_canonical_va_list_type (tree type)
4694 tree wtype, htype;
4696 if (INDIRECT_REF_P (type))
4697 type = TREE_TYPE (type);
4698 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4699 type = TREE_TYPE (type);
4700 wtype = va_list_type_node;
4701 htype = type;
4702 /* Treat structure va_list types. */
4703 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4704 htype = TREE_TYPE (htype);
4705 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4707 /* If va_list is an array type, the argument may have decayed
4708 to a pointer type, e.g. by being passed to another function.
4709 In that case, unwrap both types so that we can compare the
4710 underlying records. */
4711 if (TREE_CODE (htype) == ARRAY_TYPE
4712 || POINTER_TYPE_P (htype))
4714 wtype = TREE_TYPE (wtype);
4715 htype = TREE_TYPE (htype);
4718 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4719 return va_list_type_node;
4721 return NULL_TREE;
4724 /* The "standard" implementation of va_start: just assign `nextarg' to
4725 the variable. */
4727 void
4728 std_expand_builtin_va_start (tree valist, rtx nextarg)
4730 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4731 convert_move (va_r, nextarg, 0);
4734 /* Expand EXP, a call to __builtin_va_start. */
4736 static rtx
4737 expand_builtin_va_start (tree exp)
4739 rtx nextarg;
4740 tree valist;
4742 if (call_expr_nargs (exp) < 2)
4744 error ("too few arguments to function %<va_start%>");
4745 return const0_rtx;
4748 if (fold_builtin_next_arg (exp, true))
4749 return const0_rtx;
4751 nextarg = expand_builtin_next_arg ();
4752 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4754 if (targetm.expand_builtin_va_start)
4755 targetm.expand_builtin_va_start (valist, nextarg);
4756 else
4757 std_expand_builtin_va_start (valist, nextarg);
4759 return const0_rtx;
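/* Illustrative sketch, not part of the original source: the user-level
   stdarg sequence whose pieces are expanded here and in the va_arg/va_end
   routines below.  Hypothetical example function.  */
#if 0  /* example only; never compiled */
#include <stdarg.h>

static int
example_sum (int count, ...)
{
  va_list ap;
  int i, total = 0;

  va_start (ap, count);          /* expand_builtin_va_start  */
  for (i = 0; i < count; i++)
    total += va_arg (ap, int);   /* gimplify_va_arg_expr  */
  va_end (ap);                   /* expand_builtin_va_end  */
  return total;
}
#endif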
4762 /* The "standard" implementation of va_arg: read the value from the
4763 current (padded) address and increment by the (padded) size. */
4765 tree
4766 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4767 gimple_seq *post_p)
4769 tree addr, t, type_size, rounded_size, valist_tmp;
4770 unsigned HOST_WIDE_INT align, boundary;
4771 bool indirect;
4773 #ifdef ARGS_GROW_DOWNWARD
4774 /* All of the alignment and movement below is for args-grow-up machines.
4775 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4776 implement their own specialized gimplify_va_arg_expr routines. */
4777 gcc_unreachable ();
4778 #endif
4780 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4781 if (indirect)
4782 type = build_pointer_type (type);
4784 align = PARM_BOUNDARY / BITS_PER_UNIT;
4785 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4787 /* When the caller aligns a parameter on the stack, an alignment
4788 beyond PREFERRED_STACK_BOUNDARY is reduced to
4789 PREFERRED_STACK_BOUNDARY, so make the same assumption here in
4790 the callee. */
4791 if (boundary > PREFERRED_STACK_BOUNDARY)
4792 boundary = PREFERRED_STACK_BOUNDARY;
4794 boundary /= BITS_PER_UNIT;
4796 /* Hoist the valist value into a temporary for the moment. */
4797 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4799 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4800 requires greater alignment, we must perform dynamic alignment. */
4801 if (boundary > align
4802 && !integer_zerop (TYPE_SIZE (type)))
4804 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4805 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4806 valist_tmp, size_int (boundary - 1)));
4807 gimplify_and_add (t, pre_p);
4809 t = fold_convert (sizetype, valist_tmp);
4810 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4811 fold_convert (TREE_TYPE (valist),
4812 fold_build2 (BIT_AND_EXPR, sizetype, t,
4813 size_int (-boundary))));
4814 gimplify_and_add (t, pre_p);
4816 else
4817 boundary = align;
4819 /* If the actual alignment is less than the alignment of the type,
4820 adjust the type accordingly so that we don't assume strict alignment
4821 when dereferencing the pointer. */
4822 boundary *= BITS_PER_UNIT;
4823 if (boundary < TYPE_ALIGN (type))
4825 type = build_variant_type_copy (type);
4826 TYPE_ALIGN (type) = boundary;
4829 /* Compute the rounded size of the type. */
4830 type_size = size_in_bytes (type);
4831 rounded_size = round_up (type_size, align);
4833 /* Reduce rounded_size so it's sharable with the postqueue. */
4834 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4836 /* Get AP. */
4837 addr = valist_tmp;
4838 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4840 /* Small args are padded downward. */
4841 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4842 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4843 size_binop (MINUS_EXPR, rounded_size, type_size));
4844 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4847 /* Compute new value for AP. */
4848 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4849 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4850 gimplify_and_add (t, pre_p);
4852 addr = fold_convert (build_pointer_type (type), addr);
4854 if (indirect)
4855 addr = build_va_arg_indirect_ref (addr);
4857 return build_va_arg_indirect_ref (addr);
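/* Illustrative sketch, not part of the original source: the dynamic
   re-alignment computed above, in plain pointer arithmetic.  Rounding an
   address up to a power-of-two BOUNDARY adds BOUNDARY - 1 and masks off the
   low bits, which is what the POINTER_PLUS_EXPR and BIT_AND_EXPR above
   build.  Hypothetical example function.  */
#if 0  /* example only; never compiled */
#include <stdint.h>

static void *
example_align_up (void *p, uintptr_t boundary)   /* boundary is a power of 2 */
{
  uintptr_t a = (uintptr_t) p;
  return (void *) ((a + boundary - 1) & -boundary);
}
#endif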
4860 /* Build an indirect-ref expression over the given TREE, which represents a
4861 piece of a va_arg() expansion. */
4862 tree
4863 build_va_arg_indirect_ref (tree addr)
4865 addr = build_fold_indirect_ref (addr);
4867 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4868 mf_mark (addr);
4870 return addr;
4873 /* Return a dummy expression of type TYPE in order to keep going after an
4874 error. */
4876 static tree
4877 dummy_object (tree type)
4879 tree t = build_int_cst (build_pointer_type (type), 0);
4880 return build1 (INDIRECT_REF, type, t);
4883 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4884 builtin function, but a very special sort of operator. */
4886 enum gimplify_status
4887 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4889 tree promoted_type, have_va_type;
4890 tree valist = TREE_OPERAND (*expr_p, 0);
4891 tree type = TREE_TYPE (*expr_p);
4892 tree t;
4894 /* Verify that valist is of the proper type. */
4895 have_va_type = TREE_TYPE (valist);
4896 if (have_va_type == error_mark_node)
4897 return GS_ERROR;
4898 have_va_type = targetm.canonical_va_list_type (have_va_type);
4900 if (have_va_type == NULL_TREE)
4902 error ("first argument to %<va_arg%> not of type %<va_list%>");
4903 return GS_ERROR;
4906 /* Generate a diagnostic for requesting data of a type that cannot
4907 be passed through `...' due to type promotion at the call site. */
4908 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4909 != type)
4911 static bool gave_help;
4913 /* Unfortunately, this is merely undefined, rather than a constraint
4914 violation, so we cannot make this an error. If this call is never
4915 executed, the program is still strictly conforming. */
4916 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4917 type, promoted_type);
4918 if (! gave_help)
4920 gave_help = true;
4921 inform ("(so you should pass %qT not %qT to %<va_arg%>)",
4922 promoted_type, type);
4925 /* We can, however, treat "undefined" any way we please.
4926 Call abort to encourage the user to fix the program. */
4927 inform ("if this code is reached, the program will abort");
4928 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4929 gimplify_and_add (t, pre_p);
4931 /* This is dead code, but go ahead and finish so that the
4932 mode of the result comes out right. */
4933 *expr_p = dummy_object (type);
4934 return GS_ALL_DONE;
4936 else
4938 /* Make it easier for the backends by protecting the valist argument
4939 from multiple evaluations. */
4940 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4942 /* For this case, the backends will be expecting a pointer to
4943 TREE_TYPE (abi), but it's possible we've
4944 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4945 So fix it. */
4946 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4948 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4949 valist = build_fold_addr_expr_with_type (valist, p1);
4952 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4954 else
4955 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4957 if (!targetm.gimplify_va_arg_expr)
4958 /* FIXME: Once most targets are converted we should merely
4959 assert this is non-null. */
4960 return GS_ALL_DONE;
4962 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4963 return GS_OK;
4967 /* Expand EXP, a call to __builtin_va_end. */
4969 static rtx
4970 expand_builtin_va_end (tree exp)
4972 tree valist = CALL_EXPR_ARG (exp, 0);
4974 /* Evaluate for side effects, if needed. I hate macros that don't
4975 do that. */
4976 if (TREE_SIDE_EFFECTS (valist))
4977 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4979 return const0_rtx;
4982 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4983 builtin rather than just as an assignment in stdarg.h because of the
4984 nastiness of array-type va_list types. */
4986 static rtx
4987 expand_builtin_va_copy (tree exp)
4989 tree dst, src, t;
4991 dst = CALL_EXPR_ARG (exp, 0);
4992 src = CALL_EXPR_ARG (exp, 1);
4994 dst = stabilize_va_list (dst, 1);
4995 src = stabilize_va_list (src, 0);
4997 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4999 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5001 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5002 TREE_SIDE_EFFECTS (t) = 1;
5003 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5005 else
5007 rtx dstb, srcb, size;
5009 /* Evaluate to pointers. */
5010 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5011 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5012 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5013 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5015 dstb = convert_memory_address (Pmode, dstb);
5016 srcb = convert_memory_address (Pmode, srcb);
5018 /* "Dereference" to BLKmode memories. */
5019 dstb = gen_rtx_MEM (BLKmode, dstb);
5020 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5021 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5022 srcb = gen_rtx_MEM (BLKmode, srcb);
5023 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5024 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5026 /* Copy. */
5027 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5030 return const0_rtx;
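/* Illustrative sketch, not part of the original source: why va_copy is a
   builtin rather than a plain assignment.  When va_list is an array type
   (as on some ABIs), `dst = src' is not valid C, so the copy becomes the
   block move emitted in the branch above; for scalar va_list a simple
   assignment suffices.  Hypothetical example function.  */
#if 0  /* example only; never compiled */
#include <stdarg.h>

static int
example_va_copy_use (int count, ...)
{
  va_list ap, ap2;
  int first = 0;

  va_start (ap, count);
  va_copy (ap2, ap);        /* expanded above: assignment or block move.  */
  if (count > 0)
    first = va_arg (ap2, int);
  va_end (ap2);
  va_end (ap);
  return first;
}
#endif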
5033 /* Expand a call to one of the builtin functions __builtin_frame_address or
5034 __builtin_return_address. */
5036 static rtx
5037 expand_builtin_frame_address (tree fndecl, tree exp)
5039 /* The argument must be a nonnegative integer constant.
5040 It counts the number of frames to scan up the stack.
5041 The value is the frame address or return address saved in that frame. */
5042 if (call_expr_nargs (exp) == 0)
5043 /* Warning about missing arg was already issued. */
5044 return const0_rtx;
5045 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5047 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5048 error ("invalid argument to %<__builtin_frame_address%>");
5049 else
5050 error ("invalid argument to %<__builtin_return_address%>");
5051 return const0_rtx;
5053 else
5055 rtx tem
5056 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5057 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5059 /* Some ports cannot access arbitrary stack frames. */
5060 if (tem == NULL)
5062 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5063 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5064 else
5065 warning (0, "unsupported argument to %<__builtin_return_address%>");
5066 return const0_rtx;
5069 /* For __builtin_frame_address, return what we've got. */
5070 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5071 return tem;
5073 if (!REG_P (tem)
5074 && ! CONSTANT_P (tem))
5075 tem = copy_to_mode_reg (Pmode, tem);
5076 return tem;
5080 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5081 we failed and the caller should emit a normal call, otherwise try to get
5082 the result in TARGET, if convenient. */
5084 static rtx
5085 expand_builtin_alloca (tree exp, rtx target)
5087 rtx op0;
5088 rtx result;
5090 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5091 should always expand to function calls. These can be intercepted
5092 in libmudflap. */
5093 if (flag_mudflap)
5094 return NULL_RTX;
5096 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5097 return NULL_RTX;
5099 /* Compute the argument. */
5100 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5102 /* Allocate the desired space. */
5103 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5104 result = convert_memory_address (ptr_mode, result);
5106 return result;
5109 /* Expand a call to a bswap builtin in EXP, computing the operand in
5110 SUBTARGET and placing the result in TARGET if convenient. */
5112 static rtx
5113 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5115 enum machine_mode mode;
5116 tree arg;
5117 rtx op0;
5119 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5120 return NULL_RTX;
5122 arg = CALL_EXPR_ARG (exp, 0);
5123 mode = TYPE_MODE (TREE_TYPE (arg));
5124 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5126 target = expand_unop (mode, bswap_optab, op0, target, 1);
5128 gcc_assert (target);
5130 return convert_to_mode (mode, target, 0);
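/* Illustrative sketch, not part of the original source: what the bswap
   expansion above computes, shown for a 32-bit operand.  Hypothetical
   example function.  */
#if 0  /* example only; never compiled */
#include <stdint.h>

static uint32_t
example_bswap32 (uint32_t x)
{
  return ((x & 0x000000ffu) << 24)
	 | ((x & 0x0000ff00u) << 8)
	 | ((x & 0x00ff0000u) >> 8)
	 | ((x & 0xff000000u) >> 24);
}
#endif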
5133 /* Expand a call to a unary builtin in EXP.
5134 Return NULL_RTX if a normal call should be emitted rather than expanding the
5135 function in-line. If convenient, the result should be placed in TARGET.
5136 SUBTARGET may be used as the target for computing one of EXP's operands. */
5138 static rtx
5139 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5140 rtx subtarget, optab op_optab)
5142 rtx op0;
5144 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5145 return NULL_RTX;
5147 /* Compute the argument. */
5148 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5149 VOIDmode, EXPAND_NORMAL);
5150 /* Compute op, into TARGET if possible.
5151 Set TARGET to wherever the result comes back. */
5152 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5153 op_optab, op0, target, 1);
5154 gcc_assert (target);
5156 return convert_to_mode (target_mode, target, 0);
5159 /* If the string passed to fputs is a constant and is one character
5160 long, we attempt to transform this call into __builtin_fputc(). */
5162 static rtx
5163 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5165 /* Verify the arguments in the original call. */
5166 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5168 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5169 CALL_EXPR_ARG (exp, 1),
5170 (target == const0_rtx),
5171 unlocked, NULL_TREE);
5172 if (result)
5173 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5175 return NULL_RTX;
5178 /* Expand a call to __builtin_expect. We just return our argument
5179 as the builtin_expect semantics should already have been handled by the
5180 tree branch prediction pass. */
5182 static rtx
5183 expand_builtin_expect (tree exp, rtx target)
5185 tree arg, c;
5187 if (call_expr_nargs (exp) < 2)
5188 return const0_rtx;
5189 arg = CALL_EXPR_ARG (exp, 0);
5190 c = CALL_EXPR_ARG (exp, 1);
5192 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5193 /* When guessing was done, the hints should be already stripped away. */
5194 gcc_assert (!flag_guess_branch_prob
5195 || optimize == 0 || errorcount || sorrycount);
5196 return target;
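/* Illustrative sketch, not part of the original source: typical user-level
   use of __builtin_expect.  By the time expansion runs, the hint has already
   been consumed by branch prediction, so only the value itself is returned
   above.  Hypothetical example function.  */
#if 0  /* example only; never compiled */
static int
example_expect (int x)
{
  if (__builtin_expect (x == 0, 0))   /* "x == 0 is unlikely"  */
    return -1;
  return x * 2;
}
#endif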
5199 void
5200 expand_builtin_trap (void)
5202 #ifdef HAVE_trap
5203 if (HAVE_trap)
5204 emit_insn (gen_trap ());
5205 else
5206 #endif
5207 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5208 emit_barrier ();
5211 /* Expand EXP, a call to fabs, fabsf or fabsl.
5212 Return NULL_RTX if a normal call should be emitted rather than expanding
5213 the function inline. If convenient, the result should be placed
5214 in TARGET. SUBTARGET may be used as the target for computing
5215 the operand. */
5217 static rtx
5218 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5220 enum machine_mode mode;
5221 tree arg;
5222 rtx op0;
5224 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5225 return NULL_RTX;
5227 arg = CALL_EXPR_ARG (exp, 0);
5228 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5229 mode = TYPE_MODE (TREE_TYPE (arg));
5230 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5231 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5234 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5235 Return NULL_RTX if a normal call should be emitted rather than expanding the
5236 function inline. If convenient, the result should be placed in TARGET.
5237 SUBTARGET may be used as the target for computing the operand. */
5239 static rtx
5240 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5242 rtx op0, op1;
5243 tree arg;
5245 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5246 return NULL_RTX;
5248 arg = CALL_EXPR_ARG (exp, 0);
5249 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5251 arg = CALL_EXPR_ARG (exp, 1);
5252 op1 = expand_normal (arg);
5254 return expand_copysign (op0, op1, target);
5257 /* Create a new constant string literal and return a char* pointer to it.
5258 The STRING_CST value is the LEN characters at STR. */
5259 tree
5260 build_string_literal (int len, const char *str)
5262 tree t, elem, index, type;
5264 t = build_string (len, str);
5265 elem = build_type_variant (char_type_node, 1, 0);
5266 index = build_index_type (size_int (len - 1));
5267 type = build_array_type (elem, index);
5268 TREE_TYPE (t) = type;
5269 TREE_CONSTANT (t) = 1;
5270 TREE_READONLY (t) = 1;
5271 TREE_STATIC (t) = 1;
5273 type = build_pointer_type (elem);
5274 t = build1 (ADDR_EXPR, type,
5275 build4 (ARRAY_REF, elem,
5276 t, integer_zero_node, NULL_TREE, NULL_TREE));
5277 return t;
5280 /* Expand EXP, a call to printf or printf_unlocked.
5281 Return NULL_RTX if a normal call should be emitted rather than transforming
5282 the function inline. If convenient, the result should be placed in
5283 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5284 call. */
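/* Illustrative rewrites performed below when the printf return value
   is unused; S and C are hypothetical variables:

       printf ("%s\n", s);    becomes   puts (s);
       printf ("%c", c);      becomes   putchar (c);
       printf ("x");          becomes   putchar ('x');
       printf ("hello\n");    becomes   puts ("hello");
       printf ("");           is removed entirely.  */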
5285 static rtx
5286 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5287 bool unlocked)
5289 /* If we're using an unlocked function, assume the other unlocked
5290 functions exist explicitly. */
5291 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5292 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5293 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5294 : implicit_built_in_decls[BUILT_IN_PUTS];
5295 const char *fmt_str;
5296 tree fn = 0;
5297 tree fmt, arg;
5298 int nargs = call_expr_nargs (exp);
5300 /* If the return value is used, don't do the transformation. */
5301 if (target != const0_rtx)
5302 return NULL_RTX;
5304 /* Verify the required arguments in the original call. */
5305 if (nargs == 0)
5306 return NULL_RTX;
5307 fmt = CALL_EXPR_ARG (exp, 0);
5308 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5309 return NULL_RTX;
5311 /* Check whether the format is a literal string constant. */
5312 fmt_str = c_getstr (fmt);
5313 if (fmt_str == NULL)
5314 return NULL_RTX;
5316 if (!init_target_chars ())
5317 return NULL_RTX;
5319 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5320 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5322 if ((nargs != 2)
5323 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5324 return NULL_RTX;
5325 if (fn_puts)
5326 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5328 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5329 else if (strcmp (fmt_str, target_percent_c) == 0)
5331 if ((nargs != 2)
5332 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5333 return NULL_RTX;
5334 if (fn_putchar)
5335 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5337 else
5339 /* We can't handle anything else with % args or %% ... yet. */
5340 if (strchr (fmt_str, target_percent))
5341 return NULL_RTX;
5343 if (nargs > 1)
5344 return NULL_RTX;
5346 /* If the format specifier was "", printf does nothing. */
5347 if (fmt_str[0] == '\0')
5348 return const0_rtx;
5349 /* If the format specifier has length of 1, call putchar. */
5350 if (fmt_str[1] == '\0')
5352 /* Given printf ("c"), where c is any single character,
5353 convert "c"[0] to an int and pass that to the replacement
5354 function. */
5355 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5356 if (fn_putchar)
5357 fn = build_call_expr (fn_putchar, 1, arg);
5359 else
5361 /* If the format specifier was "string\n", call puts("string"). */
5362 size_t len = strlen (fmt_str);
5363 if ((unsigned char)fmt_str[len - 1] == target_newline)
5365 /* Create a NUL-terminated string that's one char shorter
5366 than the original, stripping off the trailing '\n'. */
5367 char *newstr = XALLOCAVEC (char, len);
5368 memcpy (newstr, fmt_str, len - 1);
5369 newstr[len - 1] = 0;
5370 arg = build_string_literal (len, newstr);
5371 if (fn_puts)
5372 fn = build_call_expr (fn_puts, 1, arg);
5374 else
5375 /* We'd like to arrange to call fputs(string,stdout) here,
5376 but we need stdout and don't have a way to get it yet. */
5377 return NULL_RTX;
5381 if (!fn)
5382 return NULL_RTX;
5383 if (TREE_CODE (fn) == CALL_EXPR)
5384 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5385 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5388 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5389 Return NULL_RTX if a normal call should be emitted rather than transforming
5390 the function inline. If convenient, the result should be placed in
5391 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5392 call. */
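/* Illustrative rewrites performed below when the fprintf return value
   is unused; FP, S and C are hypothetical variables:

       fprintf (fp, "%s", s);    becomes   fputs (s, fp);
       fprintf (fp, "%c", c);    becomes   fputc (c, fp);
       fprintf (fp, "plain");    becomes   fputs ("plain", fp);
       fprintf (fp, "");         merely evaluates FP for side effects.  */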
5393 static rtx
5394 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5395 bool unlocked)
5397 /* If we're using an unlocked function, assume the other unlocked
5398 functions exist explicitly. */
5399 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5400 : implicit_built_in_decls[BUILT_IN_FPUTC];
5401 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5402 : implicit_built_in_decls[BUILT_IN_FPUTS];
5403 const char *fmt_str;
5404 tree fn = 0;
5405 tree fmt, fp, arg;
5406 int nargs = call_expr_nargs (exp);
5408 /* If the return value is used, don't do the transformation. */
5409 if (target != const0_rtx)
5410 return NULL_RTX;
5412 /* Verify the required arguments in the original call. */
5413 if (nargs < 2)
5414 return NULL_RTX;
5415 fp = CALL_EXPR_ARG (exp, 0);
5416 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5417 return NULL_RTX;
5418 fmt = CALL_EXPR_ARG (exp, 1);
5419 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5420 return NULL_RTX;
5422 /* Check whether the format is a literal string constant. */
5423 fmt_str = c_getstr (fmt);
5424 if (fmt_str == NULL)
5425 return NULL_RTX;
5427 if (!init_target_chars ())
5428 return NULL_RTX;
5430 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5431 if (strcmp (fmt_str, target_percent_s) == 0)
5433 if ((nargs != 3)
5434 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5435 return NULL_RTX;
5436 arg = CALL_EXPR_ARG (exp, 2);
5437 if (fn_fputs)
5438 fn = build_call_expr (fn_fputs, 2, arg, fp);
5440 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5441 else if (strcmp (fmt_str, target_percent_c) == 0)
5443 if ((nargs != 3)
5444 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5445 return NULL_RTX;
5446 arg = CALL_EXPR_ARG (exp, 2);
5447 if (fn_fputc)
5448 fn = build_call_expr (fn_fputc, 2, arg, fp);
5450 else
5452 /* We can't handle anything else with % args or %% ... yet. */
5453 if (strchr (fmt_str, target_percent))
5454 return NULL_RTX;
5456 if (nargs > 2)
5457 return NULL_RTX;
5459 /* If the format specifier was "", fprintf does nothing. */
5460 if (fmt_str[0] == '\0')
5462 /* Evaluate and ignore FILE* argument for side-effects. */
5463 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5464 return const0_rtx;
5467 /* When "string" doesn't contain %, replace all cases of
5468 fprintf(stream,string) with fputs(string,stream). The fputs
5469 builtin will take care of special cases like length == 1. */
5470 if (fn_fputs)
5471 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5474 if (!fn)
5475 return NULL_RTX;
5476 if (TREE_CODE (fn) == CALL_EXPR)
5477 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5478 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5481 /* Expand a call EXP to sprintf. Return NULL_RTX if
5482 a normal call should be emitted rather than expanding the function
5483 inline. If convenient, the result should be placed in TARGET with
5484 mode MODE. */
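/* Illustrative rewrites performed below; BUF and S are hypothetical:

       sprintf (buf, "hello");    becomes   strcpy (buf, "hello"),
                                            with 5 as the value if used;
       sprintf (buf, "%s", s);    becomes   strcpy (buf, s), with the
                                            length of S as the value only
                                            when it is a compile-time
                                            constant.  */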
5486 static rtx
5487 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5489 tree dest, fmt;
5490 const char *fmt_str;
5491 int nargs = call_expr_nargs (exp);
5493 /* Verify the required arguments in the original call. */
5494 if (nargs < 2)
5495 return NULL_RTX;
5496 dest = CALL_EXPR_ARG (exp, 0);
5497 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5498 return NULL_RTX;
5499 fmt = CALL_EXPR_ARG (exp, 1);
5500 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5501 return NULL_RTX;
5503 /* Check whether the format is a literal string constant. */
5504 fmt_str = c_getstr (fmt);
5505 if (fmt_str == NULL)
5506 return NULL_RTX;
5508 if (!init_target_chars ())
5509 return NULL_RTX;
5511 /* If the format doesn't contain % args or %%, use strcpy. */
5512 if (strchr (fmt_str, target_percent) == 0)
5514 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5515 tree exp;
5517 if ((nargs > 2) || ! fn)
5518 return NULL_RTX;
5519 expand_expr (build_call_expr (fn, 2, dest, fmt),
5520 const0_rtx, VOIDmode, EXPAND_NORMAL);
5521 if (target == const0_rtx)
5522 return const0_rtx;
5523 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5524 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5526 /* If the format is "%s", use strcpy; when the result is used it must be the argument's constant length. */
5527 else if (strcmp (fmt_str, target_percent_s) == 0)
5529 tree fn, arg, len;
5530 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5532 if (! fn)
5533 return NULL_RTX;
5534 if (nargs != 3)
5535 return NULL_RTX;
5536 arg = CALL_EXPR_ARG (exp, 2);
5537 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5538 return NULL_RTX;
5540 if (target != const0_rtx)
5542 len = c_strlen (arg, 1);
5543 if (! len || TREE_CODE (len) != INTEGER_CST)
5544 return NULL_RTX;
5546 else
5547 len = NULL_TREE;
5549 expand_expr (build_call_expr (fn, 2, dest, arg),
5550 const0_rtx, VOIDmode, EXPAND_NORMAL);
5552 if (target == const0_rtx)
5553 return const0_rtx;
5554 return expand_expr (len, target, mode, EXPAND_NORMAL);
5557 return NULL_RTX;
5560 /* Expand a call to either the entry or exit function profiler. */
5562 static rtx
5563 expand_builtin_profile_func (bool exitp)
5565 rtx this, which;
5567 this = DECL_RTL (current_function_decl);
5568 gcc_assert (MEM_P (this));
5569 this = XEXP (this, 0);
5571 if (exitp)
5572 which = profile_function_exit_libfunc;
5573 else
5574 which = profile_function_entry_libfunc;
5576 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5577 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5579 Pmode);
5581 return const0_rtx;
5584 /* Expand a call to __builtin___clear_cache. */
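/* Typical use, e.g. in a JIT that has just written instructions into
   a buffer; BUF and SIZE are hypothetical:

       memcpy (buf, generated_code, size);
       __builtin___clear_cache (buf, buf + size);

   Depending on the target this expands to nothing, to a clear_cache
   insn, or to a call to the libgcc __clear_cache routine, as the
   cases below explain.  */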
5586 static rtx
5587 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5589 #ifndef HAVE_clear_cache
5590 #ifdef CLEAR_INSN_CACHE
5591 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5592 does something. Just do the default expansion to a call to
5593 __clear_cache(). */
5594 return NULL_RTX;
5595 #else
5596 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5597 does nothing. There is no need to call it. Do nothing. */
5598 return const0_rtx;
5599 #endif /* CLEAR_INSN_CACHE */
5600 #else
5601 /* We have a "clear_cache" insn, and it will handle everything. */
5602 tree begin, end;
5603 rtx begin_rtx, end_rtx;
5604 enum insn_code icode;
5606 /* We must not expand to a library call. If we did, any
5607 fallback library function in libgcc that might contain a call to
5608 __builtin___clear_cache() would recurse infinitely. */
5609 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5611 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5612 return const0_rtx;
5615 if (HAVE_clear_cache)
5617 icode = CODE_FOR_clear_cache;
5619 begin = CALL_EXPR_ARG (exp, 0);
5620 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5621 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5622 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5623 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5625 end = CALL_EXPR_ARG (exp, 1);
5626 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5627 end_rtx = convert_memory_address (Pmode, end_rtx);
5628 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5629 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5631 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5633 return const0_rtx;
5634 #endif /* HAVE_clear_cache */
5637 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
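/* For example, with a TRAMPOLINE_ALIGNMENT of 64 bits the code below
   computes (tramp + 7) & -8, so an address of 0x1003 is rounded up to
   0x1008 while an already aligned 0x1008 is left unchanged.  */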
5639 static rtx
5640 round_trampoline_addr (rtx tramp)
5642 rtx temp, addend, mask;
5644 /* If we don't need too much alignment, we'll have been guaranteed
5645 proper alignment by get_trampoline_type. */
5646 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5647 return tramp;
5649 /* Round address up to desired boundary. */
5650 temp = gen_reg_rtx (Pmode);
5651 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5652 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5654 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5655 temp, 0, OPTAB_LIB_WIDEN);
5656 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5657 temp, 0, OPTAB_LIB_WIDEN);
5659 return tramp;
5662 static rtx
5663 expand_builtin_init_trampoline (tree exp)
5665 tree t_tramp, t_func, t_chain;
5666 rtx r_tramp, r_func, r_chain;
5667 #ifdef TRAMPOLINE_TEMPLATE
5668 rtx blktramp;
5669 #endif
5671 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5672 POINTER_TYPE, VOID_TYPE))
5673 return NULL_RTX;
5675 t_tramp = CALL_EXPR_ARG (exp, 0);
5676 t_func = CALL_EXPR_ARG (exp, 1);
5677 t_chain = CALL_EXPR_ARG (exp, 2);
5679 r_tramp = expand_normal (t_tramp);
5680 r_func = expand_normal (t_func);
5681 r_chain = expand_normal (t_chain);
5683 /* Generate insns to initialize the trampoline. */
5684 r_tramp = round_trampoline_addr (r_tramp);
5685 #ifdef TRAMPOLINE_TEMPLATE
5686 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5687 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5688 emit_block_move (blktramp, assemble_trampoline_template (),
5689 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5690 #endif
5691 trampolines_created = 1;
5692 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5694 return const0_rtx;
5697 static rtx
5698 expand_builtin_adjust_trampoline (tree exp)
5700 rtx tramp;
5702 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5703 return NULL_RTX;
5705 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5706 tramp = round_trampoline_addr (tramp);
5707 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5708 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5709 #endif
5711 return tramp;
5714 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5715 function. The function first checks whether the back end provides
5716 an insn to implement signbit for the respective mode. If not, it
5717 checks whether the floating point format of the value is such that
5718 the sign bit can be extracted. If that is not the case, the
5719 function returns NULL_RTX to indicate that a normal call should be
5720 emitted rather than expanding the function in-line. EXP is the
5721 expression that is a call to the builtin function; if convenient,
5722 the result should be placed in TARGET. */
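/* As concrete instances of the fallback paths below: for an IEEE
   float argument (sign bit 31) returning int, a single AND with
   0x80000000 suffices; for a double on a 64-bit word target (sign
   bit 63) the bit lies outside the int result, so the value is
   shifted right by 63 and then ANDed with 1.  */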
5723 static rtx
5724 expand_builtin_signbit (tree exp, rtx target)
5726 const struct real_format *fmt;
5727 enum machine_mode fmode, imode, rmode;
5728 HOST_WIDE_INT hi, lo;
5729 tree arg;
5730 int word, bitpos;
5731 enum insn_code icode;
5732 rtx temp;
5734 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5735 return NULL_RTX;
5737 arg = CALL_EXPR_ARG (exp, 0);
5738 fmode = TYPE_MODE (TREE_TYPE (arg));
5739 rmode = TYPE_MODE (TREE_TYPE (exp));
5740 fmt = REAL_MODE_FORMAT (fmode);
5742 arg = builtin_save_expr (arg);
5744 /* Expand the argument yielding a RTX expression. */
5745 temp = expand_normal (arg);
5747 /* Check if the back end provides an insn that handles signbit for the
5748 argument's mode. */
5749 icode = signbit_optab->handlers [(int) fmode].insn_code;
5750 if (icode != CODE_FOR_nothing)
5752 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5753 emit_unop_insn (icode, target, temp, UNKNOWN);
5754 return target;
5757 /* For floating point formats without a sign bit, implement signbit
5758 as "ARG < 0.0". */
5759 bitpos = fmt->signbit_ro;
5760 if (bitpos < 0)
5762 /* But we can't do this if the format supports signed zero. */
5763 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5764 return NULL_RTX;
5766 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5767 build_real (TREE_TYPE (arg), dconst0));
5768 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5771 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5773 imode = int_mode_for_mode (fmode);
5774 if (imode == BLKmode)
5775 return NULL_RTX;
5776 temp = gen_lowpart (imode, temp);
5778 else
5780 imode = word_mode;
5781 /* Handle targets with different FP word orders. */
5782 if (FLOAT_WORDS_BIG_ENDIAN)
5783 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5784 else
5785 word = bitpos / BITS_PER_WORD;
5786 temp = operand_subword_force (temp, word, fmode);
5787 bitpos = bitpos % BITS_PER_WORD;
5790 /* Force the intermediate word_mode (or narrower) result into a
5791 register. This avoids attempting to create paradoxical SUBREGs
5792 of floating point modes below. */
5793 temp = force_reg (imode, temp);
5795 /* If the bitpos is within the "result mode" lowpart, the operation
5796 can be implemented with a single bitwise AND. Otherwise, we need
5797 a right shift and an AND. */
5799 if (bitpos < GET_MODE_BITSIZE (rmode))
5801 if (bitpos < HOST_BITS_PER_WIDE_INT)
5803 hi = 0;
5804 lo = (HOST_WIDE_INT) 1 << bitpos;
5806 else
5808 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5809 lo = 0;
5812 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5813 temp = gen_lowpart (rmode, temp);
5814 temp = expand_binop (rmode, and_optab, temp,
5815 immed_double_const (lo, hi, rmode),
5816 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5818 else
5820 /* Perform a logical right shift to place the signbit in the least
5821 significant bit, then truncate the result to the desired mode
5822 and mask just this bit. */
5823 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5824 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5825 temp = gen_lowpart (rmode, temp);
5826 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5827 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5830 return temp;
5833 /* Expand fork or exec calls. TARGET is the desired target of the
5834 call. EXP is the call. FN is the
5835 identifier of the actual function. IGNORE is nonzero if the
5836 value is to be ignored. */
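/* For example, when compiling with -fprofile-arcs a call such as

       pid = fork ();

   is rewritten below into a call to __gcov_fork, a profiling-runtime
   wrapper around the real fork; the exec* family is redirected to the
   corresponding __gcov_exec* wrappers in the same way.  PID is a
   hypothetical variable.  */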
5838 static rtx
5839 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5841 tree id, decl;
5842 tree call;
5844 /* If we are not profiling, just call the function. */
5845 if (!profile_arc_flag)
5846 return NULL_RTX;
5848 /* Otherwise call the wrapper. This should be equivalent for the rest of
5849 the compiler, so the code does not diverge, and the wrapper may run the
5850 code necessary for keeping the profiling sane. */
5852 switch (DECL_FUNCTION_CODE (fn))
5854 case BUILT_IN_FORK:
5855 id = get_identifier ("__gcov_fork");
5856 break;
5858 case BUILT_IN_EXECL:
5859 id = get_identifier ("__gcov_execl");
5860 break;
5862 case BUILT_IN_EXECV:
5863 id = get_identifier ("__gcov_execv");
5864 break;
5866 case BUILT_IN_EXECLP:
5867 id = get_identifier ("__gcov_execlp");
5868 break;
5870 case BUILT_IN_EXECLE:
5871 id = get_identifier ("__gcov_execle");
5872 break;
5874 case BUILT_IN_EXECVP:
5875 id = get_identifier ("__gcov_execvp");
5876 break;
5878 case BUILT_IN_EXECVE:
5879 id = get_identifier ("__gcov_execve");
5880 break;
5882 default:
5883 gcc_unreachable ();
5886 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5887 DECL_EXTERNAL (decl) = 1;
5888 TREE_PUBLIC (decl) = 1;
5889 DECL_ARTIFICIAL (decl) = 1;
5890 TREE_NOTHROW (decl) = 1;
5891 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5892 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5893 call = rewrite_call_expr (exp, 0, decl, 0);
5894 return expand_call (call, target, ignore);
5899 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5900 the pointer in these functions is void*, the tree optimizers may remove
5901 casts. The mode computed in expand_builtin isn't reliable either, due
5902 to __sync_bool_compare_and_swap.
5904 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5905 group of builtins. This gives us log2 of the mode size. */
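/* For example, __sync_fetch_and_add_4 has FCODE_DIFF == 2, so the
   size requested below is BITS_PER_UNIT << 2 == 32 bits, i.e. SImode;
   the _16 variants ask for a 128-bit integer mode.  */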
5907 static inline enum machine_mode
5908 get_builtin_sync_mode (int fcode_diff)
5910 /* The size is not negotiable, so ask not to get BLKmode in return
5911 if the target indicates that a smaller size would be better. */
5912 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5915 /* Expand the memory expression LOC and return the appropriate memory operand
5916 for the builtin_sync operations. */
5918 static rtx
5919 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5921 rtx addr, mem;
5923 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5925 /* Note that we explicitly do not want any alias information for this
5926 memory, so that we kill all other live memories. Otherwise we don't
5927 satisfy the full barrier semantics of the intrinsic. */
5928 mem = validize_mem (gen_rtx_MEM (mode, addr));
5930 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5931 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5932 MEM_VOLATILE_P (mem) = 1;
5934 return mem;
5937 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5938 EXP is the CALL_EXPR. CODE is the rtx code
5939 that corresponds to the arithmetic or logical operation from the name;
5940 an exception here is that NOT actually means NAND. TARGET is an optional
5941 place for us to store the results; AFTER is true for the xxx_and_fetch
5942 form and false for the fetch_and_xxx form. IGNORE is true if we don't care about
5943 the result of the operation at all. */
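/* Sketch of the source forms handled here, with X a hypothetical int:

       __sync_fetch_and_add (&x, 1)    AFTER == false, returns the old x
       __sync_add_and_fetch (&x, 1)    AFTER == true, returns the new x

   CODE == NOT stands for the NAND variants rather than a plain bitwise
   NOT.  When the result is ignored, both forms reduce to the same
   atomic read-modify-write.  */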
5945 static rtx
5946 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5947 enum rtx_code code, bool after,
5948 rtx target, bool ignore)
5950 rtx val, mem;
5951 enum machine_mode old_mode;
5953 /* Expand the operands. */
5954 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5956 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5957 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5958 of CONST_INTs, where we know the old_mode only from the call argument. */
5959 old_mode = GET_MODE (val);
5960 if (old_mode == VOIDmode)
5961 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5962 val = convert_modes (mode, old_mode, val, 1);
5964 if (ignore)
5965 return expand_sync_operation (mem, val, code);
5966 else
5967 return expand_sync_fetch_operation (mem, val, code, after, target);
5970 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5971 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5972 true if this is the boolean form. TARGET is a place for us to store the
5973 results; this is NOT optional if IS_BOOL is true. */
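/* Source-level sketch; X, OLDV and NEWV are hypothetical:

       __sync_val_compare_and_swap (&x, oldv, newv)
         atomically performs  if (x == oldv) x = newv;  and returns
         the previous value of x, while
       __sync_bool_compare_and_swap (&x, oldv, newv)
         returns nonzero only if the store was performed.  */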
5975 static rtx
5976 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5977 bool is_bool, rtx target)
5979 rtx old_val, new_val, mem;
5980 enum machine_mode old_mode;
5982 /* Expand the operands. */
5983 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5986 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5987 mode, EXPAND_NORMAL);
5988 /* If OLD_VAL is promoted to a wider mode, convert it back to MODE. Take care
5989 of CONST_INTs, where we know the old_mode only from the call argument. */
5990 old_mode = GET_MODE (old_val);
5991 if (old_mode == VOIDmode)
5992 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5993 old_val = convert_modes (mode, old_mode, old_val, 1);
5995 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5996 mode, EXPAND_NORMAL);
5997 /* If NEW_VAL is promoted to a wider mode, convert it back to MODE. Take care
5998 of CONST_INTs, where we know the old_mode only from the call argument. */
5999 old_mode = GET_MODE (new_val);
6000 if (old_mode == VOIDmode)
6001 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6002 new_val = convert_modes (mode, old_mode, new_val, 1);
6004 if (is_bool)
6005 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6006 else
6007 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6010 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6011 general form is actually an atomic exchange, and some targets only
6012 support a reduced form with the second argument being a constant 1.
6013 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6014 the results. */
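/* Typical use as a simple spinlock acquire; LOCK is a hypothetical
   volatile int:

       while (__sync_lock_test_and_set (&lock, 1))
         continue;

   The loop spins until the previous value was 0.  This builtin is an
   acquire barrier only; the matching release is __sync_lock_release,
   handled further below.  */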
6016 static rtx
6017 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6018 rtx target)
6020 rtx val, mem;
6021 enum machine_mode old_mode;
6023 /* Expand the operands. */
6024 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6025 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6026 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6027 of CONST_INTs, where we know the old_mode only from the call argument. */
6028 old_mode = GET_MODE (val);
6029 if (old_mode == VOIDmode)
6030 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6031 val = convert_modes (mode, old_mode, val, 1);
6033 return expand_sync_lock_test_and_set (mem, val, target);
6036 /* Expand the __sync_synchronize intrinsic. */
6038 static void
6039 expand_builtin_synchronize (void)
6041 tree x;
6043 #ifdef HAVE_memory_barrier
6044 if (HAVE_memory_barrier)
6046 emit_insn (gen_memory_barrier ());
6047 return;
6049 #endif
6051 if (synchronize_libfunc != NULL_RTX)
6053 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6054 return;
6057 /* If no explicit memory barrier instruction is available, create an
6058 empty asm stmt with a memory clobber. */
6059 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6060 tree_cons (NULL, build_string (6, "memory"), NULL));
6061 ASM_VOLATILE_P (x) = 1;
6062 expand_asm_expr (x);
6065 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6067 static void
6068 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6070 enum insn_code icode;
6071 rtx mem, insn;
6072 rtx val = const0_rtx;
6074 /* Expand the operands. */
6075 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6077 /* If there is an explicit operation in the md file, use it. */
6078 icode = sync_lock_release[mode];
6079 if (icode != CODE_FOR_nothing)
6081 if (!insn_data[icode].operand[1].predicate (val, mode))
6082 val = force_reg (mode, val);
6084 insn = GEN_FCN (icode) (mem, val);
6085 if (insn)
6087 emit_insn (insn);
6088 return;
6092 /* Otherwise we can implement this operation by emitting a barrier
6093 followed by a store of zero. */
6094 expand_builtin_synchronize ();
6095 emit_move_insn (mem, val);
6098 /* Expand an expression EXP that calls a built-in function,
6099 with result going to TARGET if that's convenient
6100 (and in mode MODE if that's convenient).
6101 SUBTARGET may be used as the target for computing one of EXP's operands.
6102 IGNORE is nonzero if the value is to be ignored. */
6105 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6106 int ignore)
6108 tree fndecl = get_callee_fndecl (exp);
6109 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6110 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6112 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6113 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6115 /* When not optimizing, generate calls to library functions for a certain
6116 set of builtins. */
6117 if (!optimize
6118 && !called_as_built_in (fndecl)
6119 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6120 && fcode != BUILT_IN_ALLOCA)
6121 return expand_call (exp, target, ignore);
6123 /* The built-in function expanders test for target == const0_rtx
6124 to determine whether the function's result will be ignored. */
6125 if (ignore)
6126 target = const0_rtx;
6128 /* If the result of a pure or const built-in function is ignored, and
6129 none of its arguments are volatile, we can avoid expanding the
6130 built-in call and just evaluate the arguments for side-effects. */
6131 if (target == const0_rtx
6132 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6134 bool volatilep = false;
6135 tree arg;
6136 call_expr_arg_iterator iter;
6138 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6139 if (TREE_THIS_VOLATILE (arg))
6141 volatilep = true;
6142 break;
6145 if (! volatilep)
6147 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6148 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6149 return const0_rtx;
6153 switch (fcode)
6155 CASE_FLT_FN (BUILT_IN_FABS):
6156 target = expand_builtin_fabs (exp, target, subtarget);
6157 if (target)
6158 return target;
6159 break;
6161 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6162 target = expand_builtin_copysign (exp, target, subtarget);
6163 if (target)
6164 return target;
6165 break;
6167 /* Just do a normal library call if we were unable to fold
6168 the values. */
6169 CASE_FLT_FN (BUILT_IN_CABS):
6170 break;
6172 CASE_FLT_FN (BUILT_IN_EXP):
6173 CASE_FLT_FN (BUILT_IN_EXP10):
6174 CASE_FLT_FN (BUILT_IN_POW10):
6175 CASE_FLT_FN (BUILT_IN_EXP2):
6176 CASE_FLT_FN (BUILT_IN_EXPM1):
6177 CASE_FLT_FN (BUILT_IN_LOGB):
6178 CASE_FLT_FN (BUILT_IN_LOG):
6179 CASE_FLT_FN (BUILT_IN_LOG10):
6180 CASE_FLT_FN (BUILT_IN_LOG2):
6181 CASE_FLT_FN (BUILT_IN_LOG1P):
6182 CASE_FLT_FN (BUILT_IN_TAN):
6183 CASE_FLT_FN (BUILT_IN_ASIN):
6184 CASE_FLT_FN (BUILT_IN_ACOS):
6185 CASE_FLT_FN (BUILT_IN_ATAN):
6186 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6187 because of possible accuracy problems. */
6188 if (! flag_unsafe_math_optimizations)
6189 break;
6190 CASE_FLT_FN (BUILT_IN_SQRT):
6191 CASE_FLT_FN (BUILT_IN_FLOOR):
6192 CASE_FLT_FN (BUILT_IN_CEIL):
6193 CASE_FLT_FN (BUILT_IN_TRUNC):
6194 CASE_FLT_FN (BUILT_IN_ROUND):
6195 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6196 CASE_FLT_FN (BUILT_IN_RINT):
6197 target = expand_builtin_mathfn (exp, target, subtarget);
6198 if (target)
6199 return target;
6200 break;
6202 CASE_FLT_FN (BUILT_IN_ILOGB):
6203 if (! flag_unsafe_math_optimizations)
6204 break;
6205 CASE_FLT_FN (BUILT_IN_ISINF):
6206 CASE_FLT_FN (BUILT_IN_FINITE):
6207 case BUILT_IN_ISFINITE:
6208 case BUILT_IN_ISNORMAL:
6209 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6210 if (target)
6211 return target;
6212 break;
6214 CASE_FLT_FN (BUILT_IN_LCEIL):
6215 CASE_FLT_FN (BUILT_IN_LLCEIL):
6216 CASE_FLT_FN (BUILT_IN_LFLOOR):
6217 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6218 target = expand_builtin_int_roundingfn (exp, target);
6219 if (target)
6220 return target;
6221 break;
6223 CASE_FLT_FN (BUILT_IN_LRINT):
6224 CASE_FLT_FN (BUILT_IN_LLRINT):
6225 CASE_FLT_FN (BUILT_IN_LROUND):
6226 CASE_FLT_FN (BUILT_IN_LLROUND):
6227 target = expand_builtin_int_roundingfn_2 (exp, target);
6228 if (target)
6229 return target;
6230 break;
6232 CASE_FLT_FN (BUILT_IN_POW):
6233 target = expand_builtin_pow (exp, target, subtarget);
6234 if (target)
6235 return target;
6236 break;
6238 CASE_FLT_FN (BUILT_IN_POWI):
6239 target = expand_builtin_powi (exp, target, subtarget);
6240 if (target)
6241 return target;
6242 break;
6244 CASE_FLT_FN (BUILT_IN_ATAN2):
6245 CASE_FLT_FN (BUILT_IN_LDEXP):
6246 CASE_FLT_FN (BUILT_IN_SCALB):
6247 CASE_FLT_FN (BUILT_IN_SCALBN):
6248 CASE_FLT_FN (BUILT_IN_SCALBLN):
6249 if (! flag_unsafe_math_optimizations)
6250 break;
6252 CASE_FLT_FN (BUILT_IN_FMOD):
6253 CASE_FLT_FN (BUILT_IN_REMAINDER):
6254 CASE_FLT_FN (BUILT_IN_DREM):
6255 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6256 if (target)
6257 return target;
6258 break;
6260 CASE_FLT_FN (BUILT_IN_CEXPI):
6261 target = expand_builtin_cexpi (exp, target, subtarget);
6262 gcc_assert (target);
6263 return target;
6265 CASE_FLT_FN (BUILT_IN_SIN):
6266 CASE_FLT_FN (BUILT_IN_COS):
6267 if (! flag_unsafe_math_optimizations)
6268 break;
6269 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6270 if (target)
6271 return target;
6272 break;
6274 CASE_FLT_FN (BUILT_IN_SINCOS):
6275 if (! flag_unsafe_math_optimizations)
6276 break;
6277 target = expand_builtin_sincos (exp);
6278 if (target)
6279 return target;
6280 break;
6282 case BUILT_IN_APPLY_ARGS:
6283 return expand_builtin_apply_args ();
6285 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6286 FUNCTION with a copy of the parameters described by
6287 ARGUMENTS, and ARGSIZE. It returns a block of memory
6288 allocated on the stack into which is stored all the registers
6289 that might possibly be used for returning the result of a
6290 function. ARGUMENTS is the value returned by
6291 __builtin_apply_args. ARGSIZE is the number of bytes of
6292 arguments that must be copied. ??? How should this value be
6293 computed? We'll also need a safe worst case value for varargs
6294 functions. */
6295 case BUILT_IN_APPLY:
6296 if (!validate_arglist (exp, POINTER_TYPE,
6297 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6298 && !validate_arglist (exp, REFERENCE_TYPE,
6299 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6300 return const0_rtx;
6301 else
6303 rtx ops[3];
6305 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6306 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6307 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6309 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6312 /* __builtin_return (RESULT) causes the function to return the
6313 value described by RESULT. RESULT is address of the block of
6314 memory returned by __builtin_apply. */
6315 case BUILT_IN_RETURN:
6316 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6317 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6318 return const0_rtx;
6320 case BUILT_IN_SAVEREGS:
6321 return expand_builtin_saveregs ();
6323 case BUILT_IN_ARGS_INFO:
6324 return expand_builtin_args_info (exp);
6326 case BUILT_IN_VA_ARG_PACK:
6327 /* All valid uses of __builtin_va_arg_pack () are removed during
6328 inlining. */
6329 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6330 return const0_rtx;
6332 case BUILT_IN_VA_ARG_PACK_LEN:
6333 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6334 inlining. */
6335 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6336 return const0_rtx;
6338 /* Return the address of the first anonymous stack arg. */
6339 case BUILT_IN_NEXT_ARG:
6340 if (fold_builtin_next_arg (exp, false))
6341 return const0_rtx;
6342 return expand_builtin_next_arg ();
6344 case BUILT_IN_CLEAR_CACHE:
6345 target = expand_builtin___clear_cache (exp);
6346 if (target)
6347 return target;
6348 break;
6350 case BUILT_IN_CLASSIFY_TYPE:
6351 return expand_builtin_classify_type (exp);
6353 case BUILT_IN_CONSTANT_P:
6354 return const0_rtx;
6356 case BUILT_IN_FRAME_ADDRESS:
6357 case BUILT_IN_RETURN_ADDRESS:
6358 return expand_builtin_frame_address (fndecl, exp);
6360 /* Returns the address of the area where the structure is returned.
6361 0 otherwise. */
6362 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6363 if (call_expr_nargs (exp) != 0
6364 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6365 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6366 return const0_rtx;
6367 else
6368 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6370 case BUILT_IN_ALLOCA:
6371 target = expand_builtin_alloca (exp, target);
6372 if (target)
6373 return target;
6374 break;
6376 case BUILT_IN_STACK_SAVE:
6377 return expand_stack_save ();
6379 case BUILT_IN_STACK_RESTORE:
6380 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6381 return const0_rtx;
6383 case BUILT_IN_BSWAP32:
6384 case BUILT_IN_BSWAP64:
6385 target = expand_builtin_bswap (exp, target, subtarget);
6387 if (target)
6388 return target;
6389 break;
6391 CASE_INT_FN (BUILT_IN_FFS):
6392 case BUILT_IN_FFSIMAX:
6393 target = expand_builtin_unop (target_mode, exp, target,
6394 subtarget, ffs_optab);
6395 if (target)
6396 return target;
6397 break;
6399 CASE_INT_FN (BUILT_IN_CLZ):
6400 case BUILT_IN_CLZIMAX:
6401 target = expand_builtin_unop (target_mode, exp, target,
6402 subtarget, clz_optab);
6403 if (target)
6404 return target;
6405 break;
6407 CASE_INT_FN (BUILT_IN_CTZ):
6408 case BUILT_IN_CTZIMAX:
6409 target = expand_builtin_unop (target_mode, exp, target,
6410 subtarget, ctz_optab);
6411 if (target)
6412 return target;
6413 break;
6415 CASE_INT_FN (BUILT_IN_POPCOUNT):
6416 case BUILT_IN_POPCOUNTIMAX:
6417 target = expand_builtin_unop (target_mode, exp, target,
6418 subtarget, popcount_optab);
6419 if (target)
6420 return target;
6421 break;
6423 CASE_INT_FN (BUILT_IN_PARITY):
6424 case BUILT_IN_PARITYIMAX:
6425 target = expand_builtin_unop (target_mode, exp, target,
6426 subtarget, parity_optab);
6427 if (target)
6428 return target;
6429 break;
6431 case BUILT_IN_STRLEN:
6432 target = expand_builtin_strlen (exp, target, target_mode);
6433 if (target)
6434 return target;
6435 break;
6437 case BUILT_IN_STRCPY:
6438 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6439 if (target)
6440 return target;
6441 break;
6443 case BUILT_IN_STRNCPY:
6444 target = expand_builtin_strncpy (exp, target, mode);
6445 if (target)
6446 return target;
6447 break;
6449 case BUILT_IN_STPCPY:
6450 target = expand_builtin_stpcpy (exp, target, mode);
6451 if (target)
6452 return target;
6453 break;
6455 case BUILT_IN_STRCAT:
6456 target = expand_builtin_strcat (fndecl, exp, target, mode);
6457 if (target)
6458 return target;
6459 break;
6461 case BUILT_IN_STRNCAT:
6462 target = expand_builtin_strncat (exp, target, mode);
6463 if (target)
6464 return target;
6465 break;
6467 case BUILT_IN_STRSPN:
6468 target = expand_builtin_strspn (exp, target, mode);
6469 if (target)
6470 return target;
6471 break;
6473 case BUILT_IN_STRCSPN:
6474 target = expand_builtin_strcspn (exp, target, mode);
6475 if (target)
6476 return target;
6477 break;
6479 case BUILT_IN_STRSTR:
6480 target = expand_builtin_strstr (exp, target, mode);
6481 if (target)
6482 return target;
6483 break;
6485 case BUILT_IN_STRPBRK:
6486 target = expand_builtin_strpbrk (exp, target, mode);
6487 if (target)
6488 return target;
6489 break;
6491 case BUILT_IN_INDEX:
6492 case BUILT_IN_STRCHR:
6493 target = expand_builtin_strchr (exp, target, mode);
6494 if (target)
6495 return target;
6496 break;
6498 case BUILT_IN_RINDEX:
6499 case BUILT_IN_STRRCHR:
6500 target = expand_builtin_strrchr (exp, target, mode);
6501 if (target)
6502 return target;
6503 break;
6505 case BUILT_IN_MEMCPY:
6506 target = expand_builtin_memcpy (exp, target, mode);
6507 if (target)
6508 return target;
6509 break;
6511 case BUILT_IN_MEMPCPY:
6512 target = expand_builtin_mempcpy (exp, target, mode);
6513 if (target)
6514 return target;
6515 break;
6517 case BUILT_IN_MEMMOVE:
6518 target = expand_builtin_memmove (exp, target, mode, ignore);
6519 if (target)
6520 return target;
6521 break;
6523 case BUILT_IN_BCOPY:
6524 target = expand_builtin_bcopy (exp, ignore);
6525 if (target)
6526 return target;
6527 break;
6529 case BUILT_IN_MEMSET:
6530 target = expand_builtin_memset (exp, target, mode);
6531 if (target)
6532 return target;
6533 break;
6535 case BUILT_IN_BZERO:
6536 target = expand_builtin_bzero (exp);
6537 if (target)
6538 return target;
6539 break;
6541 case BUILT_IN_STRCMP:
6542 target = expand_builtin_strcmp (exp, target, mode);
6543 if (target)
6544 return target;
6545 break;
6547 case BUILT_IN_STRNCMP:
6548 target = expand_builtin_strncmp (exp, target, mode);
6549 if (target)
6550 return target;
6551 break;
6553 case BUILT_IN_MEMCHR:
6554 target = expand_builtin_memchr (exp, target, mode);
6555 if (target)
6556 return target;
6557 break;
6559 case BUILT_IN_BCMP:
6560 case BUILT_IN_MEMCMP:
6561 target = expand_builtin_memcmp (exp, target, mode);
6562 if (target)
6563 return target;
6564 break;
6566 case BUILT_IN_SETJMP:
6567 /* This should have been lowered to the builtins below. */
6568 gcc_unreachable ();
6570 case BUILT_IN_SETJMP_SETUP:
6571 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6572 and the receiver label. */
6573 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6575 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6576 VOIDmode, EXPAND_NORMAL);
6577 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6578 rtx label_r = label_rtx (label);
6580 /* This is copied from the handling of non-local gotos. */
6581 expand_builtin_setjmp_setup (buf_addr, label_r);
6582 nonlocal_goto_handler_labels
6583 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6584 nonlocal_goto_handler_labels);
6585 /* ??? Do not let expand_label treat us as such since we would
6586 not want to be both on the list of non-local labels and on
6587 the list of forced labels. */
6588 FORCED_LABEL (label) = 0;
6589 return const0_rtx;
6591 break;
6593 case BUILT_IN_SETJMP_DISPATCHER:
6594 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6595 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6597 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6598 rtx label_r = label_rtx (label);
6600 /* Remove the dispatcher label from the list of non-local labels
6601 since the receiver labels have been added to it above. */
6602 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6603 return const0_rtx;
6605 break;
6607 case BUILT_IN_SETJMP_RECEIVER:
6608 /* __builtin_setjmp_receiver is passed the receiver label. */
6609 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6611 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6612 rtx label_r = label_rtx (label);
6614 expand_builtin_setjmp_receiver (label_r);
6615 return const0_rtx;
6617 break;
6619 /* __builtin_longjmp is passed a pointer to an array of five words.
6620 It's similar to the C library longjmp function but works with
6621 __builtin_setjmp above. */
6622 case BUILT_IN_LONGJMP:
6623 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6625 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6626 VOIDmode, EXPAND_NORMAL);
6627 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6629 if (value != const1_rtx)
6631 error ("%<__builtin_longjmp%> second argument must be 1");
6632 return const0_rtx;
6635 expand_builtin_longjmp (buf_addr, value);
6636 return const0_rtx;
6638 break;
6640 case BUILT_IN_NONLOCAL_GOTO:
6641 target = expand_builtin_nonlocal_goto (exp);
6642 if (target)
6643 return target;
6644 break;
6646 /* This updates the setjmp buffer that is its argument with the value
6647 of the current stack pointer. */
6648 case BUILT_IN_UPDATE_SETJMP_BUF:
6649 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6651 rtx buf_addr
6652 = expand_normal (CALL_EXPR_ARG (exp, 0));
6654 expand_builtin_update_setjmp_buf (buf_addr);
6655 return const0_rtx;
6657 break;
6659 case BUILT_IN_TRAP:
6660 expand_builtin_trap ();
6661 return const0_rtx;
6663 case BUILT_IN_PRINTF:
6664 target = expand_builtin_printf (exp, target, mode, false);
6665 if (target)
6666 return target;
6667 break;
6669 case BUILT_IN_PRINTF_UNLOCKED:
6670 target = expand_builtin_printf (exp, target, mode, true);
6671 if (target)
6672 return target;
6673 break;
6675 case BUILT_IN_FPUTS:
6676 target = expand_builtin_fputs (exp, target, false);
6677 if (target)
6678 return target;
6679 break;
6680 case BUILT_IN_FPUTS_UNLOCKED:
6681 target = expand_builtin_fputs (exp, target, true);
6682 if (target)
6683 return target;
6684 break;
6686 case BUILT_IN_FPRINTF:
6687 target = expand_builtin_fprintf (exp, target, mode, false);
6688 if (target)
6689 return target;
6690 break;
6692 case BUILT_IN_FPRINTF_UNLOCKED:
6693 target = expand_builtin_fprintf (exp, target, mode, true);
6694 if (target)
6695 return target;
6696 break;
6698 case BUILT_IN_SPRINTF:
6699 target = expand_builtin_sprintf (exp, target, mode);
6700 if (target)
6701 return target;
6702 break;
6704 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6705 case BUILT_IN_SIGNBITD32:
6706 case BUILT_IN_SIGNBITD64:
6707 case BUILT_IN_SIGNBITD128:
6708 target = expand_builtin_signbit (exp, target);
6709 if (target)
6710 return target;
6711 break;
6713 /* Various hooks for the DWARF 2 __throw routine. */
6714 case BUILT_IN_UNWIND_INIT:
6715 expand_builtin_unwind_init ();
6716 return const0_rtx;
6717 case BUILT_IN_DWARF_CFA:
6718 return virtual_cfa_rtx;
6719 #ifdef DWARF2_UNWIND_INFO
6720 case BUILT_IN_DWARF_SP_COLUMN:
6721 return expand_builtin_dwarf_sp_column ();
6722 case BUILT_IN_INIT_DWARF_REG_SIZES:
6723 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6724 return const0_rtx;
6725 #endif
6726 case BUILT_IN_FROB_RETURN_ADDR:
6727 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6728 case BUILT_IN_EXTRACT_RETURN_ADDR:
6729 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6730 case BUILT_IN_EH_RETURN:
6731 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6732 CALL_EXPR_ARG (exp, 1));
6733 return const0_rtx;
6734 #ifdef EH_RETURN_DATA_REGNO
6735 case BUILT_IN_EH_RETURN_DATA_REGNO:
6736 return expand_builtin_eh_return_data_regno (exp);
6737 #endif
6738 case BUILT_IN_EXTEND_POINTER:
6739 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6741 case BUILT_IN_VA_START:
6742 return expand_builtin_va_start (exp);
6743 case BUILT_IN_VA_END:
6744 return expand_builtin_va_end (exp);
6745 case BUILT_IN_VA_COPY:
6746 return expand_builtin_va_copy (exp);
6747 case BUILT_IN_EXPECT:
6748 return expand_builtin_expect (exp, target);
6749 case BUILT_IN_PREFETCH:
6750 expand_builtin_prefetch (exp);
6751 return const0_rtx;
6753 case BUILT_IN_PROFILE_FUNC_ENTER:
6754 return expand_builtin_profile_func (false);
6755 case BUILT_IN_PROFILE_FUNC_EXIT:
6756 return expand_builtin_profile_func (true);
6758 case BUILT_IN_INIT_TRAMPOLINE:
6759 return expand_builtin_init_trampoline (exp);
6760 case BUILT_IN_ADJUST_TRAMPOLINE:
6761 return expand_builtin_adjust_trampoline (exp);
6763 case BUILT_IN_FORK:
6764 case BUILT_IN_EXECL:
6765 case BUILT_IN_EXECV:
6766 case BUILT_IN_EXECLP:
6767 case BUILT_IN_EXECLE:
6768 case BUILT_IN_EXECVP:
6769 case BUILT_IN_EXECVE:
6770 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6771 if (target)
6772 return target;
6773 break;
6775 case BUILT_IN_FETCH_AND_ADD_1:
6776 case BUILT_IN_FETCH_AND_ADD_2:
6777 case BUILT_IN_FETCH_AND_ADD_4:
6778 case BUILT_IN_FETCH_AND_ADD_8:
6779 case BUILT_IN_FETCH_AND_ADD_16:
6780 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6781 target = expand_builtin_sync_operation (mode, exp, PLUS,
6782 false, target, ignore);
6783 if (target)
6784 return target;
6785 break;
6787 case BUILT_IN_FETCH_AND_SUB_1:
6788 case BUILT_IN_FETCH_AND_SUB_2:
6789 case BUILT_IN_FETCH_AND_SUB_4:
6790 case BUILT_IN_FETCH_AND_SUB_8:
6791 case BUILT_IN_FETCH_AND_SUB_16:
6792 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6793 target = expand_builtin_sync_operation (mode, exp, MINUS,
6794 false, target, ignore);
6795 if (target)
6796 return target;
6797 break;
6799 case BUILT_IN_FETCH_AND_OR_1:
6800 case BUILT_IN_FETCH_AND_OR_2:
6801 case BUILT_IN_FETCH_AND_OR_4:
6802 case BUILT_IN_FETCH_AND_OR_8:
6803 case BUILT_IN_FETCH_AND_OR_16:
6804 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6805 target = expand_builtin_sync_operation (mode, exp, IOR,
6806 false, target, ignore);
6807 if (target)
6808 return target;
6809 break;
6811 case BUILT_IN_FETCH_AND_AND_1:
6812 case BUILT_IN_FETCH_AND_AND_2:
6813 case BUILT_IN_FETCH_AND_AND_4:
6814 case BUILT_IN_FETCH_AND_AND_8:
6815 case BUILT_IN_FETCH_AND_AND_16:
6816 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6817 target = expand_builtin_sync_operation (mode, exp, AND,
6818 false, target, ignore);
6819 if (target)
6820 return target;
6821 break;
6823 case BUILT_IN_FETCH_AND_XOR_1:
6824 case BUILT_IN_FETCH_AND_XOR_2:
6825 case BUILT_IN_FETCH_AND_XOR_4:
6826 case BUILT_IN_FETCH_AND_XOR_8:
6827 case BUILT_IN_FETCH_AND_XOR_16:
6828 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6829 target = expand_builtin_sync_operation (mode, exp, XOR,
6830 false, target, ignore);
6831 if (target)
6832 return target;
6833 break;
6835 case BUILT_IN_FETCH_AND_NAND_1:
6836 case BUILT_IN_FETCH_AND_NAND_2:
6837 case BUILT_IN_FETCH_AND_NAND_4:
6838 case BUILT_IN_FETCH_AND_NAND_8:
6839 case BUILT_IN_FETCH_AND_NAND_16:
6840 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6841 target = expand_builtin_sync_operation (mode, exp, NOT,
6842 false, target, ignore);
6843 if (target)
6844 return target;
6845 break;
6847 case BUILT_IN_ADD_AND_FETCH_1:
6848 case BUILT_IN_ADD_AND_FETCH_2:
6849 case BUILT_IN_ADD_AND_FETCH_4:
6850 case BUILT_IN_ADD_AND_FETCH_8:
6851 case BUILT_IN_ADD_AND_FETCH_16:
6852 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6853 target = expand_builtin_sync_operation (mode, exp, PLUS,
6854 true, target, ignore);
6855 if (target)
6856 return target;
6857 break;
6859 case BUILT_IN_SUB_AND_FETCH_1:
6860 case BUILT_IN_SUB_AND_FETCH_2:
6861 case BUILT_IN_SUB_AND_FETCH_4:
6862 case BUILT_IN_SUB_AND_FETCH_8:
6863 case BUILT_IN_SUB_AND_FETCH_16:
6864 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6865 target = expand_builtin_sync_operation (mode, exp, MINUS,
6866 true, target, ignore);
6867 if (target)
6868 return target;
6869 break;
6871 case BUILT_IN_OR_AND_FETCH_1:
6872 case BUILT_IN_OR_AND_FETCH_2:
6873 case BUILT_IN_OR_AND_FETCH_4:
6874 case BUILT_IN_OR_AND_FETCH_8:
6875 case BUILT_IN_OR_AND_FETCH_16:
6876 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6877 target = expand_builtin_sync_operation (mode, exp, IOR,
6878 true, target, ignore);
6879 if (target)
6880 return target;
6881 break;
6883 case BUILT_IN_AND_AND_FETCH_1:
6884 case BUILT_IN_AND_AND_FETCH_2:
6885 case BUILT_IN_AND_AND_FETCH_4:
6886 case BUILT_IN_AND_AND_FETCH_8:
6887 case BUILT_IN_AND_AND_FETCH_16:
6888 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6889 target = expand_builtin_sync_operation (mode, exp, AND,
6890 true, target, ignore);
6891 if (target)
6892 return target;
6893 break;
6895 case BUILT_IN_XOR_AND_FETCH_1:
6896 case BUILT_IN_XOR_AND_FETCH_2:
6897 case BUILT_IN_XOR_AND_FETCH_4:
6898 case BUILT_IN_XOR_AND_FETCH_8:
6899 case BUILT_IN_XOR_AND_FETCH_16:
6900 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6901 target = expand_builtin_sync_operation (mode, exp, XOR,
6902 true, target, ignore);
6903 if (target)
6904 return target;
6905 break;
6907 case BUILT_IN_NAND_AND_FETCH_1:
6908 case BUILT_IN_NAND_AND_FETCH_2:
6909 case BUILT_IN_NAND_AND_FETCH_4:
6910 case BUILT_IN_NAND_AND_FETCH_8:
6911 case BUILT_IN_NAND_AND_FETCH_16:
6912 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6913 target = expand_builtin_sync_operation (mode, exp, NOT,
6914 true, target, ignore);
6915 if (target)
6916 return target;
6917 break;
6919 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6920 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6921 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6922 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6923 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6924 if (mode == VOIDmode)
6925 mode = TYPE_MODE (boolean_type_node);
6926 if (!target || !register_operand (target, mode))
6927 target = gen_reg_rtx (mode);
6929 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6930 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6931 if (target)
6932 return target;
6933 break;
6935 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6936 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6937 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6938 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6939 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6940 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6941 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6942 if (target)
6943 return target;
6944 break;
6946 case BUILT_IN_LOCK_TEST_AND_SET_1:
6947 case BUILT_IN_LOCK_TEST_AND_SET_2:
6948 case BUILT_IN_LOCK_TEST_AND_SET_4:
6949 case BUILT_IN_LOCK_TEST_AND_SET_8:
6950 case BUILT_IN_LOCK_TEST_AND_SET_16:
6951 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6952 target = expand_builtin_lock_test_and_set (mode, exp, target);
6953 if (target)
6954 return target;
6955 break;
6957 case BUILT_IN_LOCK_RELEASE_1:
6958 case BUILT_IN_LOCK_RELEASE_2:
6959 case BUILT_IN_LOCK_RELEASE_4:
6960 case BUILT_IN_LOCK_RELEASE_8:
6961 case BUILT_IN_LOCK_RELEASE_16:
6962 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6963 expand_builtin_lock_release (mode, exp);
6964 return const0_rtx;
6966 case BUILT_IN_SYNCHRONIZE:
6967 expand_builtin_synchronize ();
6968 return const0_rtx;
6970 case BUILT_IN_OBJECT_SIZE:
6971 return expand_builtin_object_size (exp);
6973 case BUILT_IN_MEMCPY_CHK:
6974 case BUILT_IN_MEMPCPY_CHK:
6975 case BUILT_IN_MEMMOVE_CHK:
6976 case BUILT_IN_MEMSET_CHK:
6977 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6978 if (target)
6979 return target;
6980 break;
6982 case BUILT_IN_STRCPY_CHK:
6983 case BUILT_IN_STPCPY_CHK:
6984 case BUILT_IN_STRNCPY_CHK:
6985 case BUILT_IN_STRCAT_CHK:
6986 case BUILT_IN_STRNCAT_CHK:
6987 case BUILT_IN_SNPRINTF_CHK:
6988 case BUILT_IN_VSNPRINTF_CHK:
6989 maybe_emit_chk_warning (exp, fcode);
6990 break;
6992 case BUILT_IN_SPRINTF_CHK:
6993 case BUILT_IN_VSPRINTF_CHK:
6994 maybe_emit_sprintf_chk_warning (exp, fcode);
6995 break;
6997 default: /* just do library call, if unknown builtin */
6998 break;
7001 /* The switch statement above can drop through to cause the function
7002 to be called normally. */
7003 return expand_call (exp, target, ignore);
7006 /* Determine whether a tree node represents a call to a built-in
7007 function. If the tree T is a call to a built-in function with
7008 the right number of arguments of the appropriate types, return
7009 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7010 Otherwise the return value is END_BUILTINS. */
7012 enum built_in_function
7013 builtin_mathfn_code (const_tree t)
7015 const_tree fndecl, arg, parmlist;
7016 const_tree argtype, parmtype;
7017 const_call_expr_arg_iterator iter;
7019 if (TREE_CODE (t) != CALL_EXPR
7020 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7021 return END_BUILTINS;
7023 fndecl = get_callee_fndecl (t);
7024 if (fndecl == NULL_TREE
7025 || TREE_CODE (fndecl) != FUNCTION_DECL
7026 || ! DECL_BUILT_IN (fndecl)
7027 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7028 return END_BUILTINS;
7030 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7031 init_const_call_expr_arg_iterator (t, &iter);
7032 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7034 /* If a function doesn't take a variable number of arguments,
7035 the last element in the list will have type `void'. */
7036 parmtype = TREE_VALUE (parmlist);
7037 if (VOID_TYPE_P (parmtype))
7039 if (more_const_call_expr_args_p (&iter))
7040 return END_BUILTINS;
7041 return DECL_FUNCTION_CODE (fndecl);
7044 if (! more_const_call_expr_args_p (&iter))
7045 return END_BUILTINS;
7047 arg = next_const_call_expr_arg (&iter);
7048 argtype = TREE_TYPE (arg);
7050 if (SCALAR_FLOAT_TYPE_P (parmtype))
7052 if (! SCALAR_FLOAT_TYPE_P (argtype))
7053 return END_BUILTINS;
7055 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7057 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7058 return END_BUILTINS;
7060 else if (POINTER_TYPE_P (parmtype))
7062 if (! POINTER_TYPE_P (argtype))
7063 return END_BUILTINS;
7065 else if (INTEGRAL_TYPE_P (parmtype))
7067 if (! INTEGRAL_TYPE_P (argtype))
7068 return END_BUILTINS;
7070 else
7071 return END_BUILTINS;
7074 /* Variable-length argument list. */
7075 return DECL_FUNCTION_CODE (fndecl);
7078 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7079 evaluate to a constant. */
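/* Examples of what the logic below decides; P and I are hypothetical
   variables of pointer and int type:

       __builtin_constant_p (3)        folds to 1
       __builtin_constant_p ("abc")    folds to 1
       __builtin_constant_p (p)        folds to 0 (pointer type)
       __builtin_constant_p (i + 1)    is left alone (NULL_TREE) so that
                                       later passes may still resolve it,
                                       unless we are folding an
                                       initializer, where it folds to 0.  */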
7081 static tree
7082 fold_builtin_constant_p (tree arg)
7084 /* We return 1 for a numeric type that's known to be a constant
7085 value at compile-time or for an aggregate type that's a
7086 literal constant. */
7087 STRIP_NOPS (arg);
7089 /* If we know this is a constant, return the constant 1. */
7090 if (CONSTANT_CLASS_P (arg)
7091 || (TREE_CODE (arg) == CONSTRUCTOR
7092 && TREE_CONSTANT (arg)))
7093 return integer_one_node;
7094 if (TREE_CODE (arg) == ADDR_EXPR)
7096 tree op = TREE_OPERAND (arg, 0);
7097 if (TREE_CODE (op) == STRING_CST
7098 || (TREE_CODE (op) == ARRAY_REF
7099 && integer_zerop (TREE_OPERAND (op, 1))
7100 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7101 return integer_one_node;
7104 /* If this expression has side effects, show we don't know it to be a
7105 constant. Likewise if it's a pointer or aggregate type since in
7106 those cases we only want literals, since those are only optimized
7107 when generating RTL, not later.
7108 And finally, if we are compiling an initializer, not code, we
7109 need to return a definite result now; there's not going to be any
7110 more optimization done. */
7111 if (TREE_SIDE_EFFECTS (arg)
7112 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7113 || POINTER_TYPE_P (TREE_TYPE (arg))
7114 || cfun == 0
7115 || folding_initializer)
7116 return integer_zero_node;
7118 return NULL_TREE;
7121 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7122 return it as a truthvalue. */
7124 static tree
7125 build_builtin_expect_predicate (tree pred, tree expected)
7127 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7129 fn = built_in_decls[BUILT_IN_EXPECT];
7130 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7131 ret_type = TREE_TYPE (TREE_TYPE (fn));
7132 pred_type = TREE_VALUE (arg_types);
7133 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7135 pred = fold_convert (pred_type, pred);
7136 expected = fold_convert (expected_type, expected);
7137 call_expr = build_call_expr (fn, 2, pred, expected);
7139 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7140 build_int_cst (ret_type, 0));
7143 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7144 NULL_TREE if no simplification is possible. */
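/* For instance, the distribution step below turns

       __builtin_expect (a && b, 1)

   into roughly

       __builtin_expect (a, 1) && __builtin_expect (b, 1)

   (each operand wrapped via build_builtin_expect_predicate), so that
   the hint reaches both conditional jumps.  A and B are hypothetical
   truth values.  */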
7146 static tree
7147 fold_builtin_expect (tree arg0, tree arg1)
7149 tree inner, fndecl;
7150 enum tree_code code;
7152   /* If this is a builtin_expect within a builtin_expect, keep the
7153      inner one.  See through a comparison against a constant.  It
7154      might have been added to create a truthvalue. */
7155 inner = arg0;
7156 if (COMPARISON_CLASS_P (inner)
7157 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7158 inner = TREE_OPERAND (inner, 0);
7160 if (TREE_CODE (inner) == CALL_EXPR
7161 && (fndecl = get_callee_fndecl (inner))
7162 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7163 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7164 return arg0;
7166 /* Distribute the expected value over short-circuiting operators.
7167 See through the cast from truthvalue_type_node to long. */
7168 inner = arg0;
7169 while (TREE_CODE (inner) == NOP_EXPR
7170 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7171 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7172 inner = TREE_OPERAND (inner, 0);
7174 code = TREE_CODE (inner);
7175 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7177 tree op0 = TREE_OPERAND (inner, 0);
7178 tree op1 = TREE_OPERAND (inner, 1);
7180 op0 = build_builtin_expect_predicate (op0, arg1);
7181 op1 = build_builtin_expect_predicate (op1, arg1);
7182 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7184 return fold_convert (TREE_TYPE (arg0), inner);
7187 /* If the argument isn't invariant then there's nothing else we can do. */
7188 if (!TREE_CONSTANT (arg0))
7189 return NULL_TREE;
7191   /* If we expect that a comparison against the argument will fold to
7192      a constant, return the constant.  In practice, this means a true
7193      constant or the address of a non-weak symbol. */
7194 inner = arg0;
7195 STRIP_NOPS (inner);
7196 if (TREE_CODE (inner) == ADDR_EXPR)
7200 inner = TREE_OPERAND (inner, 0);
7202 while (TREE_CODE (inner) == COMPONENT_REF
7203 || TREE_CODE (inner) == ARRAY_REF);
7204 if (DECL_P (inner) && DECL_WEAK (inner))
7205 return NULL_TREE;
7208 /* Otherwise, ARG0 already has the proper type for the return value. */
7209 return arg0;
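/* A rough, illustrative sketch of the distribution step above (examples
   only; the exact casts may differ):

     __builtin_expect (a && b, 1)

   is rewritten as roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   converted back to the type of the original argument, so the hint
   reaches each short-circuited operand.  */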
7212 /* Fold a call to __builtin_classify_type with argument ARG. */
7214 static tree
7215 fold_builtin_classify_type (tree arg)
7217 if (arg == 0)
7218 return build_int_cst (NULL_TREE, no_type_class);
7220 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7223 /* Fold a call to __builtin_strlen with argument ARG. */
7225 static tree
7226 fold_builtin_strlen (tree arg)
7228 if (!validate_arg (arg, POINTER_TYPE))
7229 return NULL_TREE;
7230 else
7232 tree len = c_strlen (arg, 0);
7234 if (len)
7236 /* Convert from the internal "sizetype" type to "size_t". */
7237 if (size_type_node)
7238 len = fold_convert (size_type_node, len);
7239 return len;
7242 return NULL_TREE;
7246 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7248 static tree
7249 fold_builtin_inf (tree type, int warn)
7251 REAL_VALUE_TYPE real;
7253 /* __builtin_inff is intended to be usable to define INFINITY on all
7254 targets. If an infinity is not available, INFINITY expands "to a
7255 positive constant of type float that overflows at translation
7256 time", footnote "In this case, using INFINITY will violate the
7257 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7258 Thus we pedwarn to ensure this constraint violation is
7259 diagnosed. */
7260 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7261 pedwarn (0, "target format does not support infinity");
7263 real_inf (&real);
7264 return build_real (type, real);
7267 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7269 static tree
7270 fold_builtin_nan (tree arg, tree type, int quiet)
7272 REAL_VALUE_TYPE real;
7273 const char *str;
7275 if (!validate_arg (arg, POINTER_TYPE))
7276 return NULL_TREE;
7277 str = c_getstr (arg);
7278 if (!str)
7279 return NULL_TREE;
7281 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7282 return NULL_TREE;
7284 return build_real (type, real);
7287 /* Return true if the floating point expression T has an integer value.
7288 We also allow +Inf, -Inf and NaN to be considered integer values. */
7290 static bool
7291 integer_valued_real_p (tree t)
7293 switch (TREE_CODE (t))
7295 case FLOAT_EXPR:
7296 return true;
7298 case ABS_EXPR:
7299 case SAVE_EXPR:
7300 return integer_valued_real_p (TREE_OPERAND (t, 0));
7302 case COMPOUND_EXPR:
7303 case MODIFY_EXPR:
7304 case BIND_EXPR:
7305 return integer_valued_real_p (TREE_OPERAND (t, 1));
7307 case PLUS_EXPR:
7308 case MINUS_EXPR:
7309 case MULT_EXPR:
7310 case MIN_EXPR:
7311 case MAX_EXPR:
7312 return integer_valued_real_p (TREE_OPERAND (t, 0))
7313 && integer_valued_real_p (TREE_OPERAND (t, 1));
7315 case COND_EXPR:
7316 return integer_valued_real_p (TREE_OPERAND (t, 1))
7317 && integer_valued_real_p (TREE_OPERAND (t, 2));
7319 case REAL_CST:
7320 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7322 case NOP_EXPR:
7324 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7325 if (TREE_CODE (type) == INTEGER_TYPE)
7326 return true;
7327 if (TREE_CODE (type) == REAL_TYPE)
7328 return integer_valued_real_p (TREE_OPERAND (t, 0));
7329 break;
7332 case CALL_EXPR:
7333 switch (builtin_mathfn_code (t))
7335 CASE_FLT_FN (BUILT_IN_CEIL):
7336 CASE_FLT_FN (BUILT_IN_FLOOR):
7337 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7338 CASE_FLT_FN (BUILT_IN_RINT):
7339 CASE_FLT_FN (BUILT_IN_ROUND):
7340 CASE_FLT_FN (BUILT_IN_TRUNC):
7341 return true;
7343 CASE_FLT_FN (BUILT_IN_FMIN):
7344 CASE_FLT_FN (BUILT_IN_FMAX):
7345 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7346 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7348 default:
7349 break;
7351 break;
7353 default:
7354 break;
7356 return false;
7359 /* FNDECL is assumed to be a builtin where truncation can be propagated
7360    across (for instance floor((double)f) == (double)floorf (f)).
7361 Do the transformation for a call with argument ARG. */
7363 static tree
7364 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7366 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7368 if (!validate_arg (arg, REAL_TYPE))
7369 return NULL_TREE;
7371 /* Integer rounding functions are idempotent. */
7372 if (fcode == builtin_mathfn_code (arg))
7373 return arg;
7375 /* If argument is already integer valued, and we don't need to worry
7376 about setting errno, there's no need to perform rounding. */
7377 if (! flag_errno_math && integer_valued_real_p (arg))
7378 return arg;
7380 if (optimize)
7382 tree arg0 = strip_float_extensions (arg);
7383 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7384 tree newtype = TREE_TYPE (arg0);
7385 tree decl;
7387 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7388 && (decl = mathfn_built_in (newtype, fcode)))
7389 return fold_convert (ftype,
7390 build_call_expr (decl, 1,
7391 fold_convert (newtype, arg0)));
7393 return NULL_TREE;
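/* A rough, illustrative sketch of the narrowing above (assuming F has
   type float and the narrower call is available):

     floor ((double) F)   becomes  (double) floorf (F)
     floor (floor (x))    becomes  floor (x)   (idempotent rounding)  */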
7396 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7397 the argument, for instance lround((double)f) -> lroundf (f).
7398 Do the transformation for a call with argument ARG. */
7400 static tree
7401 fold_fixed_mathfn (tree fndecl, tree arg)
7403 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7405 if (!validate_arg (arg, REAL_TYPE))
7406 return NULL_TREE;
7408 /* If argument is already integer valued, and we don't need to worry
7409 about setting errno, there's no need to perform rounding. */
7410 if (! flag_errno_math && integer_valued_real_p (arg))
7411 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7413 if (optimize)
7415 tree ftype = TREE_TYPE (arg);
7416 tree arg0 = strip_float_extensions (arg);
7417 tree newtype = TREE_TYPE (arg0);
7418 tree decl;
7420 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7421 && (decl = mathfn_built_in (newtype, fcode)))
7422 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7425 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7426 sizeof (long long) == sizeof (long). */
7427 if (TYPE_PRECISION (long_long_integer_type_node)
7428 == TYPE_PRECISION (long_integer_type_node))
7430 tree newfn = NULL_TREE;
7431 switch (fcode)
7433 CASE_FLT_FN (BUILT_IN_LLCEIL):
7434 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7435 break;
7437 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7438 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7439 break;
7441 CASE_FLT_FN (BUILT_IN_LLROUND):
7442 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7443 break;
7445 CASE_FLT_FN (BUILT_IN_LLRINT):
7446 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7447 break;
7449 default:
7450 break;
7453 if (newfn)
7455       tree newcall = build_call_expr (newfn, 1, arg);
7456 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7460 return NULL_TREE;
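/* A rough, illustrative sketch of the folds above (assuming F has type
   float, and an LP64 target for the second line):

     lround ((double) F)   becomes  lroundf (F)
     llround (x)           becomes  (long long) lround (x)  */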
7463 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7464 return type. Return NULL_TREE if no simplification can be made. */
7466 static tree
7467 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7469 tree res;
7471 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7472 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7473 return NULL_TREE;
7475 /* Calculate the result when the argument is a constant. */
7476 if (TREE_CODE (arg) == COMPLEX_CST
7477 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7478 type, mpfr_hypot)))
7479 return res;
7481 if (TREE_CODE (arg) == COMPLEX_EXPR)
7483 tree real = TREE_OPERAND (arg, 0);
7484 tree imag = TREE_OPERAND (arg, 1);
7486 /* If either part is zero, cabs is fabs of the other. */
7487 if (real_zerop (real))
7488 return fold_build1 (ABS_EXPR, type, imag);
7489 if (real_zerop (imag))
7490 return fold_build1 (ABS_EXPR, type, real);
7492 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7493 if (flag_unsafe_math_optimizations
7494 && operand_equal_p (real, imag, OEP_PURE_SAME))
7496 const REAL_VALUE_TYPE sqrt2_trunc
7497 = real_value_truncate (TYPE_MODE (type),
7498 *get_real_const (rv_sqrt2));
7499 STRIP_NOPS (real);
7500 return fold_build2 (MULT_EXPR, type,
7501 fold_build1 (ABS_EXPR, type, real),
7502 build_real (type, sqrt2_trunc));
7506 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7507 if (TREE_CODE (arg) == NEGATE_EXPR
7508 || TREE_CODE (arg) == CONJ_EXPR)
7509 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7511 /* Don't do this when optimizing for size. */
7512 if (flag_unsafe_math_optimizations
7513 && optimize && !optimize_size)
7515 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7517 if (sqrtfn != NULL_TREE)
7519 tree rpart, ipart, result;
7521 arg = builtin_save_expr (arg);
7523 rpart = fold_build1 (REALPART_EXPR, type, arg);
7524 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7526 rpart = builtin_save_expr (rpart);
7527 ipart = builtin_save_expr (ipart);
7529 result = fold_build2 (PLUS_EXPR, type,
7530 fold_build2 (MULT_EXPR, type,
7531 rpart, rpart),
7532 fold_build2 (MULT_EXPR, type,
7533 ipart, ipart));
7535 return build_call_expr (sqrtfn, 1, result);
7539 return NULL_TREE;
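/* A rough, illustrative sketch of the cabs folds above (examples only;
   the last line requires -funsafe-math-optimizations and not
   optimizing for size):

     cabs (x + 0.0i)   becomes  fabs (x)
     cabs (-z)         becomes  cabs (z)
     cabs (z)          becomes  sqrt (re*re + im*im)  */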
7542 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7543 Return NULL_TREE if no simplification can be made. */
7545 static tree
7546 fold_builtin_sqrt (tree arg, tree type)
7549 enum built_in_function fcode;
7550 tree res;
7552 if (!validate_arg (arg, REAL_TYPE))
7553 return NULL_TREE;
7555 /* Calculate the result when the argument is a constant. */
7556 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7557 return res;
7559 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7560 fcode = builtin_mathfn_code (arg);
7561 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7563 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7564 arg = fold_build2 (MULT_EXPR, type,
7565 CALL_EXPR_ARG (arg, 0),
7566 build_real (type, dconsthalf));
7567 return build_call_expr (expfn, 1, arg);
7570 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7571 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7573 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7575 if (powfn)
7577 tree arg0 = CALL_EXPR_ARG (arg, 0);
7578 tree tree_root;
7579 /* The inner root was either sqrt or cbrt. */
7580 REAL_VALUE_TYPE dconstroot =
7581 BUILTIN_SQRT_P (fcode) ? dconsthalf : *get_real_const (rv_third);
7583 /* Adjust for the outer root. */
7584 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7585 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7586 tree_root = build_real (type, dconstroot);
7587 return build_call_expr (powfn, 2, arg0, tree_root);
7591 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7592 if (flag_unsafe_math_optimizations
7593 && (fcode == BUILT_IN_POW
7594 || fcode == BUILT_IN_POWF
7595 || fcode == BUILT_IN_POWL))
7597 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7598 tree arg0 = CALL_EXPR_ARG (arg, 0);
7599 tree arg1 = CALL_EXPR_ARG (arg, 1);
7600 tree narg1;
7601 if (!tree_expr_nonnegative_p (arg0))
7602 arg0 = build1 (ABS_EXPR, type, arg0);
7603 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7604 build_real (type, dconsthalf));
7605 return build_call_expr (powfn, 2, arg0, narg1);
7608 return NULL_TREE;
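/* A rough, illustrative sketch of the sqrt folds above (all three
   guarded by -funsafe-math-optimizations):

     sqrt (exp (x))     becomes  exp (x * 0.5)
     sqrt (sqrt (x))    becomes  pow (x, 0.25)
     sqrt (pow (x, y))  becomes  pow (fabs (x), y * 0.5)

   A constant argument is instead evaluated directly through MPFR.  */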
7611 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7612 Return NULL_TREE if no simplification can be made. */
7614 static tree
7615 fold_builtin_cbrt (tree arg, tree type)
7617 const enum built_in_function fcode = builtin_mathfn_code (arg);
7618 tree res;
7620 if (!validate_arg (arg, REAL_TYPE))
7621 return NULL_TREE;
7623 /* Calculate the result when the argument is a constant. */
7624 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7625 return res;
7627 if (flag_unsafe_math_optimizations)
7629 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7630 if (BUILTIN_EXPONENT_P (fcode))
7632 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7633 const REAL_VALUE_TYPE third_trunc =
7634 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_third));
7635 arg = fold_build2 (MULT_EXPR, type,
7636 CALL_EXPR_ARG (arg, 0),
7637 build_real (type, third_trunc));
7638 return build_call_expr (expfn, 1, arg);
7641 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7642 if (BUILTIN_SQRT_P (fcode))
7644 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7646 if (powfn)
7648 tree arg0 = CALL_EXPR_ARG (arg, 0);
7649 tree tree_root;
7650 REAL_VALUE_TYPE dconstroot = *get_real_const (rv_third);
7652 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7653 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7654 tree_root = build_real (type, dconstroot);
7655 return build_call_expr (powfn, 2, arg0, tree_root);
7659 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7660 if (BUILTIN_CBRT_P (fcode))
7662 tree arg0 = CALL_EXPR_ARG (arg, 0);
7663 if (tree_expr_nonnegative_p (arg0))
7665 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7667 if (powfn)
7669 tree tree_root;
7670 REAL_VALUE_TYPE dconstroot;
7672 real_arithmetic (&dconstroot, MULT_EXPR,
7673 get_real_const (rv_third),
7674 get_real_const (rv_third));
7675 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7676 tree_root = build_real (type, dconstroot);
7677 return build_call_expr (powfn, 2, arg0, tree_root);
7682 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7683 if (fcode == BUILT_IN_POW
7684 || fcode == BUILT_IN_POWF
7685 || fcode == BUILT_IN_POWL)
7687 tree arg00 = CALL_EXPR_ARG (arg, 0);
7688 tree arg01 = CALL_EXPR_ARG (arg, 1);
7689 if (tree_expr_nonnegative_p (arg00))
7691 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7692 const REAL_VALUE_TYPE dconstroot
7693 = real_value_truncate (TYPE_MODE (type),
7694 *get_real_const (rv_third));
7695 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7696 build_real (type, dconstroot));
7697 return build_call_expr (powfn, 2, arg00, narg01);
7701 return NULL_TREE;
7704 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7705 TYPE is the type of the return value. Return NULL_TREE if no
7706 simplification can be made. */
7708 static tree
7709 fold_builtin_cos (tree arg, tree type, tree fndecl)
7711 tree res, narg;
7713 if (!validate_arg (arg, REAL_TYPE))
7714 return NULL_TREE;
7716 /* Calculate the result when the argument is a constant. */
7717 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7718 return res;
7720 /* Optimize cos(-x) into cos (x). */
7721 if ((narg = fold_strip_sign_ops (arg)))
7722 return build_call_expr (fndecl, 1, narg);
7724 return NULL_TREE;
7727 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7728 Return NULL_TREE if no simplification can be made. */
7730 static tree
7731 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7733 if (validate_arg (arg, REAL_TYPE))
7735 tree res, narg;
7737 /* Calculate the result when the argument is a constant. */
7738 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7739 return res;
7741 /* Optimize cosh(-x) into cosh (x). */
7742 if ((narg = fold_strip_sign_ops (arg)))
7743 return build_call_expr (fndecl, 1, narg);
7746 return NULL_TREE;
7749 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7750 Return NULL_TREE if no simplification can be made. */
7752 static tree
7753 fold_builtin_tan (tree arg, tree type)
7755 enum built_in_function fcode;
7756 tree res;
7758 if (!validate_arg (arg, REAL_TYPE))
7759 return NULL_TREE;
7761 /* Calculate the result when the argument is a constant. */
7762 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7763 return res;
7765 /* Optimize tan(atan(x)) = x. */
7766 fcode = builtin_mathfn_code (arg);
7767 if (flag_unsafe_math_optimizations
7768 && (fcode == BUILT_IN_ATAN
7769 || fcode == BUILT_IN_ATANF
7770 || fcode == BUILT_IN_ATANL))
7771 return CALL_EXPR_ARG (arg, 0);
7773 return NULL_TREE;
7776 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7777 NULL_TREE if no simplification can be made. */
7779 static tree
7780 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7782 tree type;
7783 tree res, fn, call;
7785 if (!validate_arg (arg0, REAL_TYPE)
7786 || !validate_arg (arg1, POINTER_TYPE)
7787 || !validate_arg (arg2, POINTER_TYPE))
7788 return NULL_TREE;
7790 type = TREE_TYPE (arg0);
7792 /* Calculate the result when the argument is a constant. */
7793 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7794 return res;
7796 /* Canonicalize sincos to cexpi. */
7797 if (!TARGET_C99_FUNCTIONS)
7798 return NULL_TREE;
7799 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7800 if (!fn)
7801 return NULL_TREE;
7803 call = build_call_expr (fn, 1, arg0);
7804 call = builtin_save_expr (call);
7806 return build2 (COMPOUND_EXPR, type,
7807 build2 (MODIFY_EXPR, void_type_node,
7808 build_fold_indirect_ref (arg1),
7809 build1 (IMAGPART_EXPR, type, call)),
7810 build2 (MODIFY_EXPR, void_type_node,
7811 build_fold_indirect_ref (arg2),
7812 build1 (REALPART_EXPR, type, call)));
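/* A rough, illustrative sketch of the canonicalization above (only on
   targets where the C99 cexpi family is usable):

     sincos (x, &s, &c);

   becomes, in effect,

     tmp = cexpi (x);  s = __imag__ tmp;  c = __real__ tmp;

   so later passes only have to handle a single primitive.  */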
7815 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7816 NULL_TREE if no simplification can be made. */
7818 static tree
7819 fold_builtin_cexp (tree arg0, tree type)
7821 tree rtype;
7822 tree realp, imagp, ifn;
7824 if (!validate_arg (arg0, COMPLEX_TYPE))
7825 return NULL_TREE;
7827 rtype = TREE_TYPE (TREE_TYPE (arg0));
7829   /* If we can figure out the real part of arg0 and it is constant zero,
7830      fold to cexpi. */
7831 if (!TARGET_C99_FUNCTIONS)
7832 return NULL_TREE;
7833 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7834 if (!ifn)
7835 return NULL_TREE;
7837 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7838 && real_zerop (realp))
7840 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7841 return build_call_expr (ifn, 1, narg);
7844   /* If we can easily decompose the real and imaginary parts, split cexp
7845      into exp (r) * cexpi (i). */
7846 if (flag_unsafe_math_optimizations
7847 && realp)
7849 tree rfn, rcall, icall;
7851 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7852 if (!rfn)
7853 return NULL_TREE;
7855 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7856 if (!imagp)
7857 return NULL_TREE;
7859 icall = build_call_expr (ifn, 1, imagp);
7860 icall = builtin_save_expr (icall);
7861 rcall = build_call_expr (rfn, 1, realp);
7862 rcall = builtin_save_expr (rcall);
7863 return fold_build2 (COMPLEX_EXPR, type,
7864 fold_build2 (MULT_EXPR, rtype,
7865 rcall,
7866 fold_build1 (REALPART_EXPR, rtype, icall)),
7867 fold_build2 (MULT_EXPR, rtype,
7868 rcall,
7869 fold_build1 (IMAGPART_EXPR, rtype, icall)));
7872 return NULL_TREE;
7875 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7876 Return NULL_TREE if no simplification can be made. */
7878 static tree
7879 fold_builtin_trunc (tree fndecl, tree arg)
7881 if (!validate_arg (arg, REAL_TYPE))
7882 return NULL_TREE;
7884 /* Optimize trunc of constant value. */
7885 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7887 REAL_VALUE_TYPE r, x;
7888 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7890 x = TREE_REAL_CST (arg);
7891 real_trunc (&r, TYPE_MODE (type), &x);
7892 return build_real (type, r);
7895 return fold_trunc_transparent_mathfn (fndecl, arg);
7898 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7899 Return NULL_TREE if no simplification can be made. */
7901 static tree
7902 fold_builtin_floor (tree fndecl, tree arg)
7904 if (!validate_arg (arg, REAL_TYPE))
7905 return NULL_TREE;
7907 /* Optimize floor of constant value. */
7908 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7910 REAL_VALUE_TYPE x;
7912 x = TREE_REAL_CST (arg);
7913 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7915 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7916 REAL_VALUE_TYPE r;
7918 real_floor (&r, TYPE_MODE (type), &x);
7919 return build_real (type, r);
7923 /* Fold floor (x) where x is nonnegative to trunc (x). */
7924 if (tree_expr_nonnegative_p (arg))
7926 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7927 if (truncfn)
7928 return build_call_expr (truncfn, 1, arg);
7931 return fold_trunc_transparent_mathfn (fndecl, arg);
7934 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7935 Return NULL_TREE if no simplification can be made. */
7937 static tree
7938 fold_builtin_ceil (tree fndecl, tree arg)
7940 if (!validate_arg (arg, REAL_TYPE))
7941 return NULL_TREE;
7943 /* Optimize ceil of constant value. */
7944 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7946 REAL_VALUE_TYPE x;
7948 x = TREE_REAL_CST (arg);
7949 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7951 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7952 REAL_VALUE_TYPE r;
7954 real_ceil (&r, TYPE_MODE (type), &x);
7955 return build_real (type, r);
7959 return fold_trunc_transparent_mathfn (fndecl, arg);
7962 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7963 Return NULL_TREE if no simplification can be made. */
7965 static tree
7966 fold_builtin_round (tree fndecl, tree arg)
7968 if (!validate_arg (arg, REAL_TYPE))
7969 return NULL_TREE;
7971 /* Optimize round of constant value. */
7972 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7974 REAL_VALUE_TYPE x;
7976 x = TREE_REAL_CST (arg);
7977 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7979 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7980 REAL_VALUE_TYPE r;
7982 real_round (&r, TYPE_MODE (type), &x);
7983 return build_real (type, r);
7987 return fold_trunc_transparent_mathfn (fndecl, arg);
7990 /* Fold function call to builtin lround, lroundf or lroundl (or the
7991 corresponding long long versions) and other rounding functions. ARG
7992 is the argument to the call. Return NULL_TREE if no simplification
7993 can be made. */
7995 static tree
7996 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7998 if (!validate_arg (arg, REAL_TYPE))
7999 return NULL_TREE;
8001 /* Optimize lround of constant value. */
8002 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8004 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8006 if (real_isfinite (&x))
8008 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8009 tree ftype = TREE_TYPE (arg);
8010 unsigned HOST_WIDE_INT lo2;
8011 HOST_WIDE_INT hi, lo;
8012 REAL_VALUE_TYPE r;
8014 switch (DECL_FUNCTION_CODE (fndecl))
8016 CASE_FLT_FN (BUILT_IN_LFLOOR):
8017 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8018 real_floor (&r, TYPE_MODE (ftype), &x);
8019 break;
8021 CASE_FLT_FN (BUILT_IN_LCEIL):
8022 CASE_FLT_FN (BUILT_IN_LLCEIL):
8023 real_ceil (&r, TYPE_MODE (ftype), &x);
8024 break;
8026 CASE_FLT_FN (BUILT_IN_LROUND):
8027 CASE_FLT_FN (BUILT_IN_LLROUND):
8028 real_round (&r, TYPE_MODE (ftype), &x);
8029 break;
8031 default:
8032 gcc_unreachable ();
8035 REAL_VALUE_TO_INT (&lo, &hi, r);
8036 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8037 return build_int_cst_wide (itype, lo2, hi);
8041 switch (DECL_FUNCTION_CODE (fndecl))
8043 CASE_FLT_FN (BUILT_IN_LFLOOR):
8044 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8045 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8046 if (tree_expr_nonnegative_p (arg))
8047 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8048 arg);
8049 break;
8050 default:;
8053 return fold_fixed_mathfn (fndecl, arg);
8056 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8057    and their long and long long variants (e.g. ffsl and ffsll).  ARG is
8058 the argument to the call. Return NULL_TREE if no simplification can
8059 be made. */
8061 static tree
8062 fold_builtin_bitop (tree fndecl, tree arg)
8064 if (!validate_arg (arg, INTEGER_TYPE))
8065 return NULL_TREE;
8067 /* Optimize for constant argument. */
8068 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8070 HOST_WIDE_INT hi, width, result;
8071 unsigned HOST_WIDE_INT lo;
8072 tree type;
8074 type = TREE_TYPE (arg);
8075 width = TYPE_PRECISION (type);
8076 lo = TREE_INT_CST_LOW (arg);
8078 /* Clear all the bits that are beyond the type's precision. */
8079 if (width > HOST_BITS_PER_WIDE_INT)
8081 hi = TREE_INT_CST_HIGH (arg);
8082 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8083 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8085 else
8087 hi = 0;
8088 if (width < HOST_BITS_PER_WIDE_INT)
8089 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8092 switch (DECL_FUNCTION_CODE (fndecl))
8094 CASE_INT_FN (BUILT_IN_FFS):
8095 if (lo != 0)
8096 result = exact_log2 (lo & -lo) + 1;
8097 else if (hi != 0)
8098 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8099 else
8100 result = 0;
8101 break;
8103 CASE_INT_FN (BUILT_IN_CLZ):
8104 if (hi != 0)
8105 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8106 else if (lo != 0)
8107 result = width - floor_log2 (lo) - 1;
8108 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8109 result = width;
8110 break;
8112 CASE_INT_FN (BUILT_IN_CTZ):
8113 if (lo != 0)
8114 result = exact_log2 (lo & -lo);
8115 else if (hi != 0)
8116 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8117 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8118 result = width;
8119 break;
8121 CASE_INT_FN (BUILT_IN_POPCOUNT):
8122 result = 0;
8123 while (lo)
8124 result++, lo &= lo - 1;
8125 while (hi)
8126 result++, hi &= hi - 1;
8127 break;
8129 CASE_INT_FN (BUILT_IN_PARITY):
8130 result = 0;
8131 while (lo)
8132 result++, lo &= lo - 1;
8133 while (hi)
8134 result++, hi &= hi - 1;
8135 result &= 1;
8136 break;
8138 default:
8139 gcc_unreachable ();
8142 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8145 return NULL_TREE;
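/* A rough, illustrative sketch of the constant folding above (assuming
   a 32-bit int):

     __builtin_popcount (0xF0)   becomes  4
     __builtin_ctz (8)           becomes  3
     __builtin_clz (1)           becomes  31
     __builtin_ffs (0)           becomes  0
     __builtin_parity (7)        becomes  1  */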
8148 /* Fold function call to builtin_bswap and the long and long long
8149 variants. Return NULL_TREE if no simplification can be made. */
8150 static tree
8151 fold_builtin_bswap (tree fndecl, tree arg)
8153 if (! validate_arg (arg, INTEGER_TYPE))
8154 return NULL_TREE;
8156 /* Optimize constant value. */
8157 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8159 HOST_WIDE_INT hi, width, r_hi = 0;
8160 unsigned HOST_WIDE_INT lo, r_lo = 0;
8161 tree type;
8163 type = TREE_TYPE (arg);
8164 width = TYPE_PRECISION (type);
8165 lo = TREE_INT_CST_LOW (arg);
8166 hi = TREE_INT_CST_HIGH (arg);
8168 switch (DECL_FUNCTION_CODE (fndecl))
8170 case BUILT_IN_BSWAP32:
8171 case BUILT_IN_BSWAP64:
8173 int s;
8175 for (s = 0; s < width; s += 8)
8177 int d = width - s - 8;
8178 unsigned HOST_WIDE_INT byte;
8180 if (s < HOST_BITS_PER_WIDE_INT)
8181 byte = (lo >> s) & 0xff;
8182 else
8183 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8185 if (d < HOST_BITS_PER_WIDE_INT)
8186 r_lo |= byte << d;
8187 else
8188 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8192 break;
8194 default:
8195 gcc_unreachable ();
8198 if (width < HOST_BITS_PER_WIDE_INT)
8199 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8200 else
8201 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8204 return NULL_TREE;
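/* A rough, illustrative sketch of the constant folding above, which
   simply reverses the byte order:

     __builtin_bswap32 (0x12345678)          becomes  0x78563412
     __builtin_bswap64 (0x0102030405060708)  becomes  0x0807060504030201  */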
8207 /* Return true if EXPR is the real constant contained in VALUE. */
8209 static bool
8210 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8212 STRIP_NOPS (expr);
8214 return ((TREE_CODE (expr) == REAL_CST
8215 && !TREE_OVERFLOW (expr)
8216 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8217 || (TREE_CODE (expr) == COMPLEX_CST
8218 && real_dconstp (TREE_REALPART (expr), value)
8219 && real_zerop (TREE_IMAGPART (expr))));
8222 /* A subroutine of fold_builtin to fold the various logarithmic
8223    functions.  Return NULL_TREE if no simplification can be made.
8224 FUNC is the corresponding MPFR logarithm function. */
8226 static tree
8227 fold_builtin_logarithm (tree fndecl, tree arg,
8228 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8230 if (validate_arg (arg, REAL_TYPE))
8232 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8233 tree res;
8234 const enum built_in_function fcode = builtin_mathfn_code (arg);
8236       /* Optimize log(e) = 1.0.  We're never passed an exact 'e';
8237 instead we'll look for 'e' truncated to MODE. So only do
8238 this if flag_unsafe_math_optimizations is set. */
8239 if (flag_unsafe_math_optimizations && func == mpfr_log)
8241 const REAL_VALUE_TYPE e_truncated =
8242 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_e));
8243 if (real_dconstp (arg, &e_truncated))
8244 return build_real (type, dconst1);
8247 /* Calculate the result when the argument is a constant. */
8248 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8249 return res;
8251 /* Special case, optimize logN(expN(x)) = x. */
8252 if (flag_unsafe_math_optimizations
8253 && ((func == mpfr_log
8254 && (fcode == BUILT_IN_EXP
8255 || fcode == BUILT_IN_EXPF
8256 || fcode == BUILT_IN_EXPL))
8257 || (func == mpfr_log2
8258 && (fcode == BUILT_IN_EXP2
8259 || fcode == BUILT_IN_EXP2F
8260 || fcode == BUILT_IN_EXP2L))
8261 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8262 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8264 /* Optimize logN(func()) for various exponential functions. We
8265 want to determine the value "x" and the power "exponent" in
8266 order to transform logN(x**exponent) into exponent*logN(x). */
8267 if (flag_unsafe_math_optimizations)
8269 tree exponent = 0, x = 0;
8271 switch (fcode)
8273 CASE_FLT_FN (BUILT_IN_EXP):
8274 	  /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8275 x = build_real (type,
8276 real_value_truncate (TYPE_MODE (type),
8277 *get_real_const (rv_e)));
8278 exponent = CALL_EXPR_ARG (arg, 0);
8279 break;
8280 CASE_FLT_FN (BUILT_IN_EXP2):
8281 	  /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8282 x = build_real (type, dconst2);
8283 exponent = CALL_EXPR_ARG (arg, 0);
8284 break;
8285 CASE_FLT_FN (BUILT_IN_EXP10):
8286 CASE_FLT_FN (BUILT_IN_POW10):
8287 	  /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8289 REAL_VALUE_TYPE dconst10;
8290 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8291 x = build_real (type, dconst10);
8293 exponent = CALL_EXPR_ARG (arg, 0);
8294 break;
8295 CASE_FLT_FN (BUILT_IN_SQRT):
8296 	  /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8297 x = CALL_EXPR_ARG (arg, 0);
8298 exponent = build_real (type, dconsthalf);
8299 break;
8300 CASE_FLT_FN (BUILT_IN_CBRT):
8301 	  /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8302 x = CALL_EXPR_ARG (arg, 0);
8303 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8304 *get_real_const (rv_third)));
8305 break;
8306 CASE_FLT_FN (BUILT_IN_POW):
8307 	  /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8308 x = CALL_EXPR_ARG (arg, 0);
8309 exponent = CALL_EXPR_ARG (arg, 1);
8310 break;
8311 default:
8312 break;
8315 /* Now perform the optimization. */
8316 if (x && exponent)
8318 tree logfn = build_call_expr (fndecl, 1, x);
8319 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8324 return NULL_TREE;
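/* A rough, illustrative sketch of the logarithm folds above (all
   guarded by -funsafe-math-optimizations):

     log (exp (x))     becomes  x
     log2 (exp2 (x))   becomes  x
     log (sqrt (x))    becomes  0.5 * log (x)
     log (pow (x, y))  becomes  y * log (x)

   A constant argument is evaluated directly through MPFR.  */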
8327 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8328 NULL_TREE if no simplification can be made. */
8330 static tree
8331 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8333 tree res, narg0, narg1;
8335 if (!validate_arg (arg0, REAL_TYPE)
8336 || !validate_arg (arg1, REAL_TYPE))
8337 return NULL_TREE;
8339 /* Calculate the result when the argument is a constant. */
8340 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8341 return res;
8343 /* If either argument to hypot has a negate or abs, strip that off.
8344 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8345 narg0 = fold_strip_sign_ops (arg0);
8346 narg1 = fold_strip_sign_ops (arg1);
8347 if (narg0 || narg1)
8349 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8350 narg1 ? narg1 : arg1);
8353 /* If either argument is zero, hypot is fabs of the other. */
8354 if (real_zerop (arg0))
8355 return fold_build1 (ABS_EXPR, type, arg1);
8356 else if (real_zerop (arg1))
8357 return fold_build1 (ABS_EXPR, type, arg0);
8359 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8360 if (flag_unsafe_math_optimizations
8361 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8363 const REAL_VALUE_TYPE sqrt2_trunc
8364 = real_value_truncate (TYPE_MODE (type), *get_real_const (rv_sqrt2));
8365 return fold_build2 (MULT_EXPR, type,
8366 fold_build1 (ABS_EXPR, type, arg0),
8367 build_real (type, sqrt2_trunc));
8370 return NULL_TREE;
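/* A rough, illustrative sketch of the hypot folds above (the last line
   requires -funsafe-math-optimizations):

     hypot (-x, fabs (y))   becomes  hypot (x, y)
     hypot (x, 0.0)         becomes  fabs (x)
     hypot (x, x)           becomes  fabs (x) * sqrt(2)  */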
8374 /* Fold a builtin function call to pow, powf, or powl. Return
8375 NULL_TREE if no simplification can be made. */
8376 static tree
8377 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8379 tree res;
8381 if (!validate_arg (arg0, REAL_TYPE)
8382 || !validate_arg (arg1, REAL_TYPE))
8383 return NULL_TREE;
8385 /* Calculate the result when the argument is a constant. */
8386 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8387 return res;
8389 /* Optimize pow(1.0,y) = 1.0. */
8390 if (real_onep (arg0))
8391 return omit_one_operand (type, build_real (type, dconst1), arg1);
8393 if (TREE_CODE (arg1) == REAL_CST
8394 && !TREE_OVERFLOW (arg1))
8396 REAL_VALUE_TYPE cint;
8397 REAL_VALUE_TYPE c;
8398 HOST_WIDE_INT n;
8400 c = TREE_REAL_CST (arg1);
8402 /* Optimize pow(x,0.0) = 1.0. */
8403 if (REAL_VALUES_EQUAL (c, dconst0))
8404 return omit_one_operand (type, build_real (type, dconst1),
8405 arg0);
8407 /* Optimize pow(x,1.0) = x. */
8408 if (REAL_VALUES_EQUAL (c, dconst1))
8409 return arg0;
8411 /* Optimize pow(x,-1.0) = 1.0/x. */
8412 if (REAL_VALUES_EQUAL (c, dconstm1))
8413 return fold_build2 (RDIV_EXPR, type,
8414 build_real (type, dconst1), arg0);
8416 /* Optimize pow(x,0.5) = sqrt(x). */
8417 if (flag_unsafe_math_optimizations
8418 && REAL_VALUES_EQUAL (c, dconsthalf))
8420 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8422 if (sqrtfn != NULL_TREE)
8423 return build_call_expr (sqrtfn, 1, arg0);
8426 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8427 if (flag_unsafe_math_optimizations)
8429 const REAL_VALUE_TYPE dconstroot
8430 = real_value_truncate (TYPE_MODE (type),
8431 *get_real_const (rv_third));
8433 if (REAL_VALUES_EQUAL (c, dconstroot))
8435 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8436 if (cbrtfn != NULL_TREE)
8437 return build_call_expr (cbrtfn, 1, arg0);
8441 /* Check for an integer exponent. */
8442 n = real_to_integer (&c);
8443 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8444 if (real_identical (&c, &cint))
8446 /* Attempt to evaluate pow at compile-time. */
8447 if (TREE_CODE (arg0) == REAL_CST
8448 && !TREE_OVERFLOW (arg0))
8450 REAL_VALUE_TYPE x;
8451 bool inexact;
8453 x = TREE_REAL_CST (arg0);
8454 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8455 if (flag_unsafe_math_optimizations || !inexact)
8456 return build_real (type, x);
8459 /* Strip sign ops from even integer powers. */
8460 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8462 tree narg0 = fold_strip_sign_ops (arg0);
8463 if (narg0)
8464 return build_call_expr (fndecl, 2, narg0, arg1);
8469 if (flag_unsafe_math_optimizations)
8471 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8473 /* Optimize pow(expN(x),y) = expN(x*y). */
8474 if (BUILTIN_EXPONENT_P (fcode))
8476 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8477 tree arg = CALL_EXPR_ARG (arg0, 0);
8478 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8479 return build_call_expr (expfn, 1, arg);
8482 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8483 if (BUILTIN_SQRT_P (fcode))
8485 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8486 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8487 build_real (type, dconsthalf));
8488 return build_call_expr (fndecl, 2, narg0, narg1);
8491 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8492 if (BUILTIN_CBRT_P (fcode))
8494 tree arg = CALL_EXPR_ARG (arg0, 0);
8495 if (tree_expr_nonnegative_p (arg))
8497 const REAL_VALUE_TYPE dconstroot
8498 = real_value_truncate (TYPE_MODE (type),
8499 *get_real_const (rv_third));
8500 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8501 build_real (type, dconstroot));
8502 return build_call_expr (fndecl, 2, arg, narg1);
8506 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8507 if (fcode == BUILT_IN_POW
8508 || fcode == BUILT_IN_POWF
8509 || fcode == BUILT_IN_POWL)
8511 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8512 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8513 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8514 return build_call_expr (fndecl, 2, arg00, narg1);
8518 return NULL_TREE;
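/* A rough, illustrative sketch of the pow folds above (the lines marked
   "unsafe" require -funsafe-math-optimizations):

     pow (x, 0.0)          becomes  1.0
     pow (x, 1.0)          becomes  x
     pow (x, -1.0)         becomes  1.0 / x
     pow (x, 0.5)          becomes  sqrt (x)        (unsafe)
     pow (exp (x), y)      becomes  exp (x * y)     (unsafe)
     pow (pow (x, y), z)   becomes  pow (x, y * z)  (unsafe)  */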
8521 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8522 Return NULL_TREE if no simplification can be made. */
8523 static tree
8524 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8525 tree arg0, tree arg1, tree type)
8527 if (!validate_arg (arg0, REAL_TYPE)
8528 || !validate_arg (arg1, INTEGER_TYPE))
8529 return NULL_TREE;
8531 /* Optimize pow(1.0,y) = 1.0. */
8532 if (real_onep (arg0))
8533 return omit_one_operand (type, build_real (type, dconst1), arg1);
8535 if (host_integerp (arg1, 0))
8537 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8539 /* Evaluate powi at compile-time. */
8540 if (TREE_CODE (arg0) == REAL_CST
8541 && !TREE_OVERFLOW (arg0))
8543 REAL_VALUE_TYPE x;
8544 x = TREE_REAL_CST (arg0);
8545 real_powi (&x, TYPE_MODE (type), &x, c);
8546 return build_real (type, x);
8549 /* Optimize pow(x,0) = 1.0. */
8550 if (c == 0)
8551 return omit_one_operand (type, build_real (type, dconst1),
8552 arg0);
8554 /* Optimize pow(x,1) = x. */
8555 if (c == 1)
8556 return arg0;
8558 /* Optimize pow(x,-1) = 1.0/x. */
8559 if (c == -1)
8560 return fold_build2 (RDIV_EXPR, type,
8561 build_real (type, dconst1), arg0);
8564 return NULL_TREE;
8567 /* A subroutine of fold_builtin to fold the various exponent
8568 functions. Return NULL_TREE if no simplification can be made.
8569 FUNC is the corresponding MPFR exponent function. */
8571 static tree
8572 fold_builtin_exponent (tree fndecl, tree arg,
8573 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8575 if (validate_arg (arg, REAL_TYPE))
8577 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8578 tree res;
8580 /* Calculate the result when the argument is a constant. */
8581 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8582 return res;
8584 /* Optimize expN(logN(x)) = x. */
8585 if (flag_unsafe_math_optimizations)
8587 const enum built_in_function fcode = builtin_mathfn_code (arg);
8589 if ((func == mpfr_exp
8590 && (fcode == BUILT_IN_LOG
8591 || fcode == BUILT_IN_LOGF
8592 || fcode == BUILT_IN_LOGL))
8593 || (func == mpfr_exp2
8594 && (fcode == BUILT_IN_LOG2
8595 || fcode == BUILT_IN_LOG2F
8596 || fcode == BUILT_IN_LOG2L))
8597 || (func == mpfr_exp10
8598 && (fcode == BUILT_IN_LOG10
8599 || fcode == BUILT_IN_LOG10F
8600 || fcode == BUILT_IN_LOG10L)))
8601 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8605 return NULL_TREE;
8608 /* Return true if VAR is a VAR_DECL or a component thereof. */
8610 static bool
8611 var_decl_component_p (tree var)
8613 tree inner = var;
8614 while (handled_component_p (inner))
8615 inner = TREE_OPERAND (inner, 0);
8616 return SSA_VAR_P (inner);
8619 /* Fold function call to builtin memset. Return
8620 NULL_TREE if no simplification can be made. */
8622 static tree
8623 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8625 tree var, ret;
8626 unsigned HOST_WIDE_INT length, cval;
8628 if (! validate_arg (dest, POINTER_TYPE)
8629 || ! validate_arg (c, INTEGER_TYPE)
8630 || ! validate_arg (len, INTEGER_TYPE))
8631 return NULL_TREE;
8633 if (! host_integerp (len, 1))
8634 return NULL_TREE;
8636 /* If the LEN parameter is zero, return DEST. */
8637 if (integer_zerop (len))
8638 return omit_one_operand (type, dest, c);
8640 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8641 return NULL_TREE;
8643 var = dest;
8644 STRIP_NOPS (var);
8645 if (TREE_CODE (var) != ADDR_EXPR)
8646 return NULL_TREE;
8648 var = TREE_OPERAND (var, 0);
8649 if (TREE_THIS_VOLATILE (var))
8650 return NULL_TREE;
8652 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8653 && !POINTER_TYPE_P (TREE_TYPE (var)))
8654 return NULL_TREE;
8656 if (! var_decl_component_p (var))
8657 return NULL_TREE;
8659 length = tree_low_cst (len, 1);
8660 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8661 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8662 < (int) length)
8663 return NULL_TREE;
8665 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8666 return NULL_TREE;
8668 if (integer_zerop (c))
8669 cval = 0;
8670 else
8672 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8673 return NULL_TREE;
8675 cval = tree_low_cst (c, 1);
8676 cval &= 0xff;
8677 cval |= cval << 8;
8678 cval |= cval << 16;
8679 cval |= (cval << 31) << 1;
8682 ret = build_int_cst_type (TREE_TYPE (var), cval);
8683 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8684 if (ignore)
8685 return ret;
8687 return omit_one_operand (type, dest, ret);
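/* A rough, illustrative sketch of the fold above (assuming U is a
   4-byte unsigned int and the size and alignment checks pass):

     memset (&U, 0xAB, sizeof U);

   is replaced by the single store

     U = 0xABABABAB;

   using the byte-replicated value computed in CVAL.  */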
8690 /* Fold function call to builtin bzero.  Return
8691 NULL_TREE if no simplification can be made. */
8693 static tree
8694 fold_builtin_bzero (tree dest, tree size, bool ignore)
8696 if (! validate_arg (dest, POINTER_TYPE)
8697 || ! validate_arg (size, INTEGER_TYPE))
8698 return NULL_TREE;
8700 if (!ignore)
8701 return NULL_TREE;
8703 /* New argument list transforming bzero(ptr x, int y) to
8704 memset(ptr x, int 0, size_t y). This is done this way
8705    so that if it isn't expanded inline, we fall back to
8706 calling bzero instead of memset. */
8708 return fold_builtin_memset (dest, integer_zero_node,
8709 fold_convert (sizetype, size),
8710 void_type_node, ignore);
8713 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8714 NULL_TREE if no simplification can be made.
8715 If ENDP is 0, return DEST (like memcpy).
8716 If ENDP is 1, return DEST+LEN (like mempcpy).
8717 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8718 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8719 (memmove). */
8721 static tree
8722 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8724 tree destvar, srcvar, expr;
8726 if (! validate_arg (dest, POINTER_TYPE)
8727 || ! validate_arg (src, POINTER_TYPE)
8728 || ! validate_arg (len, INTEGER_TYPE))
8729 return NULL_TREE;
8731 /* If the LEN parameter is zero, return DEST. */
8732 if (integer_zerop (len))
8733 return omit_one_operand (type, dest, src);
8735 /* If SRC and DEST are the same (and not volatile), return
8736 DEST{,+LEN,+LEN-1}. */
8737 if (operand_equal_p (src, dest, 0))
8738 expr = len;
8739 else
8741 tree srctype, desttype;
8742 if (endp == 3)
8744 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8745 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8747 /* Both DEST and SRC must be pointer types.
8748 ??? This is what old code did. Is the testing for pointer types
8749 really mandatory?
8751 If either SRC is readonly or length is 1, we can use memcpy. */
8752 if (dest_align && src_align
8753 && (readonly_data_expr (src)
8754 || (host_integerp (len, 1)
8755 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8756 tree_low_cst (len, 1)))))
8758 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8759 if (!fn)
8760 return NULL_TREE;
8761 return build_call_expr (fn, 3, dest, src, len);
8763 return NULL_TREE;
8766 if (!host_integerp (len, 0))
8767 return NULL_TREE;
8768 /* FIXME:
8769    This logic loses for arguments like (type *)malloc (sizeof (type)),
8770    since we strip the casts of the VOID return value from malloc.
8771    Perhaps we ought to inherit the type from a non-VOID argument here? */
8772 STRIP_NOPS (src);
8773 STRIP_NOPS (dest);
8774 srctype = TREE_TYPE (TREE_TYPE (src));
8775 desttype = TREE_TYPE (TREE_TYPE (dest));
8776 if (!srctype || !desttype
8777 || !TYPE_SIZE_UNIT (srctype)
8778 || !TYPE_SIZE_UNIT (desttype)
8779 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8780 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8781 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8782 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8783 return NULL_TREE;
8785 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8786 < (int) TYPE_ALIGN (desttype)
8787 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8788 < (int) TYPE_ALIGN (srctype)))
8789 return NULL_TREE;
8791 if (!ignore)
8792 dest = builtin_save_expr (dest);
8794 srcvar = build_fold_indirect_ref (src);
8795 if (TREE_THIS_VOLATILE (srcvar))
8796 return NULL_TREE;
8797 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8798 return NULL_TREE;
8799 /* With memcpy, it is possible to bypass aliasing rules, so without
8800      this check e.g. execute/20060930-2.c would be misoptimized, because
8801      it uses a conflicting alias set to hold the argument for the memcpy call.
8802 This check is probably unnecessary with -fno-strict-aliasing.
8803 Similarly for destvar. See also PR29286. */
8804 if (!var_decl_component_p (srcvar)
8805      /* Accept: memcpy (*char_var, "test", 1); that simplifies
8806 to char_var='t'; */
8807 || is_gimple_min_invariant (srcvar)
8808 || readonly_data_expr (src))
8809 return NULL_TREE;
8811 destvar = build_fold_indirect_ref (dest);
8812 if (TREE_THIS_VOLATILE (destvar))
8813 return NULL_TREE;
8814 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8815 return NULL_TREE;
8816 if (!var_decl_component_p (destvar))
8817 return NULL_TREE;
8819 if (srctype == desttype
8820 || (gimple_in_ssa_p (cfun)
8821 && useless_type_conversion_p (desttype, srctype)))
8822 expr = srcvar;
8823 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8824 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8825 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8826 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8827 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8828 else
8829 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8830 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8833 if (ignore)
8834 return expr;
8836 if (endp == 0 || endp == 3)
8837 return omit_one_operand (type, dest, expr);
8839 if (expr == len)
8840 expr = NULL_TREE;
8842 if (endp == 2)
8843 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8844 ssize_int (1));
8846 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8847 dest = fold_convert (type, dest);
8848 if (expr)
8849 dest = omit_one_operand (type, dest, expr);
8850 return dest;
8853 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8854 If LEN is not NULL, it represents the length of the string to be
8855 copied. Return NULL_TREE if no simplification can be made. */
8857 tree
8858 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8860 tree fn;
8862 if (!validate_arg (dest, POINTER_TYPE)
8863 || !validate_arg (src, POINTER_TYPE))
8864 return NULL_TREE;
8866 /* If SRC and DEST are the same (and not volatile), return DEST. */
8867 if (operand_equal_p (src, dest, 0))
8868 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8870 if (optimize_size)
8871 return NULL_TREE;
8873 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8874 if (!fn)
8875 return NULL_TREE;
8877 if (!len)
8879 len = c_strlen (src, 1);
8880 if (! len || TREE_SIDE_EFFECTS (len))
8881 return NULL_TREE;
8884 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8885 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8886 build_call_expr (fn, 3, dest, src, len));
8889 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8890 If SLEN is not NULL, it represents the length of the source string.
8891 Return NULL_TREE if no simplification can be made. */
8893 tree
8894 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8896 tree fn;
8898 if (!validate_arg (dest, POINTER_TYPE)
8899 || !validate_arg (src, POINTER_TYPE)
8900 || !validate_arg (len, INTEGER_TYPE))
8901 return NULL_TREE;
8903 /* If the LEN parameter is zero, return DEST. */
8904 if (integer_zerop (len))
8905 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8907 /* We can't compare slen with len as constants below if len is not a
8908 constant. */
8909 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8910 return NULL_TREE;
8912 if (!slen)
8913 slen = c_strlen (src, 1);
8915 /* Now, we must be passed a constant src ptr parameter. */
8916 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8917 return NULL_TREE;
8919 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8921 /* We do not support simplification of this case, though we do
8922 support it when expanding trees into RTL. */
8923 /* FIXME: generate a call to __builtin_memset. */
8924 if (tree_int_cst_lt (slen, len))
8925 return NULL_TREE;
8927   /* OK, transform into builtin memcpy.  */
8928 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8929 if (!fn)
8930 return NULL_TREE;
8931 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8932 build_call_expr (fn, 3, dest, src, len));
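/* A rough, illustrative sketch of the fold above: when the constant
   source, including its terminating NUL, is at least LEN bytes long,
   no zero padding is needed, so

     strncpy (dst, "hello", 4);

   becomes

     memcpy (dst, "hello", 4);  */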
8935 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8936 arguments to the call, and TYPE is its return type.
8937 Return NULL_TREE if no simplification can be made. */
8939 static tree
8940 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8942 if (!validate_arg (arg1, POINTER_TYPE)
8943 || !validate_arg (arg2, INTEGER_TYPE)
8944 || !validate_arg (len, INTEGER_TYPE))
8945 return NULL_TREE;
8946 else
8948 const char *p1;
8950 if (TREE_CODE (arg2) != INTEGER_CST
8951 || !host_integerp (len, 1))
8952 return NULL_TREE;
8954 p1 = c_getstr (arg1);
8955 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8957 char c;
8958 const char *r;
8959 tree tem;
8961 if (target_char_cast (arg2, &c))
8962 return NULL_TREE;
8964 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8966 if (r == NULL)
8967 return build_int_cst (TREE_TYPE (arg1), 0);
8969 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8970 size_int (r - p1));
8971 return fold_convert (type, tem);
8973 return NULL_TREE;
8977 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8978 Return NULL_TREE if no simplification can be made. */
8980 static tree
8981 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8983 const char *p1, *p2;
8985 if (!validate_arg (arg1, POINTER_TYPE)
8986 || !validate_arg (arg2, POINTER_TYPE)
8987 || !validate_arg (len, INTEGER_TYPE))
8988 return NULL_TREE;
8990 /* If the LEN parameter is zero, return zero. */
8991 if (integer_zerop (len))
8992 return omit_two_operands (integer_type_node, integer_zero_node,
8993 arg1, arg2);
8995 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8996 if (operand_equal_p (arg1, arg2, 0))
8997 return omit_one_operand (integer_type_node, integer_zero_node, len);
8999 p1 = c_getstr (arg1);
9000 p2 = c_getstr (arg2);
9002 /* If all arguments are constant, and the value of len is not greater
9003 than the lengths of arg1 and arg2, evaluate at compile-time. */
9004 if (host_integerp (len, 1) && p1 && p2
9005 && compare_tree_int (len, strlen (p1) + 1) <= 0
9006 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9008 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9010 if (r > 0)
9011 return integer_one_node;
9012 else if (r < 0)
9013 return integer_minus_one_node;
9014 else
9015 return integer_zero_node;
9018 /* If len parameter is one, return an expression corresponding to
9019      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
9020 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9022 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9023 tree cst_uchar_ptr_node
9024 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9026 tree ind1 = fold_convert (integer_type_node,
9027 build1 (INDIRECT_REF, cst_uchar_node,
9028 fold_convert (cst_uchar_ptr_node,
9029 arg1)));
9030 tree ind2 = fold_convert (integer_type_node,
9031 build1 (INDIRECT_REF, cst_uchar_node,
9032 fold_convert (cst_uchar_ptr_node,
9033 arg2)));
9034 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9037 return NULL_TREE;
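/* A rough, illustrative sketch of the memcmp folds above:

     memcmp (p, q, 0)           becomes  0
     memcmp ("abc", "abd", 3)   becomes  -1   (evaluated at compile time)
     memcmp (p, q, 1)           becomes  *(const unsigned char *) p
                                         - *(const unsigned char *) q  */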
9040 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9041 Return NULL_TREE if no simplification can be made. */
9043 static tree
9044 fold_builtin_strcmp (tree arg1, tree arg2)
9046 const char *p1, *p2;
9048 if (!validate_arg (arg1, POINTER_TYPE)
9049 || !validate_arg (arg2, POINTER_TYPE))
9050 return NULL_TREE;
9052 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9053 if (operand_equal_p (arg1, arg2, 0))
9054 return integer_zero_node;
9056 p1 = c_getstr (arg1);
9057 p2 = c_getstr (arg2);
9059 if (p1 && p2)
9061 const int i = strcmp (p1, p2);
9062 if (i < 0)
9063 return integer_minus_one_node;
9064 else if (i > 0)
9065 return integer_one_node;
9066 else
9067 return integer_zero_node;
9070 /* If the second arg is "", return *(const unsigned char*)arg1. */
9071 if (p2 && *p2 == '\0')
9073 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9074 tree cst_uchar_ptr_node
9075 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9077 return fold_convert (integer_type_node,
9078 build1 (INDIRECT_REF, cst_uchar_node,
9079 fold_convert (cst_uchar_ptr_node,
9080 arg1)));
9083 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9084 if (p1 && *p1 == '\0')
9086 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9087 tree cst_uchar_ptr_node
9088 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9090 tree temp = fold_convert (integer_type_node,
9091 build1 (INDIRECT_REF, cst_uchar_node,
9092 fold_convert (cst_uchar_ptr_node,
9093 arg2)));
9094 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9097 return NULL_TREE;
9100 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9101 Return NULL_TREE if no simplification can be made. */
9103 static tree
9104 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9106 const char *p1, *p2;
9108 if (!validate_arg (arg1, POINTER_TYPE)
9109 || !validate_arg (arg2, POINTER_TYPE)
9110 || !validate_arg (len, INTEGER_TYPE))
9111 return NULL_TREE;
9113 /* If the LEN parameter is zero, return zero. */
9114 if (integer_zerop (len))
9115 return omit_two_operands (integer_type_node, integer_zero_node,
9116 arg1, arg2);
9118 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9119 if (operand_equal_p (arg1, arg2, 0))
9120 return omit_one_operand (integer_type_node, integer_zero_node, len);
9122 p1 = c_getstr (arg1);
9123 p2 = c_getstr (arg2);
9125 if (host_integerp (len, 1) && p1 && p2)
9127 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9128 if (i > 0)
9129 return integer_one_node;
9130 else if (i < 0)
9131 return integer_minus_one_node;
9132 else
9133 return integer_zero_node;
9136 /* If the second arg is "", and the length is greater than zero,
9137 return *(const unsigned char*)arg1. */
9138 if (p2 && *p2 == '\0'
9139 && TREE_CODE (len) == INTEGER_CST
9140 && tree_int_cst_sgn (len) == 1)
9142 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9143 tree cst_uchar_ptr_node
9144 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9146 return fold_convert (integer_type_node,
9147 build1 (INDIRECT_REF, cst_uchar_node,
9148 fold_convert (cst_uchar_ptr_node,
9149 arg1)));
9152 /* If the first arg is "", and the length is greater than zero,
9153 return -*(const unsigned char*)arg2. */
9154 if (p1 && *p1 == '\0'
9155 && TREE_CODE (len) == INTEGER_CST
9156 && tree_int_cst_sgn (len) == 1)
9158 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9159 tree cst_uchar_ptr_node
9160 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9162 tree temp = fold_convert (integer_type_node,
9163 build1 (INDIRECT_REF, cst_uchar_node,
9164 fold_convert (cst_uchar_ptr_node,
9165 arg2)));
9166 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9169 /* If the len parameter is one, return an expression corresponding to
9170 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9171 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9173 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9174 tree cst_uchar_ptr_node
9175 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9177 tree ind1 = fold_convert (integer_type_node,
9178 build1 (INDIRECT_REF, cst_uchar_node,
9179 fold_convert (cst_uchar_ptr_node,
9180 arg1)));
9181 tree ind2 = fold_convert (integer_type_node,
9182 build1 (INDIRECT_REF, cst_uchar_node,
9183 fold_convert (cst_uchar_ptr_node,
9184 arg2)));
9185 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9188 return NULL_TREE;
9191 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9192 ARG. Return NULL_TREE if no simplification can be made. */
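/* For example, signbit (-3.0) folds to 1, a known non-negative argument
   folds to 0, and for formats without signed zeros signbit (x) becomes
   x < 0.0.  */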
9194 static tree
9195 fold_builtin_signbit (tree arg, tree type)
9197 tree temp;
9199 if (!validate_arg (arg, REAL_TYPE))
9200 return NULL_TREE;
9202 /* If ARG is a compile-time constant, determine the result. */
9203 if (TREE_CODE (arg) == REAL_CST
9204 && !TREE_OVERFLOW (arg))
9206 REAL_VALUE_TYPE c;
9208 c = TREE_REAL_CST (arg);
9209 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9210 return fold_convert (type, temp);
9213 /* If ARG is non-negative, the result is always zero. */
9214 if (tree_expr_nonnegative_p (arg))
9215 return omit_one_operand (type, integer_zero_node, arg);
9217 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9218 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9219 return fold_build2 (LT_EXPR, type, arg,
9220 build_real (TREE_TYPE (arg), dconst0));
9222 return NULL_TREE;
9225 /* Fold function call to builtin copysign, copysignf or copysignl with
9226 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9227 be made. */
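/* For example, copysign (x, x) folds to x, copysign (-3.0, 2.0) folds to
   3.0, and when the second argument is known to be non-negative the call
   folds to fabs (x) while still evaluating that argument.  */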
9229 static tree
9230 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9232 tree tem;
9234 if (!validate_arg (arg1, REAL_TYPE)
9235 || !validate_arg (arg2, REAL_TYPE))
9236 return NULL_TREE;
9238 /* copysign(X,X) is X. */
9239 if (operand_equal_p (arg1, arg2, 0))
9240 return fold_convert (type, arg1);
9242 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9243 if (TREE_CODE (arg1) == REAL_CST
9244 && TREE_CODE (arg2) == REAL_CST
9245 && !TREE_OVERFLOW (arg1)
9246 && !TREE_OVERFLOW (arg2))
9248 REAL_VALUE_TYPE c1, c2;
9250 c1 = TREE_REAL_CST (arg1);
9251 c2 = TREE_REAL_CST (arg2);
9252 /* c1.sign := c2.sign. */
9253 real_copysign (&c1, &c2);
9254 return build_real (type, c1);
9257 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9258 Remember to evaluate Y for side-effects. */
9259 if (tree_expr_nonnegative_p (arg2))
9260 return omit_one_operand (type,
9261 fold_build1 (ABS_EXPR, type, arg1),
9262 arg2);
9264 /* Strip sign changing operations for the first argument. */
9265 tem = fold_strip_sign_ops (arg1);
9266 if (tem)
9267 return build_call_expr (fndecl, 2, tem, arg2);
9269 return NULL_TREE;
9272 /* Fold a call to builtin isascii with argument ARG. */
9274 static tree
9275 fold_builtin_isascii (tree arg)
9277 if (!validate_arg (arg, INTEGER_TYPE))
9278 return NULL_TREE;
9279 else
9281 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9282 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9283 build_int_cst (NULL_TREE,
9284 ~ (unsigned HOST_WIDE_INT) 0x7f));
9285 return fold_build2 (EQ_EXPR, integer_type_node,
9286 arg, integer_zero_node);
9290 /* Fold a call to builtin toascii with argument ARG. */
9292 static tree
9293 fold_builtin_toascii (tree arg)
9295 if (!validate_arg (arg, INTEGER_TYPE))
9296 return NULL_TREE;
9298 /* Transform toascii(c) -> (c & 0x7f). */
9299 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9300 build_int_cst (NULL_TREE, 0x7f));
9303 /* Fold a call to builtin isdigit with argument ARG. */
9305 static tree
9306 fold_builtin_isdigit (tree arg)
9308 if (!validate_arg (arg, INTEGER_TYPE))
9309 return NULL_TREE;
9310 else
9312 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9313 /* According to the C standard, isdigit is unaffected by locale.
9314 However, it definitely is affected by the target character set. */
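/* For example, on an ASCII target isdigit ('5') becomes
   (unsigned) '5' - '0' <= 9, which evaluates to 1.  */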
9315 unsigned HOST_WIDE_INT target_digit0
9316 = lang_hooks.to_target_charset ('0');
9318 if (target_digit0 == 0)
9319 return NULL_TREE;
9321 arg = fold_convert (unsigned_type_node, arg);
9322 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9323 build_int_cst (unsigned_type_node, target_digit0));
9324 return fold_build2 (LE_EXPR, integer_type_node, arg,
9325 build_int_cst (unsigned_type_node, 9));
9329 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9331 static tree
9332 fold_builtin_fabs (tree arg, tree type)
9334 if (!validate_arg (arg, REAL_TYPE))
9335 return NULL_TREE;
9337 arg = fold_convert (type, arg);
9338 if (TREE_CODE (arg) == REAL_CST)
9339 return fold_abs_const (arg, type);
9340 return fold_build1 (ABS_EXPR, type, arg);
9343 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9345 static tree
9346 fold_builtin_abs (tree arg, tree type)
9348 if (!validate_arg (arg, INTEGER_TYPE))
9349 return NULL_TREE;
9351 arg = fold_convert (type, arg);
9352 if (TREE_CODE (arg) == INTEGER_CST)
9353 return fold_abs_const (arg, type);
9354 return fold_build1 (ABS_EXPR, type, arg);
9357 /* Fold a call to builtin fmin or fmax. */
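/* For example, when one argument is a quiet NaN constant the other
   argument is returned, fmin (x, x) folds to x, and with
   -ffinite-math-only the calls become MIN_EXPR/MAX_EXPR.  */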
9359 static tree
9360 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9362 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9364 /* Calculate the result when the argument is a constant. */
9365 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9367 if (res)
9368 return res;
9370 /* If either argument is NaN, return the other one. Avoid the
9371 transformation if we get (and honor) a signalling NaN. Using
9372 omit_one_operand() ensures we create a non-lvalue. */
9373 if (TREE_CODE (arg0) == REAL_CST
9374 && real_isnan (&TREE_REAL_CST (arg0))
9375 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9376 || ! TREE_REAL_CST (arg0).signalling))
9377 return omit_one_operand (type, arg1, arg0);
9378 if (TREE_CODE (arg1) == REAL_CST
9379 && real_isnan (&TREE_REAL_CST (arg1))
9380 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9381 || ! TREE_REAL_CST (arg1).signalling))
9382 return omit_one_operand (type, arg0, arg1);
9384 /* Transform fmin/fmax(x,x) -> x. */
9385 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9386 return omit_one_operand (type, arg0, arg1);
9388 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9389 functions to return the numeric arg if the other one is NaN.
9390 These tree codes don't honor that, so only transform if
9391 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9392 handled, so we don't have to worry about it either. */
9393 if (flag_finite_math_only)
9394 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9395 fold_convert (type, arg0),
9396 fold_convert (type, arg1));
9398 return NULL_TREE;
9401 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9403 static tree
9404 fold_builtin_carg (tree arg, tree type)
9406 if (validate_arg (arg, COMPLEX_TYPE))
9408 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9410 if (atan2_fn)
9412 tree new_arg = builtin_save_expr (arg);
9413 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9414 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9415 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9419 return NULL_TREE;
9422 /* Fold a call to builtin logb/ilogb. */
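/* For example, in a radix-2 format logb (8.0) folds to 3.0 and
   ilogb (8.0) folds to 3: 8.0 is represented internally as 0.5 * 2**4,
   so the code below returns REAL_EXP minus 1.  */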
9424 static tree
9425 fold_builtin_logb (tree arg, tree rettype)
9427 if (! validate_arg (arg, REAL_TYPE))
9428 return NULL_TREE;
9430 STRIP_NOPS (arg);
9432 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9434 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9436 switch (value->cl)
9438 case rvc_nan:
9439 case rvc_inf:
9440 /* If arg is Inf or NaN and we're logb, return it. */
9441 if (TREE_CODE (rettype) == REAL_TYPE)
9442 return fold_convert (rettype, arg);
9443 /* Fall through... */
9444 case rvc_zero:
9445 /* Zero may set errno and/or raise an exception for logb; also
9446 for ilogb we don't know FP_ILOGB0. */
9447 return NULL_TREE;
9448 case rvc_normal:
9449 /* For normal numbers, proceed iff radix == 2. In GCC,
9450 normalized significands are in the range [0.5, 1.0). We
9451 want the exponent as if they were [1.0, 2.0) so get the
9452 exponent and subtract 1. */
9453 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9454 return fold_convert (rettype, build_int_cst (NULL_TREE,
9455 REAL_EXP (value)-1));
9456 break;
9460 return NULL_TREE;
9463 /* Fold a call to builtin significand, if radix == 2. */
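/* For example, significand (12.0) folds to 1.5: forcing the exponent to 1
   below maps the internal significand from [0.5, 1.0) into [1.0, 2.0).  */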
9465 static tree
9466 fold_builtin_significand (tree arg, tree rettype)
9468 if (! validate_arg (arg, REAL_TYPE))
9469 return NULL_TREE;
9471 STRIP_NOPS (arg);
9473 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9475 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9477 switch (value->cl)
9479 case rvc_zero:
9480 case rvc_nan:
9481 case rvc_inf:
9482 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9483 return fold_convert (rettype, arg);
9484 case rvc_normal:
9485 /* For normal numbers, proceed iff radix == 2. */
9486 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9488 REAL_VALUE_TYPE result = *value;
9489 /* In GCC, normalized significands are in the range [0.5,
9490 1.0). We want them to be [1.0, 2.0) so set the
9491 exponent to 1. */
9492 SET_REAL_EXP (&result, 1);
9493 return build_real (rettype, result);
9495 break;
9499 return NULL_TREE;
9502 /* Fold a call to builtin frexp; we can assume the base is 2. */
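/* For example, frexp (8.0, &e) folds to (e = 4, 0.5): the stored exponent
   is 4, and resetting it to 0 yields the fraction 0.5 in [0.5, 1.0).  */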
9504 static tree
9505 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9507 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9508 return NULL_TREE;
9510 STRIP_NOPS (arg0);
9512 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9513 return NULL_TREE;
9515 arg1 = build_fold_indirect_ref (arg1);
9517 /* Proceed if a valid pointer type was passed in. */
9518 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9520 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9521 tree frac, exp;
9523 switch (value->cl)
9525 case rvc_zero:
9526 /* For +-0, return (*exp = 0, +-0). */
9527 exp = integer_zero_node;
9528 frac = arg0;
9529 break;
9530 case rvc_nan:
9531 case rvc_inf:
9532 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9533 return omit_one_operand (rettype, arg0, arg1);
9534 case rvc_normal:
9536 /* Since the frexp function always expects base 2, and in
9537 GCC normalized significands are already in the range
9538 [0.5, 1.0), we have exactly what frexp wants. */
9539 REAL_VALUE_TYPE frac_rvt = *value;
9540 SET_REAL_EXP (&frac_rvt, 0);
9541 frac = build_real (rettype, frac_rvt);
9542 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9544 break;
9545 default:
9546 gcc_unreachable ();
9549 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9550 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9551 TREE_SIDE_EFFECTS (arg1) = 1;
9552 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9555 return NULL_TREE;
9558 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9559 then we can assume the base is two. If it's false, then we have to
9560 check the mode of the TYPE parameter in certain cases. */
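/* For example, ldexp (3.0, 2) folds to 12.0 and ldexp (x, 0) folds to x.  */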
9562 static tree
9563 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9565 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9567 STRIP_NOPS (arg0);
9568 STRIP_NOPS (arg1);
9570 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9571 if (real_zerop (arg0) || integer_zerop (arg1)
9572 || (TREE_CODE (arg0) == REAL_CST
9573 && !real_isfinite (&TREE_REAL_CST (arg0))))
9574 return omit_one_operand (type, arg0, arg1);
9576 /* If both arguments are constant, then try to evaluate it. */
9577 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9578 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9579 && host_integerp (arg1, 0))
9581 /* Bound the maximum adjustment to twice the range of the
9582 mode's valid exponents. Use abs to ensure the range is
9583 positive as a sanity check. */
9584 const long max_exp_adj = 2 *
9585 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9586 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9588 /* Get the user-requested adjustment. */
9589 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9591 /* The requested adjustment must be inside this range. This
9592 is a preliminary cap to avoid things like overflow; we
9593 may still fail to compute the result for other reasons. */
9594 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9596 REAL_VALUE_TYPE initial_result;
9598 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9600 /* Ensure we didn't overflow. */
9601 if (! real_isinf (&initial_result))
9603 const REAL_VALUE_TYPE trunc_result
9604 = real_value_truncate (TYPE_MODE (type), initial_result);
9606 /* Only proceed if the target mode can hold the
9607 resulting value. */
9608 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9609 return build_real (type, trunc_result);
9615 return NULL_TREE;
9618 /* Fold a call to builtin modf. */
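/* For example, modf (2.5, &i) folds to (i = 2.0, 0.5), and
   modf (-3.0, &i) folds to (i = -3.0, -0.0).  */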
9620 static tree
9621 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9623 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9624 return NULL_TREE;
9626 STRIP_NOPS (arg0);
9628 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9629 return NULL_TREE;
9631 arg1 = build_fold_indirect_ref (arg1);
9633 /* Proceed if a valid pointer type was passed in. */
9634 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9636 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9637 REAL_VALUE_TYPE trunc, frac;
9639 switch (value->cl)
9641 case rvc_nan:
9642 case rvc_zero:
9643 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9644 trunc = frac = *value;
9645 break;
9646 case rvc_inf:
9647 /* For +-Inf, return (*arg1 = arg0, +-0). */
9648 frac = dconst0;
9649 frac.sign = value->sign;
9650 trunc = *value;
9651 break;
9652 case rvc_normal:
9653 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9654 real_trunc (&trunc, VOIDmode, value);
9655 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9656 /* If the original number was negative and already
9657 integral, then the fractional part is -0.0. */
9658 if (value->sign && frac.cl == rvc_zero)
9659 frac.sign = value->sign;
9660 break;
9663 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9664 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9665 build_real (rettype, trunc));
9666 TREE_SIDE_EFFECTS (arg1) = 1;
9667 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9668 build_real (rettype, frac));
9671 return NULL_TREE;
9674 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9675 ARG is the argument for the call. */
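/* For example, with a constant argument isinf folds to -1, 0 or 1 and
   isnan folds to 0 or 1, while a non-constant isnan (x) becomes an
   unordered comparison of x with itself.  */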
9677 static tree
9678 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9680 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9681 REAL_VALUE_TYPE r;
9683 if (!validate_arg (arg, REAL_TYPE))
9684 return NULL_TREE;
9686 switch (builtin_index)
9688 case BUILT_IN_ISINF:
9689 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9690 return omit_one_operand (type, integer_zero_node, arg);
9692 if (TREE_CODE (arg) == REAL_CST)
9694 r = TREE_REAL_CST (arg);
9695 if (real_isinf (&r))
9696 return real_compare (GT_EXPR, &r, &dconst0)
9697 ? integer_one_node : integer_minus_one_node;
9698 else
9699 return integer_zero_node;
9702 return NULL_TREE;
9704 case BUILT_IN_ISINF_SIGN:
9706 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9707 /* In a boolean context, GCC will fold the inner COND_EXPR to
9708 1. So e.g. "if (isinf_sign(x))" would be folded to just
9709 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9710 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9711 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9712 tree tmp = NULL_TREE;
9714 arg = builtin_save_expr (arg);
9716 if (signbit_fn && isinf_fn)
9718 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
9719 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
9721 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
9722 signbit_call, integer_zero_node);
9723 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
9724 isinf_call, integer_zero_node);
9726 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
9727 integer_minus_one_node, integer_one_node);
9728 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
9729 integer_zero_node);
9732 return tmp;
9735 case BUILT_IN_ISFINITE:
9736 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9737 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9738 return omit_one_operand (type, integer_one_node, arg);
9740 if (TREE_CODE (arg) == REAL_CST)
9742 r = TREE_REAL_CST (arg);
9743 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9746 return NULL_TREE;
9748 case BUILT_IN_ISNAN:
9749 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9750 return omit_one_operand (type, integer_zero_node, arg);
9752 if (TREE_CODE (arg) == REAL_CST)
9754 r = TREE_REAL_CST (arg);
9755 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9758 arg = builtin_save_expr (arg);
9759 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9761 default:
9762 gcc_unreachable ();
9766 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9767 This builtin will generate code to return the appropriate floating
9768 point classification depending on the value of the floating point
9769 number passed in. The possible return values must be supplied as
9770 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9771 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9772 one floating point argument which is "type generic". */
9774 static tree
9775 fold_builtin_fpclassify (tree exp)
9777 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9778 arg, type, res, tmp;
9779 enum machine_mode mode;
9780 REAL_VALUE_TYPE r;
9781 char buf[128];
9783 /* Verify the required arguments in the original call. */
9784 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9785 INTEGER_TYPE, INTEGER_TYPE,
9786 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9787 return NULL_TREE;
9789 fp_nan = CALL_EXPR_ARG (exp, 0);
9790 fp_infinite = CALL_EXPR_ARG (exp, 1);
9791 fp_normal = CALL_EXPR_ARG (exp, 2);
9792 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9793 fp_zero = CALL_EXPR_ARG (exp, 4);
9794 arg = CALL_EXPR_ARG (exp, 5);
9795 type = TREE_TYPE (arg);
9796 mode = TYPE_MODE (type);
9797 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
9799 /* fpclassify(x) ->
9800 isnan(x) ? FP_NAN :
9801 (fabs(x) == Inf ? FP_INFINITE :
9802 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9803 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9805 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9806 build_real (type, dconst0));
9807 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
9809 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9810 real_from_string (&r, buf);
9811 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
9812 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
9814 if (HONOR_INFINITIES (mode))
9816 real_inf (&r);
9817 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9818 build_real (type, r));
9819 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
9822 if (HONOR_NANS (mode))
9824 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
9825 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
9828 return res;
9831 /* Fold a call to an unordered comparison function such as
9832 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9833 being called and ARG0 and ARG1 are the arguments for the call.
9834 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9835 the opposite of the desired result. UNORDERED_CODE is used
9836 for modes that can hold NaNs and ORDERED_CODE is used for
9837 the rest. */
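/* For example, isgreater (x, y) folds to the negation of an
   unordered-or-less-equal comparison (UNLE_EXPR) when the mode can hold
   NaNs, so a NaN operand yields 0, and to !(x <= y) otherwise.  */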
9839 static tree
9840 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9841 enum tree_code unordered_code,
9842 enum tree_code ordered_code)
9844 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9845 enum tree_code code;
9846 tree type0, type1;
9847 enum tree_code code0, code1;
9848 tree cmp_type = NULL_TREE;
9850 type0 = TREE_TYPE (arg0);
9851 type1 = TREE_TYPE (arg1);
9853 code0 = TREE_CODE (type0);
9854 code1 = TREE_CODE (type1);
9856 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9857 /* Choose the wider of two real types. */
9858 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9859 ? type0 : type1;
9860 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9861 cmp_type = type0;
9862 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9863 cmp_type = type1;
9865 arg0 = fold_convert (cmp_type, arg0);
9866 arg1 = fold_convert (cmp_type, arg1);
9868 if (unordered_code == UNORDERED_EXPR)
9870 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9871 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9872 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9875 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9876 : ordered_code;
9877 return fold_build1 (TRUTH_NOT_EXPR, type,
9878 fold_build2 (code, type, arg0, arg1));
9881 /* Fold a call to built-in function FNDECL with 0 arguments.
9882 IGNORE is true if the result of the function call is ignored. This
9883 function returns NULL_TREE if no simplification was possible. */
9885 static tree
9886 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9888 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9889 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9890 switch (fcode)
9892 CASE_FLT_FN (BUILT_IN_INF):
9893 case BUILT_IN_INFD32:
9894 case BUILT_IN_INFD64:
9895 case BUILT_IN_INFD128:
9896 return fold_builtin_inf (type, true);
9898 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9899 return fold_builtin_inf (type, false);
9901 case BUILT_IN_CLASSIFY_TYPE:
9902 return fold_builtin_classify_type (NULL_TREE);
9904 default:
9905 break;
9907 return NULL_TREE;
9910 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9911 IGNORE is true if the result of the function call is ignored. This
9912 function returns NULL_TREE if no simplification was possible. */
9914 static tree
9915 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9917 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9918 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9919 switch (fcode)
9922 case BUILT_IN_CONSTANT_P:
9924 tree val = fold_builtin_constant_p (arg0);
9926 /* Gimplification will pull the CALL_EXPR for the builtin out of
9927 an if condition. When not optimizing, we'll not CSE it back.
9928 To avoid link error types of regressions, return false now. */
9929 if (!val && !optimize)
9930 val = integer_zero_node;
9932 return val;
9935 case BUILT_IN_CLASSIFY_TYPE:
9936 return fold_builtin_classify_type (arg0);
9938 case BUILT_IN_STRLEN:
9939 return fold_builtin_strlen (arg0);
9941 CASE_FLT_FN (BUILT_IN_FABS):
9942 return fold_builtin_fabs (arg0, type);
9944 case BUILT_IN_ABS:
9945 case BUILT_IN_LABS:
9946 case BUILT_IN_LLABS:
9947 case BUILT_IN_IMAXABS:
9948 return fold_builtin_abs (arg0, type);
9950 CASE_FLT_FN (BUILT_IN_CONJ):
9951 if (validate_arg (arg0, COMPLEX_TYPE))
9952 return fold_build1 (CONJ_EXPR, type, arg0);
9953 break;
9955 CASE_FLT_FN (BUILT_IN_CREAL):
9956 if (validate_arg (arg0, COMPLEX_TYPE))
9957 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));
9958 break;
9960 CASE_FLT_FN (BUILT_IN_CIMAG):
9961 if (validate_arg (arg0, COMPLEX_TYPE))
9962 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9963 break;
9965 CASE_FLT_FN (BUILT_IN_CCOS):
9966 CASE_FLT_FN (BUILT_IN_CCOSH):
9967 /* These functions are "even", i.e. f(x) == f(-x). */
9968 if (validate_arg (arg0, COMPLEX_TYPE))
9970 tree narg = fold_strip_sign_ops (arg0);
9971 if (narg)
9972 return build_call_expr (fndecl, 1, narg);
9974 break;
9976 CASE_FLT_FN (BUILT_IN_CABS):
9977 return fold_builtin_cabs (arg0, type, fndecl);
9979 CASE_FLT_FN (BUILT_IN_CARG):
9980 return fold_builtin_carg (arg0, type);
9982 CASE_FLT_FN (BUILT_IN_SQRT):
9983 return fold_builtin_sqrt (arg0, type);
9985 CASE_FLT_FN (BUILT_IN_CBRT):
9986 return fold_builtin_cbrt (arg0, type);
9988 CASE_FLT_FN (BUILT_IN_ASIN):
9989 if (validate_arg (arg0, REAL_TYPE))
9990 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9991 &dconstm1, &dconst1, true);
9992 break;
9994 CASE_FLT_FN (BUILT_IN_ACOS):
9995 if (validate_arg (arg0, REAL_TYPE))
9996 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9997 &dconstm1, &dconst1, true);
9998 break;
10000 CASE_FLT_FN (BUILT_IN_ATAN):
10001 if (validate_arg (arg0, REAL_TYPE))
10002 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10003 break;
10005 CASE_FLT_FN (BUILT_IN_ASINH):
10006 if (validate_arg (arg0, REAL_TYPE))
10007 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10008 break;
10010 CASE_FLT_FN (BUILT_IN_ACOSH):
10011 if (validate_arg (arg0, REAL_TYPE))
10012 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10013 &dconst1, NULL, true);
10014 break;
10016 CASE_FLT_FN (BUILT_IN_ATANH):
10017 if (validate_arg (arg0, REAL_TYPE))
10018 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10019 &dconstm1, &dconst1, false);
10020 break;
10022 CASE_FLT_FN (BUILT_IN_SIN):
10023 if (validate_arg (arg0, REAL_TYPE))
10024 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10025 break;
10027 CASE_FLT_FN (BUILT_IN_COS):
10028 return fold_builtin_cos (arg0, type, fndecl);
10029 break;
10031 CASE_FLT_FN (BUILT_IN_TAN):
10032 return fold_builtin_tan (arg0, type);
10034 CASE_FLT_FN (BUILT_IN_CEXP):
10035 return fold_builtin_cexp (arg0, type);
10037 CASE_FLT_FN (BUILT_IN_CEXPI):
10038 if (validate_arg (arg0, REAL_TYPE))
10039 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10040 break;
10042 CASE_FLT_FN (BUILT_IN_SINH):
10043 if (validate_arg (arg0, REAL_TYPE))
10044 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10045 break;
10047 CASE_FLT_FN (BUILT_IN_COSH):
10048 return fold_builtin_cosh (arg0, type, fndecl);
10050 CASE_FLT_FN (BUILT_IN_TANH):
10051 if (validate_arg (arg0, REAL_TYPE))
10052 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10053 break;
10055 CASE_FLT_FN (BUILT_IN_ERF):
10056 if (validate_arg (arg0, REAL_TYPE))
10057 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10058 break;
10060 CASE_FLT_FN (BUILT_IN_ERFC):
10061 if (validate_arg (arg0, REAL_TYPE))
10062 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10063 break;
10065 CASE_FLT_FN (BUILT_IN_TGAMMA):
10066 if (validate_arg (arg0, REAL_TYPE))
10067 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10068 break;
10070 CASE_FLT_FN (BUILT_IN_EXP):
10071 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10073 CASE_FLT_FN (BUILT_IN_EXP2):
10074 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10076 CASE_FLT_FN (BUILT_IN_EXP10):
10077 CASE_FLT_FN (BUILT_IN_POW10):
10078 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10080 CASE_FLT_FN (BUILT_IN_EXPM1):
10081 if (validate_arg (arg0, REAL_TYPE))
10082 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10083 break;
10085 CASE_FLT_FN (BUILT_IN_LOG):
10086 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10088 CASE_FLT_FN (BUILT_IN_LOG2):
10089 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10091 CASE_FLT_FN (BUILT_IN_LOG10):
10092 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10094 CASE_FLT_FN (BUILT_IN_LOG1P):
10095 if (validate_arg (arg0, REAL_TYPE))
10096 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10097 &dconstm1, NULL, false);
10098 break;
10100 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10101 CASE_FLT_FN (BUILT_IN_J0):
10102 if (validate_arg (arg0, REAL_TYPE))
10103 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10104 NULL, NULL, 0);
10105 break;
10107 CASE_FLT_FN (BUILT_IN_J1):
10108 if (validate_arg (arg0, REAL_TYPE))
10109 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10110 NULL, NULL, 0);
10111 break;
10113 CASE_FLT_FN (BUILT_IN_Y0):
10114 if (validate_arg (arg0, REAL_TYPE))
10115 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10116 &dconst0, NULL, false);
10117 break;
10119 CASE_FLT_FN (BUILT_IN_Y1):
10120 if (validate_arg (arg0, REAL_TYPE))
10121 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10122 &dconst0, NULL, false);
10123 break;
10124 #endif
10126 CASE_FLT_FN (BUILT_IN_NAN):
10127 case BUILT_IN_NAND32:
10128 case BUILT_IN_NAND64:
10129 case BUILT_IN_NAND128:
10130 return fold_builtin_nan (arg0, type, true);
10132 CASE_FLT_FN (BUILT_IN_NANS):
10133 return fold_builtin_nan (arg0, type, false);
10135 CASE_FLT_FN (BUILT_IN_FLOOR):
10136 return fold_builtin_floor (fndecl, arg0);
10138 CASE_FLT_FN (BUILT_IN_CEIL):
10139 return fold_builtin_ceil (fndecl, arg0);
10141 CASE_FLT_FN (BUILT_IN_TRUNC):
10142 return fold_builtin_trunc (fndecl, arg0);
10144 CASE_FLT_FN (BUILT_IN_ROUND):
10145 return fold_builtin_round (fndecl, arg0);
10147 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10148 CASE_FLT_FN (BUILT_IN_RINT):
10149 return fold_trunc_transparent_mathfn (fndecl, arg0);
10151 CASE_FLT_FN (BUILT_IN_LCEIL):
10152 CASE_FLT_FN (BUILT_IN_LLCEIL):
10153 CASE_FLT_FN (BUILT_IN_LFLOOR):
10154 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10155 CASE_FLT_FN (BUILT_IN_LROUND):
10156 CASE_FLT_FN (BUILT_IN_LLROUND):
10157 return fold_builtin_int_roundingfn (fndecl, arg0);
10159 CASE_FLT_FN (BUILT_IN_LRINT):
10160 CASE_FLT_FN (BUILT_IN_LLRINT):
10161 return fold_fixed_mathfn (fndecl, arg0);
10163 case BUILT_IN_BSWAP32:
10164 case BUILT_IN_BSWAP64:
10165 return fold_builtin_bswap (fndecl, arg0);
10167 CASE_INT_FN (BUILT_IN_FFS):
10168 CASE_INT_FN (BUILT_IN_CLZ):
10169 CASE_INT_FN (BUILT_IN_CTZ):
10170 CASE_INT_FN (BUILT_IN_POPCOUNT):
10171 CASE_INT_FN (BUILT_IN_PARITY):
10172 return fold_builtin_bitop (fndecl, arg0);
10174 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10175 return fold_builtin_signbit (arg0, type);
10177 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10178 return fold_builtin_significand (arg0, type);
10180 CASE_FLT_FN (BUILT_IN_ILOGB):
10181 CASE_FLT_FN (BUILT_IN_LOGB):
10182 return fold_builtin_logb (arg0, type);
10184 case BUILT_IN_ISASCII:
10185 return fold_builtin_isascii (arg0);
10187 case BUILT_IN_TOASCII:
10188 return fold_builtin_toascii (arg0);
10190 case BUILT_IN_ISDIGIT:
10191 return fold_builtin_isdigit (arg0);
10193 CASE_FLT_FN (BUILT_IN_FINITE):
10194 case BUILT_IN_FINITED32:
10195 case BUILT_IN_FINITED64:
10196 case BUILT_IN_FINITED128:
10197 case BUILT_IN_ISFINITE:
10198 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10200 CASE_FLT_FN (BUILT_IN_ISINF):
10201 case BUILT_IN_ISINFD32:
10202 case BUILT_IN_ISINFD64:
10203 case BUILT_IN_ISINFD128:
10204 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10206 case BUILT_IN_ISINF_SIGN:
10207 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10209 CASE_FLT_FN (BUILT_IN_ISNAN):
10210 case BUILT_IN_ISNAND32:
10211 case BUILT_IN_ISNAND64:
10212 case BUILT_IN_ISNAND128:
10213 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10215 case BUILT_IN_PRINTF:
10216 case BUILT_IN_PRINTF_UNLOCKED:
10217 case BUILT_IN_VPRINTF:
10218 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10220 default:
10221 break;
10224 return NULL_TREE;
10228 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10229 IGNORE is true if the result of the function call is ignored. This
10230 function returns NULL_TREE if no simplification was possible. */
10232 static tree
10233 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10235 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10236 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10238 switch (fcode)
10240 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10241 CASE_FLT_FN (BUILT_IN_JN):
10242 if (validate_arg (arg0, INTEGER_TYPE)
10243 && validate_arg (arg1, REAL_TYPE))
10244 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10245 break;
10247 CASE_FLT_FN (BUILT_IN_YN):
10248 if (validate_arg (arg0, INTEGER_TYPE)
10249 && validate_arg (arg1, REAL_TYPE))
10250 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10251 &dconst0, false);
10252 break;
10254 CASE_FLT_FN (BUILT_IN_DREM):
10255 CASE_FLT_FN (BUILT_IN_REMAINDER):
10256 if (validate_arg (arg0, REAL_TYPE)
10257 && validate_arg (arg1, REAL_TYPE))
10258 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10259 break;
10261 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10262 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10263 if (validate_arg (arg0, REAL_TYPE)
10264 && validate_arg (arg1, POINTER_TYPE))
10265 return do_mpfr_lgamma_r (arg0, arg1, type);
10266 break;
10267 #endif
10269 CASE_FLT_FN (BUILT_IN_ATAN2):
10270 if (validate_arg (arg0, REAL_TYPE)
10271 && validate_arg (arg1, REAL_TYPE))
10272 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10273 break;
10275 CASE_FLT_FN (BUILT_IN_FDIM):
10276 if (validate_arg (arg0, REAL_TYPE)
10277 && validate_arg (arg1, REAL_TYPE))
10278 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10279 break;
10281 CASE_FLT_FN (BUILT_IN_HYPOT):
10282 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10284 CASE_FLT_FN (BUILT_IN_LDEXP):
10285 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10286 CASE_FLT_FN (BUILT_IN_SCALBN):
10287 CASE_FLT_FN (BUILT_IN_SCALBLN):
10288 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10290 CASE_FLT_FN (BUILT_IN_FREXP):
10291 return fold_builtin_frexp (arg0, arg1, type);
10293 CASE_FLT_FN (BUILT_IN_MODF):
10294 return fold_builtin_modf (arg0, arg1, type);
10296 case BUILT_IN_BZERO:
10297 return fold_builtin_bzero (arg0, arg1, ignore);
10299 case BUILT_IN_FPUTS:
10300 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10302 case BUILT_IN_FPUTS_UNLOCKED:
10303 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10305 case BUILT_IN_STRSTR:
10306 return fold_builtin_strstr (arg0, arg1, type);
10308 case BUILT_IN_STRCAT:
10309 return fold_builtin_strcat (arg0, arg1);
10311 case BUILT_IN_STRSPN:
10312 return fold_builtin_strspn (arg0, arg1);
10314 case BUILT_IN_STRCSPN:
10315 return fold_builtin_strcspn (arg0, arg1);
10317 case BUILT_IN_STRCHR:
10318 case BUILT_IN_INDEX:
10319 return fold_builtin_strchr (arg0, arg1, type);
10321 case BUILT_IN_STRRCHR:
10322 case BUILT_IN_RINDEX:
10323 return fold_builtin_strrchr (arg0, arg1, type);
10325 case BUILT_IN_STRCPY:
10326 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10328 case BUILT_IN_STRCMP:
10329 return fold_builtin_strcmp (arg0, arg1);
10331 case BUILT_IN_STRPBRK:
10332 return fold_builtin_strpbrk (arg0, arg1, type);
10334 case BUILT_IN_EXPECT:
10335 return fold_builtin_expect (arg0, arg1);
10337 CASE_FLT_FN (BUILT_IN_POW):
10338 return fold_builtin_pow (fndecl, arg0, arg1, type);
10340 CASE_FLT_FN (BUILT_IN_POWI):
10341 return fold_builtin_powi (fndecl, arg0, arg1, type);
10343 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10344 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10346 CASE_FLT_FN (BUILT_IN_FMIN):
10347 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10349 CASE_FLT_FN (BUILT_IN_FMAX):
10350 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10352 case BUILT_IN_ISGREATER:
10353 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10354 case BUILT_IN_ISGREATEREQUAL:
10355 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10356 case BUILT_IN_ISLESS:
10357 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10358 case BUILT_IN_ISLESSEQUAL:
10359 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10360 case BUILT_IN_ISLESSGREATER:
10361 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10362 case BUILT_IN_ISUNORDERED:
10363 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10364 NOP_EXPR);
10366 /* We do the folding for va_start in the expander. */
10367 case BUILT_IN_VA_START:
10368 break;
10370 case BUILT_IN_SPRINTF:
10371 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10373 case BUILT_IN_OBJECT_SIZE:
10374 return fold_builtin_object_size (arg0, arg1);
10376 case BUILT_IN_PRINTF:
10377 case BUILT_IN_PRINTF_UNLOCKED:
10378 case BUILT_IN_VPRINTF:
10379 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10381 case BUILT_IN_PRINTF_CHK:
10382 case BUILT_IN_VPRINTF_CHK:
10383 if (!validate_arg (arg0, INTEGER_TYPE)
10384 || TREE_SIDE_EFFECTS (arg0))
10385 return NULL_TREE;
10386 else
10387 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10388 break;
10390 case BUILT_IN_FPRINTF:
10391 case BUILT_IN_FPRINTF_UNLOCKED:
10392 case BUILT_IN_VFPRINTF:
10393 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10394 ignore, fcode);
10396 default:
10397 break;
10399 return NULL_TREE;
10402 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10403 and ARG2. IGNORE is true if the result of the function call is ignored.
10404 This function returns NULL_TREE if no simplification was possible. */
10406 static tree
10407 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10409 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10410 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10411 switch (fcode)
10414 CASE_FLT_FN (BUILT_IN_SINCOS):
10415 return fold_builtin_sincos (arg0, arg1, arg2);
10417 CASE_FLT_FN (BUILT_IN_FMA):
10418 if (validate_arg (arg0, REAL_TYPE)
10419 && validate_arg (arg1, REAL_TYPE)
10420 && validate_arg (arg2, REAL_TYPE))
10421 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10422 break;
10424 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10425 CASE_FLT_FN (BUILT_IN_REMQUO):
10426 if (validate_arg (arg0, REAL_TYPE)
10427 && validate_arg (arg1, REAL_TYPE)
10428 && validate_arg (arg2, POINTER_TYPE))
10429 return do_mpfr_remquo (arg0, arg1, arg2);
10430 break;
10431 #endif
10433 case BUILT_IN_MEMSET:
10434 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10436 case BUILT_IN_BCOPY:
10437 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10439 case BUILT_IN_MEMCPY:
10440 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10442 case BUILT_IN_MEMPCPY:
10443 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10445 case BUILT_IN_MEMMOVE:
10446 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10448 case BUILT_IN_STRNCAT:
10449 return fold_builtin_strncat (arg0, arg1, arg2);
10451 case BUILT_IN_STRNCPY:
10452 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10454 case BUILT_IN_STRNCMP:
10455 return fold_builtin_strncmp (arg0, arg1, arg2);
10457 case BUILT_IN_MEMCHR:
10458 return fold_builtin_memchr (arg0, arg1, arg2, type);
10460 case BUILT_IN_BCMP:
10461 case BUILT_IN_MEMCMP:
10462 return fold_builtin_memcmp (arg0, arg1, arg2);
10464 case BUILT_IN_SPRINTF:
10465 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10467 case BUILT_IN_STRCPY_CHK:
10468 case BUILT_IN_STPCPY_CHK:
10469 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10470 ignore, fcode);
10472 case BUILT_IN_STRCAT_CHK:
10473 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10475 case BUILT_IN_PRINTF_CHK:
10476 case BUILT_IN_VPRINTF_CHK:
10477 if (!validate_arg (arg0, INTEGER_TYPE)
10478 || TREE_SIDE_EFFECTS (arg0))
10479 return NULL_TREE;
10480 else
10481 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10482 break;
10484 case BUILT_IN_FPRINTF:
10485 case BUILT_IN_FPRINTF_UNLOCKED:
10486 case BUILT_IN_VFPRINTF:
10487 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10489 case BUILT_IN_FPRINTF_CHK:
10490 case BUILT_IN_VFPRINTF_CHK:
10491 if (!validate_arg (arg1, INTEGER_TYPE)
10492 || TREE_SIDE_EFFECTS (arg1))
10493 return NULL_TREE;
10494 else
10495 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10496 ignore, fcode);
10498 default:
10499 break;
10501 return NULL_TREE;
10504 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10505 ARG2, and ARG3. IGNORE is true if the result of the function call is
10506 ignored. This function returns NULL_TREE if no simplification was
10507 possible. */
10509 static tree
10510 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10511 bool ignore)
10513 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10515 switch (fcode)
10517 case BUILT_IN_MEMCPY_CHK:
10518 case BUILT_IN_MEMPCPY_CHK:
10519 case BUILT_IN_MEMMOVE_CHK:
10520 case BUILT_IN_MEMSET_CHK:
10521 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10522 NULL_TREE, ignore,
10523 DECL_FUNCTION_CODE (fndecl));
10525 case BUILT_IN_STRNCPY_CHK:
10526 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10528 case BUILT_IN_STRNCAT_CHK:
10529 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10531 case BUILT_IN_FPRINTF_CHK:
10532 case BUILT_IN_VFPRINTF_CHK:
10533 if (!validate_arg (arg1, INTEGER_TYPE)
10534 || TREE_SIDE_EFFECTS (arg1))
10535 return NULL_TREE;
10536 else
10537 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10538 ignore, fcode);
10539 break;
10541 default:
10542 break;
10544 return NULL_TREE;
10547 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10548 arguments, where NARGS <= 4. IGNORE is true if the result of the
10549 function call is ignored. This function returns NULL_TREE if no
10550 simplification was possible. Note that this only folds builtins with
10551 fixed argument patterns. Foldings that do varargs-to-varargs
10552 transformations, or that match calls with more than 4 arguments,
10553 need to be handled with fold_builtin_varargs instead. */
10555 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10557 static tree
10558 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10560 tree ret = NULL_TREE;
10562 switch (nargs)
10564 case 0:
10565 ret = fold_builtin_0 (fndecl, ignore);
10566 break;
10567 case 1:
10568 ret = fold_builtin_1 (fndecl, args[0], ignore);
10569 break;
10570 case 2:
10571 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10572 break;
10573 case 3:
10574 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10575 break;
10576 case 4:
10577 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10578 ignore);
10579 break;
10580 default:
10581 break;
10583 if (ret)
10585 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10586 TREE_NO_WARNING (ret) = 1;
10587 return ret;
10589 return NULL_TREE;
10592 /* Builtins with folding operations that operate on "..." arguments
10593 need special handling; we need to store the arguments in a convenient
10594 data structure before attempting any folding. Fortunately there are
10595 only a few builtins that fall into this category. FNDECL is the
10596 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10597 result of the function call is ignored. */
10599 static tree
10600 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10602 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10603 tree ret = NULL_TREE;
10605 switch (fcode)
10607 case BUILT_IN_SPRINTF_CHK:
10608 case BUILT_IN_VSPRINTF_CHK:
10609 ret = fold_builtin_sprintf_chk (exp, fcode);
10610 break;
10612 case BUILT_IN_SNPRINTF_CHK:
10613 case BUILT_IN_VSNPRINTF_CHK:
10614 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10615 break;
10617 case BUILT_IN_FPCLASSIFY:
10618 ret = fold_builtin_fpclassify (exp);
10619 break;
10621 default:
10622 break;
10624 if (ret)
10626 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10627 TREE_NO_WARNING (ret) = 1;
10628 return ret;
10630 return NULL_TREE;
10633 /* A wrapper function for builtin folding that prevents warnings for
10634 "statement without effect" and the like, caused by removing the
10635 call node earlier than the warning is generated. */
10637 tree
10638 fold_call_expr (tree exp, bool ignore)
10640 tree ret = NULL_TREE;
10641 tree fndecl = get_callee_fndecl (exp);
10642 if (fndecl
10643 && TREE_CODE (fndecl) == FUNCTION_DECL
10644 && DECL_BUILT_IN (fndecl)
10645 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10646 yet. Defer folding until we see all the arguments
10647 (after inlining). */
10648 && !CALL_EXPR_VA_ARG_PACK (exp))
10650 int nargs = call_expr_nargs (exp);
10652 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10653 instead the last argument is __builtin_va_arg_pack (). Defer folding
10654 even in that case, until arguments are finalized. */
10655 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10657 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10658 if (fndecl2
10659 && TREE_CODE (fndecl2) == FUNCTION_DECL
10660 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10661 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10662 return NULL_TREE;
10665 /* FIXME: Don't use a list in this interface. */
10666 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10667 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10668 else
10670 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10672 tree *args = CALL_EXPR_ARGP (exp);
10673 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10675 if (!ret)
10676 ret = fold_builtin_varargs (fndecl, exp, ignore);
10677 if (ret)
10679 /* Propagate location information from original call to
10680 expansion of builtin. Otherwise things like
10681 maybe_emit_chk_warning, that operate on the expansion
10682 of a builtin, will use the wrong location information. */
10683 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10685 tree realret = ret;
10686 if (TREE_CODE (ret) == NOP_EXPR)
10687 realret = TREE_OPERAND (ret, 0);
10688 if (CAN_HAVE_LOCATION_P (realret)
10689 && !EXPR_HAS_LOCATION (realret))
10690 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10691 return realret;
10693 return ret;
10697 return NULL_TREE;
10700 /* Conveniently construct a function call expression. FNDECL names the
10701 function to be called and ARGLIST is a TREE_LIST of arguments. */
10703 tree
10704 build_function_call_expr (tree fndecl, tree arglist)
10706 tree fntype = TREE_TYPE (fndecl);
10707 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10708 int n = list_length (arglist);
10709 tree *argarray = (tree *) alloca (n * sizeof (tree));
10710 int i;
10712 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10713 argarray[i] = TREE_VALUE (arglist);
10714 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10717 /* Conveniently construct a function call expression. FNDECL names the
10718 function to be called, N is the number of arguments, and the "..."
10719 parameters are the argument expressions. */
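/* For example, the carg folding above builds atan2 (b, a) with
   build_call_expr (atan2_fn, 2, i_arg, r_arg).  */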
10721 tree
10722 build_call_expr (tree fndecl, int n, ...)
10724 va_list ap;
10725 tree fntype = TREE_TYPE (fndecl);
10726 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10727 tree *argarray = (tree *) alloca (n * sizeof (tree));
10728 int i;
10730 va_start (ap, n);
10731 for (i = 0; i < n; i++)
10732 argarray[i] = va_arg (ap, tree);
10733 va_end (ap);
10734 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10737 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10738 N arguments are passed in the array ARGARRAY. */
10740 tree
10741 fold_builtin_call_array (tree type,
10742 tree fn,
10743 int n,
10744 tree *argarray)
10746 tree ret = NULL_TREE;
10747 int i;
10748 tree exp;
10750 if (TREE_CODE (fn) == ADDR_EXPR)
10752 tree fndecl = TREE_OPERAND (fn, 0);
10753 if (TREE_CODE (fndecl) == FUNCTION_DECL
10754 && DECL_BUILT_IN (fndecl))
10756 /* If last argument is __builtin_va_arg_pack (), arguments to this
10757 function are not finalized yet. Defer folding until they are. */
10758 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10760 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10761 if (fndecl2
10762 && TREE_CODE (fndecl2) == FUNCTION_DECL
10763 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10764 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10765 return build_call_array (type, fn, n, argarray);
10767 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10769 tree arglist = NULL_TREE;
10770 for (i = n - 1; i >= 0; i--)
10771 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10772 ret = targetm.fold_builtin (fndecl, arglist, false);
10773 if (ret)
10774 return ret;
10776 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10778 /* First try the transformations that don't require consing up
10779 an exp. */
10780 ret = fold_builtin_n (fndecl, argarray, n, false);
10781 if (ret)
10782 return ret;
10785 /* If we got this far, we need to build an exp. */
10786 exp = build_call_array (type, fn, n, argarray);
10787 ret = fold_builtin_varargs (fndecl, exp, false);
10788 return ret ? ret : exp;
10792 return build_call_array (type, fn, n, argarray);
10795 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10796 along with N new arguments specified as the "..." parameters. SKIP
10797 is the number of arguments in EXP to be omitted. This function is used
10798 to do varargs-to-varargs transformations. */
10800 static tree
10801 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10803 int oldnargs = call_expr_nargs (exp);
10804 int nargs = oldnargs - skip + n;
10805 tree fntype = TREE_TYPE (fndecl);
10806 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10807 tree *buffer;
10809 if (n > 0)
10811 int i, j;
10812 va_list ap;
10814 buffer = XALLOCAVEC (tree, nargs);
10815 va_start (ap, n);
10816 for (i = 0; i < n; i++)
10817 buffer[i] = va_arg (ap, tree);
10818 va_end (ap);
10819 for (j = skip; j < oldnargs; j++, i++)
10820 buffer[i] = CALL_EXPR_ARG (exp, j);
10822 else
10823 buffer = CALL_EXPR_ARGP (exp) + skip;
10825 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10828 /* Validate a single argument ARG against a tree code CODE representing
10829 a type. */
10831 static bool
10832 validate_arg (const_tree arg, enum tree_code code)
10834 if (!arg)
10835 return false;
10836 else if (code == POINTER_TYPE)
10837 return POINTER_TYPE_P (TREE_TYPE (arg));
10838 else if (code == INTEGER_TYPE)
10839 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10840 return code == TREE_CODE (TREE_TYPE (arg));
10843 /* This function validates the types of a function call argument list
10844 against a specified list of tree_codes. If the last specifier is a 0,
10845 that represents an ellipsis; otherwise the last specifier must be a
10846 VOID_TYPE.
10848 This is the GIMPLE version of validate_arglist. Eventually we want to
10849 completely convert builtins.c to work from GIMPLEs and the tree based
10850 validate_arglist will then be removed. */
10852 bool
10853 validate_gimple_arglist (const_gimple call, ...)
10855 enum tree_code code;
10856 bool res = 0;
10857 va_list ap;
10858 const_tree arg;
10859 size_t i;
10861 va_start (ap, call);
10862 i = 0;
10866 code = va_arg (ap, enum tree_code);
10867 switch (code)
10869 case 0:
10870 /* This signifies an ellipsis; any further arguments are all ok. */
10871 res = true;
10872 goto end;
10873 case VOID_TYPE:
10874 /* This signifies an endlink; if no arguments remain, return
10875 true, otherwise return false. */
10876 res = (i == gimple_call_num_args (call));
10877 goto end;
10878 default:
10879 /* If no parameters remain or the parameter's code does not
10880 match the specified code, return false. Otherwise continue
10881 checking any remaining arguments. */
10882 arg = gimple_call_arg (call, i++);
10883 if (!validate_arg (arg, code))
10884 goto end;
10885 break;
10888 while (1);
10890 /* We need gotos here since we can only have one VA_CLOSE in a
10891 function. */
10892 end: ;
10893 va_end (ap);
10895 return res;
10898 /* This function validates the types of a function call argument list
10899 against a specified list of tree_codes. If the last specifier is a 0,
10900 that represents an ellipsis; otherwise the last specifier must be a
10901 VOID_TYPE. */
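/* For example, fold_builtin_fpclassify above checks its call with
   validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
   INTEGER_TYPE, INTEGER_TYPE, REAL_TYPE, VOID_TYPE).  */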
10903 bool
10904 validate_arglist (const_tree callexpr, ...)
10906 enum tree_code code;
10907 bool res = 0;
10908 va_list ap;
10909 const_call_expr_arg_iterator iter;
10910 const_tree arg;
10912 va_start (ap, callexpr);
10913 init_const_call_expr_arg_iterator (callexpr, &iter);
10917 code = va_arg (ap, enum tree_code);
10918 switch (code)
10920 case 0:
10921 /* This signifies an ellipsis; any further arguments are all ok. */
10922 res = true;
10923 goto end;
10924 case VOID_TYPE:
10925 /* This signifies an endlink; if no arguments remain, return
10926 true, otherwise return false. */
10927 res = !more_const_call_expr_args_p (&iter);
10928 goto end;
10929 default:
10930 /* If no parameters remain or the parameter's code does not
10931 match the specified code, return false. Otherwise continue
10932 checking any remaining arguments. */
10933 arg = next_const_call_expr_arg (&iter);
10934 if (!validate_arg (arg, code))
10935 goto end;
10936 break;
10939 while (1);
10941 /* We need gotos here since we can only have one VA_CLOSE in a
10942 function. */
10943 end: ;
10944 va_end (ap);
10946 return res;
10949 /* Default target-specific builtin expander that does nothing. */
10951 rtx
10952 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10953 rtx target ATTRIBUTE_UNUSED,
10954 rtx subtarget ATTRIBUTE_UNUSED,
10955 enum machine_mode mode ATTRIBUTE_UNUSED,
10956 int ignore ATTRIBUTE_UNUSED)
10958 return NULL_RTX;
10961 /* Returns true if EXP represents data that would potentially reside
10962 in a readonly section. */
10964 static bool
10965 readonly_data_expr (tree exp)
10967 STRIP_NOPS (exp);
10969 if (TREE_CODE (exp) != ADDR_EXPR)
10970 return false;
10972 exp = get_base_address (TREE_OPERAND (exp, 0));
10973 if (!exp)
10974 return false;
10976 /* Make sure we call decl_readonly_section only for trees it
10977 can handle (since it returns true for everything it doesn't
10978 understand). */
10979 if (TREE_CODE (exp) == STRING_CST
10980 || TREE_CODE (exp) == CONSTRUCTOR
10981 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10982 return decl_readonly_section (exp, 0);
10983 else
10984 return false;
10987 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10988 to the call, and TYPE is its return type.
10990 Return NULL_TREE if no simplification was possible, otherwise return the
10991 simplified form of the call as a tree.
10993 The simplified form may be a constant or other expression which
10994 computes the same value, but in a more efficient manner (including
10995 calls to other builtin functions).
10997 The call may contain arguments which need to be evaluated, but
10998 which are not useful to determine the result of the call. In
10999 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11000 COMPOUND_EXPR will be an argument which must be evaluated.
11001 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11002 COMPOUND_EXPR in the chain will contain the tree for the simplified
11003 form of the builtin function call. */
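/* For example, strstr ("hello", "ell") folds to "hello" + 1, strstr (s, "")
   folds to (char *) s, and strstr (s, "c") becomes strchr (s, 'c').  */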
11005 static tree
11006 fold_builtin_strstr (tree s1, tree s2, tree type)
11008 if (!validate_arg (s1, POINTER_TYPE)
11009 || !validate_arg (s2, POINTER_TYPE))
11010 return NULL_TREE;
11011 else
11013 tree fn;
11014 const char *p1, *p2;
11016 p2 = c_getstr (s2);
11017 if (p2 == NULL)
11018 return NULL_TREE;
11020 p1 = c_getstr (s1);
11021 if (p1 != NULL)
11023 const char *r = strstr (p1, p2);
11024 tree tem;
11026 if (r == NULL)
11027 return build_int_cst (TREE_TYPE (s1), 0);
11029 /* Return an offset into the constant string argument. */
11030 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11031 s1, size_int (r - p1));
11032 return fold_convert (type, tem);
11035 /* The argument is const char *, and the result is char *, so we need
11036 a type conversion here to avoid a warning. */
11037 if (p2[0] == '\0')
11038 return fold_convert (type, s1);
11040 if (p2[1] != '\0')
11041 return NULL_TREE;
11043 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11044 if (!fn)
11045 return NULL_TREE;
11047 /* New argument list transforming strstr(s1, s2) to
11048 strchr(s1, s2[0]). */
11049 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11053 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11054 the call, and TYPE is its return type.
11056 Return NULL_TREE if no simplification was possible, otherwise return the
11057 simplified form of the call as a tree.
11059 The simplified form may be a constant or other expression which
11060 computes the same value, but in a more efficient manner (including
11061 calls to other builtin functions).
11063 The call may contain arguments which need to be evaluated, but
11064 which are not useful to determine the result of the call. In
11065 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11066 COMPOUND_EXPR will be an argument which must be evaluated.
11067 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11068 COMPOUND_EXPR in the chain will contain the tree for the simplified
11069 form of the builtin function call. */
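/* Illustrative folds when the string argument is a constant (a sketch,
   not exhaustive):

     strchr ("hello", 'l')  =>  "hello" + 2
     strchr ("hello", 'z')  =>  (char *) 0  */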
11071 static tree
11072 fold_builtin_strchr (tree s1, tree s2, tree type)
11074 if (!validate_arg (s1, POINTER_TYPE)
11075 || !validate_arg (s2, INTEGER_TYPE))
11076 return NULL_TREE;
11077 else
11079 const char *p1;
11081 if (TREE_CODE (s2) != INTEGER_CST)
11082 return NULL_TREE;
11084 p1 = c_getstr (s1);
11085 if (p1 != NULL)
11087 char c;
11088 const char *r;
11089 tree tem;
11091 if (target_char_cast (s2, &c))
11092 return NULL_TREE;
11094 r = strchr (p1, c);
11096 if (r == NULL)
11097 return build_int_cst (TREE_TYPE (s1), 0);
11099 /* Return an offset into the constant string argument. */
11100 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11101 s1, size_int (r - p1));
11102 return fold_convert (type, tem);
11104 return NULL_TREE;
11108 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11109 the call, and TYPE is its return type.
11111 Return NULL_TREE if no simplification was possible, otherwise return the
11112 simplified form of the call as a tree.
11114 The simplified form may be a constant or other expression which
11115 computes the same value, but in a more efficient manner (including
11116 calls to other builtin functions).
11118 The call may contain arguments which need to be evaluated, but
11119 which are not useful to determine the result of the call. In
11120 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11121 COMPOUND_EXPR will be an argument which must be evaluated.
11122 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11123 COMPOUND_EXPR in the chain will contain the tree for the simplified
11124 form of the builtin function call. */
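/* Illustrative folds (sketch):

     strrchr ("hello", 'l')  =>  "hello" + 3
     strrchr (s, '\0')       =>  strchr (s, '\0')   (when strchr is available)  */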
11126 static tree
11127 fold_builtin_strrchr (tree s1, tree s2, tree type)
11129 if (!validate_arg (s1, POINTER_TYPE)
11130 || !validate_arg (s2, INTEGER_TYPE))
11131 return NULL_TREE;
11132 else
11134 tree fn;
11135 const char *p1;
11137 if (TREE_CODE (s2) != INTEGER_CST)
11138 return NULL_TREE;
11140 p1 = c_getstr (s1);
11141 if (p1 != NULL)
11143 char c;
11144 const char *r;
11145 tree tem;
11147 if (target_char_cast (s2, &c))
11148 return NULL_TREE;
11150 r = strrchr (p1, c);
11152 if (r == NULL)
11153 return build_int_cst (TREE_TYPE (s1), 0);
11155 /* Return an offset into the constant string argument. */
11156 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11157 s1, size_int (r - p1));
11158 return fold_convert (type, tem);
11161 if (! integer_zerop (s2))
11162 return NULL_TREE;
11164 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11165 if (!fn)
11166 return NULL_TREE;
11168 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11169 return build_call_expr (fn, 2, s1, s2);
11173 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11174 to the call, and TYPE is its return type.
11176 Return NULL_TREE if no simplification was possible, otherwise return the
11177 simplified form of the call as a tree.
11179 The simplified form may be a constant or other expression which
11180 computes the same value, but in a more efficient manner (including
11181 calls to other builtin functions).
11183 The call may contain arguments which need to be evaluated, but
11184 which are not useful to determine the result of the call. In
11185 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11186 COMPOUND_EXPR will be an argument which must be evaluated.
11187 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11188 COMPOUND_EXPR in the chain will contain the tree for the simplified
11189 form of the builtin function call. */
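/* Illustrative folds, assuming standard strpbrk semantics (sketch):

     strpbrk ("abcde", "dc")  =>  "abcde" + 2
     strpbrk (s, "")          =>  (char *) 0       (s still evaluated)
     strpbrk (s, "c")         =>  strchr (s, 'c')  */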
11191 static tree
11192 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11194 if (!validate_arg (s1, POINTER_TYPE)
11195 || !validate_arg (s2, POINTER_TYPE))
11196 return NULL_TREE;
11197 else
11199 tree fn;
11200 const char *p1, *p2;
11202 p2 = c_getstr (s2);
11203 if (p2 == NULL)
11204 return NULL_TREE;
11206 p1 = c_getstr (s1);
11207 if (p1 != NULL)
11209 const char *r = strpbrk (p1, p2);
11210 tree tem;
11212 if (r == NULL)
11213 return build_int_cst (TREE_TYPE (s1), 0);
11215 /* Return an offset into the constant string argument. */
11216 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11217 s1, size_int (r - p1));
11218 return fold_convert (type, tem);
11221 if (p2[0] == '\0')
11222 /* strpbrk(x, "") == NULL.
11223 Evaluate and ignore s1 in case it had side-effects. */
11224 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11226 if (p2[1] != '\0')
11227 return NULL_TREE; /* Really call strpbrk. */
11229 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11230 if (!fn)
11231 return NULL_TREE;
11233 /* New argument list transforming strpbrk(s1, s2) to
11234 strchr(s1, s2[0]). */
11235 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11239 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11240 to the call.
11242 Return NULL_TREE if no simplification was possible, otherwise return the
11243 simplified form of the call as a tree.
11245 The simplified form may be a constant or other expression which
11246 computes the same value, but in a more efficient manner (including
11247 calls to other builtin functions).
11249 The call may contain arguments which need to be evaluated, but
11250 which are not useful to determine the result of the call. In
11251 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11252 COMPOUND_EXPR will be an argument which must be evaluated.
11253 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11254 COMPOUND_EXPR in the chain will contain the tree for the simplified
11255 form of the builtin function call. */
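/* The only fold attempted here is the degenerate case (sketch):

     strcat (dst, "")  =>  dst  */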
11257 static tree
11258 fold_builtin_strcat (tree dst, tree src)
11260 if (!validate_arg (dst, POINTER_TYPE)
11261 || !validate_arg (src, POINTER_TYPE))
11262 return NULL_TREE;
11263 else
11265 const char *p = c_getstr (src);
11267 /* If the string length is zero, return the dst parameter. */
11268 if (p && *p == '\0')
11269 return dst;
11271 return NULL_TREE;
11275 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11276 arguments to the call.
11278 Return NULL_TREE if no simplification was possible, otherwise return the
11279 simplified form of the call as a tree.
11281 The simplified form may be a constant or other expression which
11282 computes the same value, but in a more efficient manner (including
11283 calls to other builtin functions).
11285 The call may contain arguments which need to be evaluated, but
11286 which are not useful to determine the result of the call. In
11287 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11288 COMPOUND_EXPR will be an argument which must be evaluated.
11289 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11290 COMPOUND_EXPR in the chain will contain the tree for the simplified
11291 form of the builtin function call. */
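/* Illustrative folds (sketch):

     strncat (dst, src, 0)   =>  dst    (src and len still evaluated)
     strncat (dst, "", n)    =>  dst
     strncat (dst, "ab", 5)  =>  strcat (dst, "ab")   (len >= strlen (src))  */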
11293 static tree
11294 fold_builtin_strncat (tree dst, tree src, tree len)
11296 if (!validate_arg (dst, POINTER_TYPE)
11297 || !validate_arg (src, POINTER_TYPE)
11298 || !validate_arg (len, INTEGER_TYPE))
11299 return NULL_TREE;
11300 else
11302 const char *p = c_getstr (src);
11304 /* If the requested length is zero, or the src parameter string
11305 length is zero, return the dst parameter. */
11306 if (integer_zerop (len) || (p && *p == '\0'))
11307 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11309 /* If the requested len is greater than or equal to the string
11310 length, call strcat. */
11311 if (TREE_CODE (len) == INTEGER_CST && p
11312 && compare_tree_int (len, strlen (p)) >= 0)
11314 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11316 /* If the replacement _DECL isn't initialized, don't do the
11317 transformation. */
11318 if (!fn)
11319 return NULL_TREE;
11321 return build_call_expr (fn, 2, dst, src);
11323 return NULL_TREE;
11327 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11328 to the call.
11330 Return NULL_TREE if no simplification was possible, otherwise return the
11331 simplified form of the call as a tree.
11333 The simplified form may be a constant or other expression which
11334 computes the same value, but in a more efficient manner (including
11335 calls to other builtin functions).
11337 The call may contain arguments which need to be evaluated, but
11338 which are not useful to determine the result of the call. In
11339 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11340 COMPOUND_EXPR will be an argument which must be evaluated.
11341 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11342 COMPOUND_EXPR in the chain will contain the tree for the simplified
11343 form of the builtin function call. */
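/* Illustrative folds, assuming standard strspn semantics (sketch):

     strspn ("abcba", "abc")  =>  5    (both arguments constant)
     strspn (s, "")           =>  0    (arguments still evaluated)
     strspn ("", s)           =>  0  */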
11345 static tree
11346 fold_builtin_strspn (tree s1, tree s2)
11348 if (!validate_arg (s1, POINTER_TYPE)
11349 || !validate_arg (s2, POINTER_TYPE))
11350 return NULL_TREE;
11351 else
11353 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11355 /* If both arguments are constants, evaluate at compile-time. */
11356 if (p1 && p2)
11358 const size_t r = strspn (p1, p2);
11359 return size_int (r);
11362 /* If either argument is "", the result is 0. */
11363 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11364 /* Evaluate and ignore both arguments in case either one has
11365 side-effects. */
11366 return omit_two_operands (integer_type_node, integer_zero_node,
11367 s1, s2);
11368 return NULL_TREE;
11372 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11373 to the call.
11375 Return NULL_TREE if no simplification was possible, otherwise return the
11376 simplified form of the call as a tree.
11378 The simplified form may be a constant or other expression which
11379 computes the same value, but in a more efficient manner (including
11380 calls to other builtin functions).
11382 The call may contain arguments which need to be evaluated, but
11383 which are not useful to determine the result of the call. In
11384 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11385 COMPOUND_EXPR will be an argument which must be evaluated.
11386 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11387 COMPOUND_EXPR in the chain will contain the tree for the simplified
11388 form of the builtin function call. */
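/* Illustrative folds, assuming standard strcspn semantics (sketch):

     strcspn ("abcde", "dx")  =>  3    (both arguments constant)
     strcspn ("", s)          =>  0    (s still evaluated)
     strcspn (s, "")          =>  strlen (s)   (when strlen is available)  */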
11390 static tree
11391 fold_builtin_strcspn (tree s1, tree s2)
11393 if (!validate_arg (s1, POINTER_TYPE)
11394 || !validate_arg (s2, POINTER_TYPE))
11395 return NULL_TREE;
11396 else
11398 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11400 /* If both arguments are constants, evaluate at compile-time. */
11401 if (p1 && p2)
11403 const size_t r = strcspn (p1, p2);
11404 return size_int (r);
11407 /* If the first argument is "", the result is 0. */
11408 if (p1 && *p1 == '\0')
11410 /* Evaluate and ignore argument s2 in case it has
11411 side-effects. */
11412 return omit_one_operand (integer_type_node,
11413 integer_zero_node, s2);
11416 /* If the second argument is "", return __builtin_strlen(s1). */
11417 if (p2 && *p2 == '\0')
11419 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11421 /* If the replacement _DECL isn't initialized, don't do the
11422 transformation. */
11423 if (!fn)
11424 return NULL_TREE;
11426 return build_call_expr (fn, 1, s1);
11428 return NULL_TREE;
11432 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11433 to the call. IGNORE is true if the value returned
11434 by the builtin will be ignored. UNLOCKED is true if this is
11435 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11436 the known length of the string. Return NULL_TREE if no simplification
11437 was possible. */
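/* Illustrative transformations when the return value is ignored and the
   string length is known (sketch):

     fputs ("", f)      =>  call deleted      (f still evaluated)
     fputs ("x", f)     =>  fputc ('x', f)
     fputs ("text", f)  =>  fwrite ("text", 1, 4, f)   (unless optimizing
                                                         for size)  */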
11439 tree
11440 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11442 /* If we're using an unlocked function, assume the other unlocked
11443 functions exist explicitly. */
11444 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11445 : implicit_built_in_decls[BUILT_IN_FPUTC];
11446 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11447 : implicit_built_in_decls[BUILT_IN_FWRITE];
11449 /* If the return value is used, don't do the transformation. */
11450 if (!ignore)
11451 return NULL_TREE;
11453 /* Verify the arguments in the original call. */
11454 if (!validate_arg (arg0, POINTER_TYPE)
11455 || !validate_arg (arg1, POINTER_TYPE))
11456 return NULL_TREE;
11458 if (! len)
11459 len = c_strlen (arg0, 0);
11461 /* Get the length of the string passed to fputs. If the length
11462 can't be determined, punt. */
11463 if (!len
11464 || TREE_CODE (len) != INTEGER_CST)
11465 return NULL_TREE;
11467 switch (compare_tree_int (len, 1))
11469 case -1: /* length is 0, delete the call entirely. */
11470 return omit_one_operand (integer_type_node, integer_zero_node, arg1);
11472 case 0: /* length is 1, call fputc. */
11474 const char *p = c_getstr (arg0);
11476 if (p != NULL)
11478 if (fn_fputc)
11479 return build_call_expr (fn_fputc, 2,
11480 build_int_cst (NULL_TREE, p[0]), arg1);
11481 else
11482 return NULL_TREE;
11485 /* FALLTHROUGH */
11486 case 1: /* length is greater than 1, call fwrite. */
11488 /* If optimizing for size, keep fputs. */
11489 if (optimize_size)
11490 return NULL_TREE;
11491 /* New argument list transforming fputs(string, stream) to
11492 fwrite(string, 1, len, stream). */
11493 if (fn_fwrite)
11494 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11495 else
11496 return NULL_TREE;
11498 default:
11499 gcc_unreachable ();
11501 return NULL_TREE;
11504 /* Fold the next_arg or va_start call EXP. Return true if an error
11505 was produced, false otherwise. This is done so that we don't output
11506 the error or warning two or three times. */
11508 bool
11509 fold_builtin_next_arg (tree exp, bool va_start_p)
11511 tree fntype = TREE_TYPE (current_function_decl);
11512 int nargs = call_expr_nargs (exp);
11513 tree arg;
11515 if (TYPE_ARG_TYPES (fntype) == 0
11516 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11517 == void_type_node))
11519 error ("%<va_start%> used in function with fixed args");
11520 return true;
11523 if (va_start_p)
11525 if (va_start_p && (nargs != 2))
11527 error ("wrong number of arguments to function %<va_start%>");
11528 return true;
11530 arg = CALL_EXPR_ARG (exp, 1);
11532 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11533 when we checked the arguments and if needed issued a warning. */
11534 else
11536 if (nargs == 0)
11538 /* Evidently an out of date version of <stdarg.h>; can't validate
11539 va_start's second argument, but can still work as intended. */
11540 warning (0, "%<__builtin_next_arg%> called without an argument");
11541 return true;
11543 else if (nargs > 1)
11545 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11546 return true;
11548 arg = CALL_EXPR_ARG (exp, 0);
11551 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11552 or __builtin_next_arg (0) the first time we see it, after checking
11553 the arguments and if needed issuing a warning. */
11554 if (!integer_zerop (arg))
11556 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11558 /* Strip off all nops for the sake of the comparison. This
11559 is not quite the same as STRIP_NOPS. It does more.
11560 We must also strip off INDIRECT_EXPR for C++ reference
11561 parameters. */
11562 while (CONVERT_EXPR_P (arg)
11563 || TREE_CODE (arg) == INDIRECT_REF)
11564 arg = TREE_OPERAND (arg, 0);
11565 if (arg != last_parm)
11567 /* FIXME: Sometimes with the tree optimizers we can end up with
11568 something other than the last argument even though the user used
11569 the last argument. We just warn and set the arg to be the last
11570 argument so that we do not generate wrong code because of
11571 it. */
11572 warning (0, "second parameter of %<va_start%> not last named argument");
11574 /* We want to verify the second parameter just once before the tree
11575 optimizers are run and then avoid keeping it in the tree,
11576 as otherwise we could warn even for correct code like:
11577 void foo (int i, ...)
11578 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11579 if (va_start_p)
11580 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11581 else
11582 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11584 return false;
11588 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11589 ORIG may be null if this is a 2-argument call. We don't attempt to
11590 simplify calls with more than 3 arguments.
11592 Return NULL_TREE if no simplification was possible, otherwise return the
11593 simplified form of the call as a tree. If IGNORED is true, it means that
11594 the caller does not use the returned value of the function. */
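/* Illustrative transformations, valid only for literal format strings
   (sketch):

     sprintf (d, "abc")    =>  strcpy (d, "abc")   [result 3 if used]
     sprintf (d, "%s", s)  =>  strcpy (d, s)       [result used only when
                                                     strlen (s) is known]  */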
11596 static tree
11597 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11599 tree call, retval;
11600 const char *fmt_str = NULL;
11602 /* Verify the required arguments in the original call. We deal with two
11603 types of sprintf() calls: 'sprintf (str, fmt)' and
11604 'sprintf (dest, "%s", orig)'. */
11605 if (!validate_arg (dest, POINTER_TYPE)
11606 || !validate_arg (fmt, POINTER_TYPE))
11607 return NULL_TREE;
11608 if (orig && !validate_arg (orig, POINTER_TYPE))
11609 return NULL_TREE;
11611 /* Check whether the format is a literal string constant. */
11612 fmt_str = c_getstr (fmt);
11613 if (fmt_str == NULL)
11614 return NULL_TREE;
11616 call = NULL_TREE;
11617 retval = NULL_TREE;
11619 if (!init_target_chars ())
11620 return NULL_TREE;
11622 /* If the format doesn't contain % args or %%, use strcpy. */
11623 if (strchr (fmt_str, target_percent) == NULL)
11625 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11627 if (!fn)
11628 return NULL_TREE;
11630 /* Don't optimize sprintf (buf, "abc", ptr++). */
11631 if (orig)
11632 return NULL_TREE;
11634 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11635 'format' is known to contain no % formats. */
11636 call = build_call_expr (fn, 2, dest, fmt);
11637 if (!ignored)
11638 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11641 /* If the format is "%s", use strcpy if the result isn't used. */
11642 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11644 tree fn;
11645 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11647 if (!fn)
11648 return NULL_TREE;
11650 /* Don't crash on sprintf (str1, "%s"). */
11651 if (!orig)
11652 return NULL_TREE;
11654 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11655 if (!ignored)
11657 retval = c_strlen (orig, 1);
11658 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11659 return NULL_TREE;
11661 call = build_call_expr (fn, 2, dest, orig);
11664 if (call && retval)
11666 retval = fold_convert
11667 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11668 retval);
11669 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11671 else
11672 return call;
11675 /* Expand a call EXP to __builtin_object_size. */
11678 expand_builtin_object_size (tree exp)
11680 tree ost;
11681 int object_size_type;
11682 tree fndecl = get_callee_fndecl (exp);
11684 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11686 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11687 exp, fndecl);
11688 expand_builtin_trap ();
11689 return const0_rtx;
11692 ost = CALL_EXPR_ARG (exp, 1);
11693 STRIP_NOPS (ost);
11695 if (TREE_CODE (ost) != INTEGER_CST
11696 || tree_int_cst_sgn (ost) < 0
11697 || compare_tree_int (ost, 3) > 0)
11699 error ("%Klast argument of %D is not integer constant between 0 and 3",
11700 exp, fndecl);
11701 expand_builtin_trap ();
11702 return const0_rtx;
11705 object_size_type = tree_low_cst (ost, 0);
11707 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11710 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11711 FCODE is the BUILT_IN_* to use.
11712 Return NULL_RTX if we failed; the caller should emit a normal call,
11713 otherwise try to get the result in TARGET, if convenient (and in
11714 mode MODE if that's convenient). */
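/* Roughly, the checked call degenerates to the plain one when the copy
   is known to fit, and a warning is issued when it is known not to
   (sketch):

     __memcpy_chk (d, s, 16, (size_t) -1)  =>  memcpy (d, s, 16)
     __memcpy_chk (d, s, 16, 32)           =>  memcpy (d, s, 16)
     __memcpy_chk (d, s, 32, 16)           =>  warning; normal call emitted  */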
11716 static rtx
11717 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11718 enum built_in_function fcode)
11720 tree dest, src, len, size;
11722 if (!validate_arglist (exp,
11723 POINTER_TYPE,
11724 fcode == BUILT_IN_MEMSET_CHK
11725 ? INTEGER_TYPE : POINTER_TYPE,
11726 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11727 return NULL_RTX;
11729 dest = CALL_EXPR_ARG (exp, 0);
11730 src = CALL_EXPR_ARG (exp, 1);
11731 len = CALL_EXPR_ARG (exp, 2);
11732 size = CALL_EXPR_ARG (exp, 3);
11734 if (! host_integerp (size, 1))
11735 return NULL_RTX;
11737 if (host_integerp (len, 1) || integer_all_onesp (size))
11739 tree fn;
11741 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11743 warning (0, "%Kcall to %D will always overflow destination buffer",
11744 exp, get_callee_fndecl (exp));
11745 return NULL_RTX;
11748 fn = NULL_TREE;
11749 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11750 mem{cpy,pcpy,move,set} is available. */
11751 switch (fcode)
11753 case BUILT_IN_MEMCPY_CHK:
11754 fn = built_in_decls[BUILT_IN_MEMCPY];
11755 break;
11756 case BUILT_IN_MEMPCPY_CHK:
11757 fn = built_in_decls[BUILT_IN_MEMPCPY];
11758 break;
11759 case BUILT_IN_MEMMOVE_CHK:
11760 fn = built_in_decls[BUILT_IN_MEMMOVE];
11761 break;
11762 case BUILT_IN_MEMSET_CHK:
11763 fn = built_in_decls[BUILT_IN_MEMSET];
11764 break;
11765 default:
11766 break;
11769 if (! fn)
11770 return NULL_RTX;
11772 fn = build_call_expr (fn, 3, dest, src, len);
11773 STRIP_TYPE_NOPS (fn);
11774 while (TREE_CODE (fn) == COMPOUND_EXPR)
11776 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11777 EXPAND_NORMAL);
11778 fn = TREE_OPERAND (fn, 1);
11780 if (TREE_CODE (fn) == CALL_EXPR)
11781 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11782 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11784 else if (fcode == BUILT_IN_MEMSET_CHK)
11785 return NULL_RTX;
11786 else
11788 unsigned int dest_align
11789 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11791 /* If DEST is not a pointer type, call the normal function. */
11792 if (dest_align == 0)
11793 return NULL_RTX;
11795 /* If SRC and DEST are the same (and not volatile), do nothing. */
11796 if (operand_equal_p (src, dest, 0))
11798 tree expr;
11800 if (fcode != BUILT_IN_MEMPCPY_CHK)
11802 /* Evaluate and ignore LEN in case it has side-effects. */
11803 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11804 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11807 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11808 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11811 /* __memmove_chk special case. */
11812 if (fcode == BUILT_IN_MEMMOVE_CHK)
11814 unsigned int src_align
11815 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11817 if (src_align == 0)
11818 return NULL_RTX;
11820 /* If src is categorized for a readonly section we can use
11821 normal __memcpy_chk. */
11822 if (readonly_data_expr (src))
11824 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11825 if (!fn)
11826 return NULL_RTX;
11827 fn = build_call_expr (fn, 4, dest, src, len, size);
11828 STRIP_TYPE_NOPS (fn);
11829 while (TREE_CODE (fn) == COMPOUND_EXPR)
11831 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11832 EXPAND_NORMAL);
11833 fn = TREE_OPERAND (fn, 1);
11835 if (TREE_CODE (fn) == CALL_EXPR)
11836 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11837 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11840 return NULL_RTX;
11844 /* Emit warning if a buffer overflow is detected at compile time. */
11846 static void
11847 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11849 int is_strlen = 0;
11850 tree len, size;
11852 switch (fcode)
11854 case BUILT_IN_STRCPY_CHK:
11855 case BUILT_IN_STPCPY_CHK:
11856 /* For __strcat_chk the warning will be emitted only if overflowing
11857 by at least strlen (dest) + 1 bytes. */
11858 case BUILT_IN_STRCAT_CHK:
11859 len = CALL_EXPR_ARG (exp, 1);
11860 size = CALL_EXPR_ARG (exp, 2);
11861 is_strlen = 1;
11862 break;
11863 case BUILT_IN_STRNCAT_CHK:
11864 case BUILT_IN_STRNCPY_CHK:
11865 len = CALL_EXPR_ARG (exp, 2);
11866 size = CALL_EXPR_ARG (exp, 3);
11867 break;
11868 case BUILT_IN_SNPRINTF_CHK:
11869 case BUILT_IN_VSNPRINTF_CHK:
11870 len = CALL_EXPR_ARG (exp, 1);
11871 size = CALL_EXPR_ARG (exp, 3);
11872 break;
11873 default:
11874 gcc_unreachable ();
11877 if (!len || !size)
11878 return;
11880 if (! host_integerp (size, 1) || integer_all_onesp (size))
11881 return;
11883 if (is_strlen)
11885 len = c_strlen (len, 1);
11886 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11887 return;
11889 else if (fcode == BUILT_IN_STRNCAT_CHK)
11891 tree src = CALL_EXPR_ARG (exp, 1);
11892 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11893 return;
11894 src = c_strlen (src, 1);
11895 if (! src || ! host_integerp (src, 1))
11897 warning (0, "%Kcall to %D might overflow destination buffer",
11898 exp, get_callee_fndecl (exp));
11899 return;
11901 else if (tree_int_cst_lt (src, size))
11902 return;
11904 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11905 return;
11907 warning (0, "%Kcall to %D will always overflow destination buffer",
11908 exp, get_callee_fndecl (exp));
11911 /* Emit warning if a buffer overflow is detected at compile time
11912 in __sprintf_chk/__vsprintf_chk calls. */
11914 static void
11915 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11917 tree dest, size, len, fmt, flag;
11918 const char *fmt_str;
11919 int nargs = call_expr_nargs (exp);
11921 /* Verify the required arguments in the original call. */
11923 if (nargs < 4)
11924 return;
11925 dest = CALL_EXPR_ARG (exp, 0);
11926 flag = CALL_EXPR_ARG (exp, 1);
11927 size = CALL_EXPR_ARG (exp, 2);
11928 fmt = CALL_EXPR_ARG (exp, 3);
11930 if (! host_integerp (size, 1) || integer_all_onesp (size))
11931 return;
11933 /* Check whether the format is a literal string constant. */
11934 fmt_str = c_getstr (fmt);
11935 if (fmt_str == NULL)
11936 return;
11938 if (!init_target_chars ())
11939 return;
11941 /* If the format doesn't contain % args or %%, we know its size. */
11942 if (strchr (fmt_str, target_percent) == 0)
11943 len = build_int_cstu (size_type_node, strlen (fmt_str));
11944 /* If the format is "%s" and first ... argument is a string literal,
11945 we know it too. */
11946 else if (fcode == BUILT_IN_SPRINTF_CHK
11947 && strcmp (fmt_str, target_percent_s) == 0)
11949 tree arg;
11951 if (nargs < 5)
11952 return;
11953 arg = CALL_EXPR_ARG (exp, 4);
11954 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11955 return;
11957 len = c_strlen (arg, 1);
11958 if (!len || ! host_integerp (len, 1))
11959 return;
11961 else
11962 return;
11964 if (! tree_int_cst_lt (len, size))
11966 warning (0, "%Kcall to %D will always overflow destination buffer",
11967 exp, get_callee_fndecl (exp));
11971 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11972 if possible. */
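/* For example (sketch):

     __builtin_object_size (&buf[2], 0)  =>  sizeof (buf) - 2   (object known)
     PTR with side-effects, type 0 or 1  =>  (size_t) -1
     PTR with side-effects, type 2 or 3  =>  (size_t) 0  */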
11974 tree
11975 fold_builtin_object_size (tree ptr, tree ost)
11977 tree ret = NULL_TREE;
11978 int object_size_type;
11980 if (!validate_arg (ptr, POINTER_TYPE)
11981 || !validate_arg (ost, INTEGER_TYPE))
11982 return NULL_TREE;
11984 STRIP_NOPS (ost);
11986 if (TREE_CODE (ost) != INTEGER_CST
11987 || tree_int_cst_sgn (ost) < 0
11988 || compare_tree_int (ost, 3) > 0)
11989 return NULL_TREE;
11991 object_size_type = tree_low_cst (ost, 0);
11993 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11994 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11995 and (size_t) 0 for types 2 and 3. */
11996 if (TREE_SIDE_EFFECTS (ptr))
11997 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11999 if (TREE_CODE (ptr) == ADDR_EXPR)
12000 ret = build_int_cstu (size_type_node,
12001 compute_builtin_object_size (ptr, object_size_type));
12003 else if (TREE_CODE (ptr) == SSA_NAME)
12005 unsigned HOST_WIDE_INT bytes;
12007 /* If object size is not known yet, delay folding until
12008 later. Maybe subsequent passes will help determining
12009 it. */
12010 bytes = compute_builtin_object_size (ptr, object_size_type);
12011 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12012 ? -1 : 0))
12013 ret = build_int_cstu (size_type_node, bytes);
12016 if (ret)
12018 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12019 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12020 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12021 ret = NULL_TREE;
12024 return ret;
12027 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12028 DEST, SRC, LEN, and SIZE are the arguments to the call.
12029 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12030 code of the builtin. If MAXLEN is not NULL, it is maximum length
12031 passed as third argument. */
12033 tree
12034 fold_builtin_memory_chk (tree fndecl,
12035 tree dest, tree src, tree len, tree size,
12036 tree maxlen, bool ignore,
12037 enum built_in_function fcode)
12039 tree fn;
12041 if (!validate_arg (dest, POINTER_TYPE)
12042 || !validate_arg (src,
12043 (fcode == BUILT_IN_MEMSET_CHK
12044 ? INTEGER_TYPE : POINTER_TYPE))
12045 || !validate_arg (len, INTEGER_TYPE)
12046 || !validate_arg (size, INTEGER_TYPE))
12047 return NULL_TREE;
12049 /* If SRC and DEST are the same (and not volatile), return DEST
12050 (resp. DEST+LEN for __mempcpy_chk). */
12051 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12053 if (fcode != BUILT_IN_MEMPCPY_CHK)
12054 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12055 else
12057 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12058 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
12062 if (! host_integerp (size, 1))
12063 return NULL_TREE;
12065 if (! integer_all_onesp (size))
12067 if (! host_integerp (len, 1))
12069 /* If LEN is not constant, try MAXLEN too.
12070 For MAXLEN only allow optimizing into non-_ocs function
12071 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12072 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12074 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12076 /* (void) __mempcpy_chk () can be optimized into
12077 (void) __memcpy_chk (). */
12078 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12079 if (!fn)
12080 return NULL_TREE;
12082 return build_call_expr (fn, 4, dest, src, len, size);
12084 return NULL_TREE;
12087 else
12088 maxlen = len;
12090 if (tree_int_cst_lt (size, maxlen))
12091 return NULL_TREE;
12094 fn = NULL_TREE;
12095 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12096 mem{cpy,pcpy,move,set} is available. */
12097 switch (fcode)
12099 case BUILT_IN_MEMCPY_CHK:
12100 fn = built_in_decls[BUILT_IN_MEMCPY];
12101 break;
12102 case BUILT_IN_MEMPCPY_CHK:
12103 fn = built_in_decls[BUILT_IN_MEMPCPY];
12104 break;
12105 case BUILT_IN_MEMMOVE_CHK:
12106 fn = built_in_decls[BUILT_IN_MEMMOVE];
12107 break;
12108 case BUILT_IN_MEMSET_CHK:
12109 fn = built_in_decls[BUILT_IN_MEMSET];
12110 break;
12111 default:
12112 break;
12115 if (!fn)
12116 return NULL_TREE;
12118 return build_call_expr (fn, 3, dest, src, len);
12121 /* Fold a call to the __st[rp]cpy_chk builtin.
12122 DEST, SRC, and SIZE are the arguments to the call.
12123 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12124 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12125 strings passed as second argument. */
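/* Illustrative folds (sketch):

     __strcpy_chk (d, "abc", 8)        =>  strcpy (d, "abc")   (length known
                                                                 to fit)
     __strcpy_chk (d, s, (size_t) -1)  =>  strcpy (d, s)
     (void) __stpcpy_chk (d, s, n)     =>  (void) __strcpy_chk (d, s, n)
                                            (when the length is unknown)  */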
12127 tree
12128 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12129 tree maxlen, bool ignore,
12130 enum built_in_function fcode)
12132 tree len, fn;
12134 if (!validate_arg (dest, POINTER_TYPE)
12135 || !validate_arg (src, POINTER_TYPE)
12136 || !validate_arg (size, INTEGER_TYPE))
12137 return NULL_TREE;
12139 /* If SRC and DEST are the same (and not volatile), return DEST. */
12140 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12141 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12143 if (! host_integerp (size, 1))
12144 return NULL_TREE;
12146 if (! integer_all_onesp (size))
12148 len = c_strlen (src, 1);
12149 if (! len || ! host_integerp (len, 1))
12151 /* If LEN is not constant, try MAXLEN too.
12152 For MAXLEN only allow optimizing into non-_ocs function
12153 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12154 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12156 if (fcode == BUILT_IN_STPCPY_CHK)
12158 if (! ignore)
12159 return NULL_TREE;
12161 /* If return value of __stpcpy_chk is ignored,
12162 optimize into __strcpy_chk. */
12163 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12164 if (!fn)
12165 return NULL_TREE;
12167 return build_call_expr (fn, 3, dest, src, size);
12170 if (! len || TREE_SIDE_EFFECTS (len))
12171 return NULL_TREE;
12173 /* If c_strlen returned something, but not a constant,
12174 transform __strcpy_chk into __memcpy_chk. */
12175 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12176 if (!fn)
12177 return NULL_TREE;
12179 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12180 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12181 build_call_expr (fn, 4,
12182 dest, src, len, size));
12185 else
12186 maxlen = len;
12188 if (! tree_int_cst_lt (maxlen, size))
12189 return NULL_TREE;
12192 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12193 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12194 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12195 if (!fn)
12196 return NULL_TREE;
12198 return build_call_expr (fn, 2, dest, src);
12201 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12202 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12203 length passed as third argument. */
12205 tree
12206 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12207 tree maxlen)
12209 tree fn;
12211 if (!validate_arg (dest, POINTER_TYPE)
12212 || !validate_arg (src, POINTER_TYPE)
12213 || !validate_arg (len, INTEGER_TYPE)
12214 || !validate_arg (size, INTEGER_TYPE))
12215 return NULL_TREE;
12217 if (! host_integerp (size, 1))
12218 return NULL_TREE;
12220 if (! integer_all_onesp (size))
12222 if (! host_integerp (len, 1))
12224 /* If LEN is not constant, try MAXLEN too.
12225 For MAXLEN only allow optimizing into non-_ocs function
12226 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12227 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12228 return NULL_TREE;
12230 else
12231 maxlen = len;
12233 if (tree_int_cst_lt (size, maxlen))
12234 return NULL_TREE;
12237 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12238 fn = built_in_decls[BUILT_IN_STRNCPY];
12239 if (!fn)
12240 return NULL_TREE;
12242 return build_call_expr (fn, 3, dest, src, len);
12245 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12246 are the arguments to the call. */
12248 static tree
12249 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12251 tree fn;
12252 const char *p;
12254 if (!validate_arg (dest, POINTER_TYPE)
12255 || !validate_arg (src, POINTER_TYPE)
12256 || !validate_arg (size, INTEGER_TYPE))
12257 return NULL_TREE;
12259 p = c_getstr (src);
12260 /* If the SRC parameter is "", return DEST. */
12261 if (p && *p == '\0')
12262 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12264 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12265 return NULL_TREE;
12267 /* If __builtin_strcat_chk is used, assume strcat is available. */
12268 fn = built_in_decls[BUILT_IN_STRCAT];
12269 if (!fn)
12270 return NULL_TREE;
12272 return build_call_expr (fn, 2, dest, src);
12275 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12276 LEN, and SIZE. */
12278 static tree
12279 fold_builtin_strncat_chk (tree fndecl,
12280 tree dest, tree src, tree len, tree size)
12282 tree fn;
12283 const char *p;
12285 if (!validate_arg (dest, POINTER_TYPE)
12286 || !validate_arg (src, POINTER_TYPE)
12287 || !validate_arg (len, INTEGER_TYPE)
12288 || !validate_arg (size, INTEGER_TYPE))
12289 return NULL_TREE;
12291 p = c_getstr (src);
12292 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12293 if (p && *p == '\0')
12294 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12295 else if (integer_zerop (len))
12296 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12298 if (! host_integerp (size, 1))
12299 return NULL_TREE;
12301 if (! integer_all_onesp (size))
12303 tree src_len = c_strlen (src, 1);
12304 if (src_len
12305 && host_integerp (src_len, 1)
12306 && host_integerp (len, 1)
12307 && ! tree_int_cst_lt (len, src_len))
12309 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12310 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12311 if (!fn)
12312 return NULL_TREE;
12314 return build_call_expr (fn, 3, dest, src, size);
12316 return NULL_TREE;
12319 /* If __builtin_strncat_chk is used, assume strncat is available. */
12320 fn = built_in_decls[BUILT_IN_STRNCAT];
12321 if (!fn)
12322 return NULL_TREE;
12324 return build_call_expr (fn, 3, dest, src, len);
12327 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12328 a normal call should be emitted rather than expanding the function
12329 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12331 static tree
12332 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12334 tree dest, size, len, fn, fmt, flag;
12335 const char *fmt_str;
12336 int nargs = call_expr_nargs (exp);
12338 /* Verify the required arguments in the original call. */
12339 if (nargs < 4)
12340 return NULL_TREE;
12341 dest = CALL_EXPR_ARG (exp, 0);
12342 if (!validate_arg (dest, POINTER_TYPE))
12343 return NULL_TREE;
12344 flag = CALL_EXPR_ARG (exp, 1);
12345 if (!validate_arg (flag, INTEGER_TYPE))
12346 return NULL_TREE;
12347 size = CALL_EXPR_ARG (exp, 2);
12348 if (!validate_arg (size, INTEGER_TYPE))
12349 return NULL_TREE;
12350 fmt = CALL_EXPR_ARG (exp, 3);
12351 if (!validate_arg (fmt, POINTER_TYPE))
12352 return NULL_TREE;
12354 if (! host_integerp (size, 1))
12355 return NULL_TREE;
12357 len = NULL_TREE;
12359 if (!init_target_chars ())
12360 return NULL_TREE;
12362 /* Check whether the format is a literal string constant. */
12363 fmt_str = c_getstr (fmt);
12364 if (fmt_str != NULL)
12366 /* If the format doesn't contain % args or %%, we know the size. */
12367 if (strchr (fmt_str, target_percent) == 0)
12369 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12370 len = build_int_cstu (size_type_node, strlen (fmt_str));
12372 /* If the format is "%s" and first ... argument is a string literal,
12373 we know the size too. */
12374 else if (fcode == BUILT_IN_SPRINTF_CHK
12375 && strcmp (fmt_str, target_percent_s) == 0)
12377 tree arg;
12379 if (nargs == 5)
12381 arg = CALL_EXPR_ARG (exp, 4);
12382 if (validate_arg (arg, POINTER_TYPE))
12384 len = c_strlen (arg, 1);
12385 if (! len || ! host_integerp (len, 1))
12386 len = NULL_TREE;
12392 if (! integer_all_onesp (size))
12394 if (! len || ! tree_int_cst_lt (len, size))
12395 return NULL_TREE;
12398 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12399 or if format doesn't contain % chars or is "%s". */
12400 if (! integer_zerop (flag))
12402 if (fmt_str == NULL)
12403 return NULL_TREE;
12404 if (strchr (fmt_str, target_percent) != NULL
12405 && strcmp (fmt_str, target_percent_s))
12406 return NULL_TREE;
12409 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12410 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12411 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12412 if (!fn)
12413 return NULL_TREE;
12415 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12418 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
12419 a normal call should be emitted rather than expanding the function
12420 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12421 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12422 passed as second argument. */
12424 tree
12425 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12426 enum built_in_function fcode)
12428 tree dest, size, len, fn, fmt, flag;
12429 const char *fmt_str;
12431 /* Verify the required arguments in the original call. */
12432 if (call_expr_nargs (exp) < 5)
12433 return NULL_TREE;
12434 dest = CALL_EXPR_ARG (exp, 0);
12435 if (!validate_arg (dest, POINTER_TYPE))
12436 return NULL_TREE;
12437 len = CALL_EXPR_ARG (exp, 1);
12438 if (!validate_arg (len, INTEGER_TYPE))
12439 return NULL_TREE;
12440 flag = CALL_EXPR_ARG (exp, 2);
12441 if (!validate_arg (flag, INTEGER_TYPE))
12442 return NULL_TREE;
12443 size = CALL_EXPR_ARG (exp, 3);
12444 if (!validate_arg (size, INTEGER_TYPE))
12445 return NULL_TREE;
12446 fmt = CALL_EXPR_ARG (exp, 4);
12447 if (!validate_arg (fmt, POINTER_TYPE))
12448 return NULL_TREE;
12450 if (! host_integerp (size, 1))
12451 return NULL_TREE;
12453 if (! integer_all_onesp (size))
12455 if (! host_integerp (len, 1))
12457 /* If LEN is not constant, try MAXLEN too.
12458 For MAXLEN only allow optimizing into non-_ocs function
12459 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12460 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12461 return NULL_TREE;
12463 else
12464 maxlen = len;
12466 if (tree_int_cst_lt (size, maxlen))
12467 return NULL_TREE;
12470 if (!init_target_chars ())
12471 return NULL_TREE;
12473 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12474 or if format doesn't contain % chars or is "%s". */
12475 if (! integer_zerop (flag))
12477 fmt_str = c_getstr (fmt);
12478 if (fmt_str == NULL)
12479 return NULL_TREE;
12480 if (strchr (fmt_str, target_percent) != NULL
12481 && strcmp (fmt_str, target_percent_s))
12482 return NULL_TREE;
12485 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12486 available. */
12487 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12488 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12489 if (!fn)
12490 return NULL_TREE;
12492 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12495 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12496 FMT and ARG are the arguments to the call; we don't fold cases with
12497 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12499 Return NULL_TREE if no simplification was possible, otherwise return the
12500 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12501 code of the function to be simplified. */
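/* Illustrative transformations, valid only for literal formats and an
   ignored return value (sketch):

     printf ("")         =>  folded to 0 (no output call)
     printf ("x")        =>  putchar ('x')
     printf ("hi\n")     =>  puts ("hi")
     printf ("%s\n", s)  =>  puts (s)
     printf ("%c", c)    =>  putchar (c)  */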
12503 static tree
12504 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12505 enum built_in_function fcode)
12507 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12508 const char *fmt_str = NULL;
12510 /* If the return value is used, don't do the transformation. */
12511 if (! ignore)
12512 return NULL_TREE;
12514 /* Verify the required arguments in the original call. */
12515 if (!validate_arg (fmt, POINTER_TYPE))
12516 return NULL_TREE;
12518 /* Check whether the format is a literal string constant. */
12519 fmt_str = c_getstr (fmt);
12520 if (fmt_str == NULL)
12521 return NULL_TREE;
12523 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12525 /* If we're using an unlocked function, assume the other
12526 unlocked functions exist explicitly. */
12527 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12528 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12530 else
12532 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12533 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12536 if (!init_target_chars ())
12537 return NULL_TREE;
12539 if (strcmp (fmt_str, target_percent_s) == 0
12540 || strchr (fmt_str, target_percent) == NULL)
12542 const char *str;
12544 if (strcmp (fmt_str, target_percent_s) == 0)
12546 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12547 return NULL_TREE;
12549 if (!arg || !validate_arg (arg, POINTER_TYPE))
12550 return NULL_TREE;
12552 str = c_getstr (arg);
12553 if (str == NULL)
12554 return NULL_TREE;
12556 else
12558 /* The format specifier doesn't contain any '%' characters. */
12559 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12560 && arg)
12561 return NULL_TREE;
12562 str = fmt_str;
12565 /* If the string was "", printf does nothing. */
12566 if (str[0] == '\0')
12567 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12569 /* If the string has length of 1, call putchar. */
12570 if (str[1] == '\0')
12572 /* Given printf ("c") (where c is any one character),
12573 convert "c"[0] to an int and pass that to the replacement
12574 function. */
12575 newarg = build_int_cst (NULL_TREE, str[0]);
12576 if (fn_putchar)
12577 call = build_call_expr (fn_putchar, 1, newarg);
12579 else
12581 /* If the string was "string\n", call puts("string"). */
12582 size_t len = strlen (str);
12583 if ((unsigned char)str[len - 1] == target_newline)
12585 /* Create a NUL-terminated string that's one char shorter
12586 than the original, stripping off the trailing '\n'. */
12587 char *newstr = XALLOCAVEC (char, len);
12588 memcpy (newstr, str, len - 1);
12589 newstr[len - 1] = 0;
12591 newarg = build_string_literal (len, newstr);
12592 if (fn_puts)
12593 call = build_call_expr (fn_puts, 1, newarg);
12595 else
12596 /* We'd like to arrange to call fputs(string,stdout) here,
12597 but we need stdout and don't have a way to get it yet. */
12598 return NULL_TREE;
12602 /* The other optimizations can be done only on the non-va_list variants. */
12603 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12604 return NULL_TREE;
12606 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12607 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12609 if (!arg || !validate_arg (arg, POINTER_TYPE))
12610 return NULL_TREE;
12611 if (fn_puts)
12612 call = build_call_expr (fn_puts, 1, arg);
12615 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12616 else if (strcmp (fmt_str, target_percent_c) == 0)
12618 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12619 return NULL_TREE;
12620 if (fn_putchar)
12621 call = build_call_expr (fn_putchar, 1, arg);
12624 if (!call)
12625 return NULL_TREE;
12627 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12630 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
12631 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12632 more than 3 arguments, and ARG may be null in the 2-argument case.
12634 Return NULL_TREE if no simplification was possible, otherwise return the
12635 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12636 code of the function to be simplified. */
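/* Illustrative transformations (sketch):

     fprintf (fp, "abc")    =>  fputs ("abc", fp)
     fprintf (fp, "%s", s)  =>  fputs (s, fp)
     fprintf (fp, "%c", c)  =>  fputc (c, fp)  */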
12638 static tree
12639 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12640 enum built_in_function fcode)
12642 tree fn_fputc, fn_fputs, call = NULL_TREE;
12643 const char *fmt_str = NULL;
12645 /* If the return value is used, don't do the transformation. */
12646 if (! ignore)
12647 return NULL_TREE;
12649 /* Verify the required arguments in the original call. */
12650 if (!validate_arg (fp, POINTER_TYPE))
12651 return NULL_TREE;
12652 if (!validate_arg (fmt, POINTER_TYPE))
12653 return NULL_TREE;
12655 /* Check whether the format is a literal string constant. */
12656 fmt_str = c_getstr (fmt);
12657 if (fmt_str == NULL)
12658 return NULL_TREE;
12660 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12662 /* If we're using an unlocked function, assume the other
12663 unlocked functions exist explicitly. */
12664 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12665 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12667 else
12669 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12670 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12673 if (!init_target_chars ())
12674 return NULL_TREE;
12676 /* If the format doesn't contain % args or %%, use strcpy. */
12677 if (strchr (fmt_str, target_percent) == NULL)
12679 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12680 && arg)
12681 return NULL_TREE;
12683 /* If the format specifier was "", fprintf does nothing. */
12684 if (fmt_str[0] == '\0')
12686 /* If FP has side-effects, just wait until gimplification is
12687 done. */
12688 if (TREE_SIDE_EFFECTS (fp))
12689 return NULL_TREE;
12691 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12694 /* When "string" doesn't contain %, replace all cases of
12695 fprintf (fp, string) with fputs (string, fp). The fputs
12696 builtin will take care of special cases like length == 1. */
12697 if (fn_fputs)
12698 call = build_call_expr (fn_fputs, 2, fmt, fp);
12701 /* The other optimizations can be done only on the non-va_list variants. */
12702 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12703 return NULL_TREE;
12705 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12706 else if (strcmp (fmt_str, target_percent_s) == 0)
12708 if (!arg || !validate_arg (arg, POINTER_TYPE))
12709 return NULL_TREE;
12710 if (fn_fputs)
12711 call = build_call_expr (fn_fputs, 2, arg, fp);
12714 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12715 else if (strcmp (fmt_str, target_percent_c) == 0)
12717 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12718 return NULL_TREE;
12719 if (fn_fputc)
12720 call = build_call_expr (fn_fputc, 2, arg, fp);
12723 if (!call)
12724 return NULL_TREE;
12725 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12728 /* Initialize format string characters in the target charset. */
12730 static bool
12731 init_target_chars (void)
12733 static bool init;
12734 if (!init)
12736 target_newline = lang_hooks.to_target_charset ('\n');
12737 target_percent = lang_hooks.to_target_charset ('%');
12738 target_c = lang_hooks.to_target_charset ('c');
12739 target_s = lang_hooks.to_target_charset ('s');
12740 if (target_newline == 0 || target_percent == 0 || target_c == 0
12741 || target_s == 0)
12742 return false;
12744 target_percent_c[0] = target_percent;
12745 target_percent_c[1] = target_c;
12746 target_percent_c[2] = '\0';
12748 target_percent_s[0] = target_percent;
12749 target_percent_s[1] = target_s;
12750 target_percent_s[2] = '\0';
12752 target_percent_s_newline[0] = target_percent;
12753 target_percent_s_newline[1] = target_s;
12754 target_percent_s_newline[2] = target_newline;
12755 target_percent_s_newline[3] = '\0';
12757 init = true;
12759 return true;
12762 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12763 and no overflow/underflow occurred. INEXACT is true if M was not
12764 exactly calculated. TYPE is the tree type for the result. This
12765 function assumes that the caller cleared the MPFR flags and then
12766 calculated M, so any flag set since then indicates a problem with
12767 that calculation. Return NULL_TREE if any checks fail. */
12769 static tree
12770 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12772 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12773 overflow/underflow occurred. If -frounding-math, proceed iff the
12774 result of calling FUNC was exact. */
12775 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12776 && (!flag_rounding_math || !inexact))
12778 REAL_VALUE_TYPE rr;
12780 real_from_mpfr (&rr, m, type, GMP_RNDN);
12781 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12782 i.e. check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12783 but the mpfr_t is not, then we underflowed in the
12784 conversion. */
12785 if (real_isfinite (&rr)
12786 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12788 REAL_VALUE_TYPE rmode;
12790 real_convert (&rmode, TYPE_MODE (type), &rr);
12791 /* Proceed iff the specified mode can hold the value. */
12792 if (real_identical (&rmode, &rr))
12793 return build_real (type, rmode);
12796 return NULL_TREE;
12799 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12800 FUNC on it and return the resulting value as a tree with type TYPE.
12801 If MIN and/or MAX are not NULL, then the supplied ARG must be
12802 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12803 acceptable values, otherwise they are not. The mpfr precision is
12804 set to the precision of TYPE. We assume that function FUNC returns
12805 zero if the result could be calculated exactly within the requested
12806 precision. */
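/* For example (sketch, assuming a binary target format and round to
   nearest):

     sin (1.0)  =>  REAL_CST 0.84147098480789650...
     a domain violation such as acos (2.0) is rejected by the MIN/MAX
     bounds and left unfolded here.  */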
12808 static tree
12809 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12810 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12811 bool inclusive)
12813 tree result = NULL_TREE;
12815 STRIP_NOPS (arg);
12817 /* To proceed, MPFR must exactly represent the target floating point
12818 format, which only happens when the target base equals two. */
12819 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12820 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12822 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12824 if (real_isfinite (ra)
12825 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12826 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12828 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12829 int inexact;
12830 mpfr_t m;
12832 mpfr_init2 (m, prec);
12833 mpfr_from_real (m, ra, GMP_RNDN);
12834 mpfr_clear_flags ();
12835 inexact = func (m, m, GMP_RNDN);
12836 result = do_mpfr_ckconv (m, type, inexact);
12837 mpfr_clear (m);
12841 return result;
12844 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12845 FUNC on it and return the resulting value as a tree with type TYPE.
12846 The mpfr precision is set to the precision of TYPE. We assume that
12847 function FUNC returns zero if the result could be calculated
12848 exactly within the requested precision. */
12850 static tree
12851 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12852 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12854 tree result = NULL_TREE;
12856 STRIP_NOPS (arg1);
12857 STRIP_NOPS (arg2);
12859 /* To proceed, MPFR must exactly represent the target floating point
12860 format, which only happens when the target base equals two. */
12861 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12862 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12863 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12865 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12866 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12868 if (real_isfinite (ra1) && real_isfinite (ra2))
12870 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12871 int inexact;
12872 mpfr_t m1, m2;
12874 mpfr_inits2 (prec, m1, m2, NULL);
12875 mpfr_from_real (m1, ra1, GMP_RNDN);
12876 mpfr_from_real (m2, ra2, GMP_RNDN);
12877 mpfr_clear_flags ();
12878 inexact = func (m1, m1, m2, GMP_RNDN);
12879 result = do_mpfr_ckconv (m1, type, inexact);
12880 mpfr_clears (m1, m2, NULL);
12884 return result;
12887 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12888 FUNC on it and return the resulting value as a tree with type TYPE.
12889 The mpfr precision is set to the precision of TYPE. We assume that
12890 function FUNC returns zero if the result could be calculated
12891 exactly within the requested precision. */
12893 static tree
12894 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12895 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12897 tree result = NULL_TREE;
12899 STRIP_NOPS (arg1);
12900 STRIP_NOPS (arg2);
12901 STRIP_NOPS (arg3);
12903 /* To proceed, MPFR must exactly represent the target floating point
12904 format, which only happens when the target base equals two. */
12905 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12906 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12907 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12908 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12910 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12911 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12912 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12914 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12916 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12917 int inexact;
12918 mpfr_t m1, m2, m3;
12920 mpfr_inits2 (prec, m1, m2, m3, NULL);
12921 mpfr_from_real (m1, ra1, GMP_RNDN);
12922 mpfr_from_real (m2, ra2, GMP_RNDN);
12923 mpfr_from_real (m3, ra3, GMP_RNDN);
12924 mpfr_clear_flags ();
12925 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12926 result = do_mpfr_ckconv (m1, type, inexact);
12927 mpfr_clears (m1, m2, m3, NULL);
12931 return result;
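/* For exposition only (kept under #if 0; example_fma is a placeholder
   name): with three constant operands the same machinery applies, so a
   call like the one below can be replaced at compile time by the constant
   10.0, mpfr_fma having computed 2*3+4 exactly.  */
#if 0
double
example_fma (void)
{
  return __builtin_fma (2.0, 3.0, 4.0);
}
#endif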
12934 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and store
12935 the resulting values through the pointers ARG_SINP and ARG_COSP.
12936 If ARG_SINP and ARG_COSP are NULL, the result is returned
12937 as a complex value instead.
12938 The type is taken from the type of ARG and is used for setting the
12939 precision of the calculation and results. */
12941 static tree
12942 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12944 tree const type = TREE_TYPE (arg);
12945 tree result = NULL_TREE;
12947 STRIP_NOPS (arg);
12949 /* To proceed, MPFR must exactly represent the target floating point
12950 format, which only happens when the target base equals two. */
12951 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12952 && TREE_CODE (arg) == REAL_CST
12953 && !TREE_OVERFLOW (arg))
12955 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12957 if (real_isfinite (ra))
12959 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12960 tree result_s, result_c;
12961 int inexact;
12962 mpfr_t m, ms, mc;
12964 mpfr_inits2 (prec, m, ms, mc, NULL);
12965 mpfr_from_real (m, ra, GMP_RNDN);
12966 mpfr_clear_flags ();
12967 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12968 result_s = do_mpfr_ckconv (ms, type, inexact);
12969 result_c = do_mpfr_ckconv (mc, type, inexact);
12970 mpfr_clears (m, ms, mc, NULL);
12971 if (result_s && result_c)
12973 /* If we are to return the result as a complex value, do so. */
12974 if (!arg_sinp && !arg_cosp)
12975 return build_complex (build_complex_type (type),
12976 result_c, result_s);
12978 /* Dereference the sin/cos pointer arguments. */
12979 arg_sinp = build_fold_indirect_ref (arg_sinp);
12980 arg_cosp = build_fold_indirect_ref (arg_cosp);
12981 /* Proceed iff valid pointer types were passed in. */
12982 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12983 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12985 /* Set the values. */
12986 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12987 result_s);
12988 TREE_SIDE_EFFECTS (result_s) = 1;
12989 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12990 result_c);
12991 TREE_SIDE_EFFECTS (result_c) = 1;
12992 /* Combine the assignments into a compound expr. */
12993 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12994 result_s, result_c));
12999 return result;
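/* For exposition only (kept under #if 0; example_sincos is a placeholder
   name): with a constant argument, both results come from the single
   mpfr_sin_cos evaluation above, and the two assignments through the
   pointers are combined into one COMPOUND_EXPR.  Callers that pass NULL
   pointers get the complex-valued form instead.  */
#if 0
void
example_sincos (double *sinp, double *cosp)
{
  __builtin_sincos (1.0, sinp, cosp);
}
#endif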
13002 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
13003 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13004 two-argument mpfr order N Bessel function FUNC on them and return
13005 the resulting value as a tree with type TYPE. The mpfr precision
13006 is set to the precision of TYPE. We assume that function FUNC
13007 returns zero if the result could be calculated exactly within the
13008 requested precision. */
13009 static tree
13010 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13011 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13012 const REAL_VALUE_TYPE *min, bool inclusive)
13014 tree result = NULL_TREE;
13016 STRIP_NOPS (arg1);
13017 STRIP_NOPS (arg2);
13019 /* To proceed, MPFR must exactly represent the target floating point
13020 format, which only happens when the target base equals two. */
13021 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13022 && host_integerp (arg1, 0)
13023 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13025 const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
13026 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13028 if (n == (long) n
13029 && real_isfinite (ra)
13030 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
13032 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
13033 int inexact;
13034 mpfr_t m;
13036 mpfr_init2 (m, prec);
13037 mpfr_from_real (m, ra, GMP_RNDN);
13038 mpfr_clear_flags ();
13039 inexact = func (m, n, m, GMP_RNDN);
13040 result = do_mpfr_ckconv (m, type, inexact);
13041 mpfr_clear (m);
13045 return result;
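/* For exposition only (kept under #if 0; example_jn is a placeholder
   name): the Bessel folder above requires the order to be an integer
   constant that also fits in a host long, and the real argument to be a
   finite constant; a call such as this can then be evaluated through
   mpfr_jn.  */
#if 0
double
example_jn (void)
{
  return __builtin_jn (2, 1.5);
}
#endif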
13048 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13049 *(ARG_QUO) and return the remainder as the result. The type is taken
13050 from the type of ARG0 and is used for setting the precision of the
13051 calculation and results. */
13053 static tree
13054 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13056 tree const type = TREE_TYPE (arg0);
13057 tree result = NULL_TREE;
13059 STRIP_NOPS (arg0);
13060 STRIP_NOPS (arg1);
13062 /* To proceed, MPFR must exactly represent the target floating point
13063 format, which only happens when the target base equals two. */
13064 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13065 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13066 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13068 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13069 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13071 if (real_isfinite (ra0) && real_isfinite (ra1))
13073 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
13074 tree result_rem;
13075 long integer_quo;
13076 mpfr_t m0, m1;
13078 mpfr_inits2 (prec, m0, m1, NULL);
13079 mpfr_from_real (m0, ra0, GMP_RNDN);
13080 mpfr_from_real (m1, ra1, GMP_RNDN);
13081 mpfr_clear_flags ();
13082 mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
13083 /* Remquo is independent of the rounding mode, so pass
13084 inexact=0 to do_mpfr_ckconv(). */
13085 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13086 mpfr_clears (m0, m1, NULL);
13087 if (result_rem)
13089 /* MPFR computes quo in the host's long, so it may
13090 return more bits in quo than the target int can hold
13091 if sizeof (host long) > sizeof (target int). This can
13092 happen even for native compilers in LP64 mode. In
13093 these cases, reduce the quo value modulo the largest
13094 number that the target int can hold, leaving one
13095 bit for the sign. */
13096 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13097 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13099 /* Dereference the quo pointer argument. */
13100 arg_quo = build_fold_indirect_ref (arg_quo);
13101 /* Proceed iff a valid pointer type was passed in. */
13102 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13104 /* Set the value. */
13105 tree result_quo = fold_build2 (MODIFY_EXPR,
13106 TREE_TYPE (arg_quo), arg_quo,
13107 build_int_cst (NULL, integer_quo));
13108 TREE_SIDE_EFFECTS (result_quo) = 1;
13109 /* Combine the quo assignment with the rem. */
13110 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13111 result_quo, result_rem));
13116 return result;
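/* For exposition only (kept under #if 0; example_remquo is a placeholder
   name): with two constant arguments the folder above computes the
   remainder with mpfr_remquo and folds the store to *QUO into the same
   expression.  For (5.0, 3.0) the remainder is -1.0 and the stored
   quotient bits are 2.  */
#if 0
double
example_remquo (int *quo)
{
  return __builtin_remquo (5.0, 3.0, quo);
}
#endif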
13119 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13120 resulting value as a tree with type TYPE. The mpfr precision is
13121 set to the precision of TYPE. We assume that this mpfr function
13122 returns zero if the result could be calculated exactly within the
13123 requested precision. In addition, the integer pointer represented
13124 by ARG_SG will be dereferenced and set to the appropriate signgam
13125 (-1,1) value. */
13127 static tree
13128 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13130 tree result = NULL_TREE;
13132 STRIP_NOPS (arg);
13134 /* To proceed, MPFR must exactly represent the target floating point
13135 format, which only happens when the target base equals two. Also
13136 verify ARG is a constant and that ARG_SG is an int pointer. */
13137 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13138 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13139 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13140 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13142 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13144 /* In addition to NaN and Inf, the argument cannot be zero or a
13145 negative integer. */
13146 if (real_isfinite (ra)
13147 && ra->cl != rvc_zero
13148 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
13150 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
13151 int inexact, sg;
13152 mpfr_t m;
13153 tree result_lg;
13155 mpfr_init2 (m, prec);
13156 mpfr_from_real (m, ra, GMP_RNDN);
13157 mpfr_clear_flags ();
13158 inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
13159 result_lg = do_mpfr_ckconv (m, type, inexact);
13160 mpfr_clear (m);
13161 if (result_lg)
13163 tree result_sg;
13165 /* Dereference the arg_sg pointer argument. */
13166 arg_sg = build_fold_indirect_ref (arg_sg);
13167 /* Assign the signgam value into *arg_sg. */
13168 result_sg = fold_build2 (MODIFY_EXPR,
13169 TREE_TYPE (arg_sg), arg_sg,
13170 build_int_cst (NULL, sg));
13171 TREE_SIDE_EFFECTS (result_sg) = 1;
13172 /* Combine the signgam assignment with the lgamma result. */
13173 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13174 result_sg, result_lg));
13179 return result;
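/* For exposition only (kept under #if 0; example_lgamma_r is a placeholder
   name): lgamma (3.0) is log (2.0) and gamma (3.0) is positive, so the
   folder above can replace the call with that constant and fold the store
   of 1 into *SIGNP.  Zero and negative-integer arguments are rejected
   because lgamma is not finite there.  */
#if 0
double
example_lgamma_r (int *signp)
{
  return __builtin_lgamma_r (3.0, signp);
}
#endif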
13181 #endif
13183 /* FIXME tuples.
13184 The functions below provide an alternate interface for folding
13185 builtin function calls presented as GIMPLE_CALL statements rather
13186 than as CALL_EXPRs. The folded result is still expressed as a
13187 tree. There is too much code duplication in the handling of
13188 varargs functions, and a more intrusive re-factoring would permit
13189 better sharing of code between the tree and statement-based
13190 versions of these functions. */
13192 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13193 along with N new arguments specified as the "..." parameters. SKIP
13194 is the number of arguments in STMT to be omitted. This function is used
13195 to do varargs-to-varargs transformations. */
13197 static tree
13198 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13200 int oldnargs = gimple_call_num_args (stmt);
13201 int nargs = oldnargs - skip + n;
13202 tree fntype = TREE_TYPE (fndecl);
13203 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13204 tree *buffer;
13205 int i, j;
13206 va_list ap;
13208 buffer = XALLOCAVEC (tree, nargs);
13209 va_start (ap, n);
13210 for (i = 0; i < n; i++)
13211 buffer[i] = va_arg (ap, tree);
13212 va_end (ap);
13213 for (j = skip; j < oldnargs; j++, i++)
13214 buffer[i] = gimple_call_arg (stmt, j);
13216 return fold (build_call_array (TREE_TYPE (fntype), fn, nargs, buffer));
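/* For example, the __sprintf_chk folder below invokes this helper as
   gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt), so for a statement
   representing __builtin___sprintf_chk (dest, flag, size, fmt, x) the
   first four arguments are skipped, DEST and FMT are supplied afresh, and
   the rebuilt CALL_EXPR is sprintf (dest, fmt, x).  (Illustrative note;
   the variable names are placeholders.)  */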
13219 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13220 a normal call should be emitted rather than expanding the function
13221 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13223 static tree
13224 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13226 tree dest, size, len, fn, fmt, flag;
13227 const char *fmt_str;
13228 int nargs = gimple_call_num_args (stmt);
13230 /* Verify the required arguments in the original call. */
13231 if (nargs < 4)
13232 return NULL_TREE;
13233 dest = gimple_call_arg (stmt, 0);
13234 if (!validate_arg (dest, POINTER_TYPE))
13235 return NULL_TREE;
13236 flag = gimple_call_arg (stmt, 1);
13237 if (!validate_arg (flag, INTEGER_TYPE))
13238 return NULL_TREE;
13239 size = gimple_call_arg (stmt, 2);
13240 if (!validate_arg (size, INTEGER_TYPE))
13241 return NULL_TREE;
13242 fmt = gimple_call_arg (stmt, 3);
13243 if (!validate_arg (fmt, POINTER_TYPE))
13244 return NULL_TREE;
13246 if (! host_integerp (size, 1))
13247 return NULL_TREE;
13249 len = NULL_TREE;
13251 if (!init_target_chars ())
13252 return NULL_TREE;
13254 /* Check whether the format is a literal string constant. */
13255 fmt_str = c_getstr (fmt);
13256 if (fmt_str != NULL)
13258 /* If the format doesn't contain % args or %%, we know the size. */
13259 if (strchr (fmt_str, target_percent) == 0)
13261 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13262 len = build_int_cstu (size_type_node, strlen (fmt_str));
13264 /* If the format is "%s" and the first ... argument is a string literal,
13265 we know the size too. */
13266 else if (fcode == BUILT_IN_SPRINTF_CHK
13267 && strcmp (fmt_str, target_percent_s) == 0)
13269 tree arg;
13271 if (nargs == 5)
13273 arg = gimple_call_arg (stmt, 4);
13274 if (validate_arg (arg, POINTER_TYPE))
13276 len = c_strlen (arg, 1);
13277 if (! len || ! host_integerp (len, 1))
13278 len = NULL_TREE;
13284 if (! integer_all_onesp (size))
13286 if (! len || ! tree_int_cst_lt (len, size))
13287 return NULL_TREE;
13290 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13291 or if format doesn't contain % chars or is "%s". */
13292 if (! integer_zerop (flag))
13294 if (fmt_str == NULL)
13295 return NULL_TREE;
13296 if (strchr (fmt_str, target_percent) != NULL
13297 && strcmp (fmt_str, target_percent_s))
13298 return NULL_TREE;
13301 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13302 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13303 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13304 if (!fn)
13305 return NULL_TREE;
13307 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
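/* For exposition only (kept under #if 0; example_sprintf_chk is a
   placeholder name): FLAG is zero, the literal format contains no '%',
   and its length is smaller than the known object size, so the checking
   call can be rewritten to sprintf (buf, "hello").  */
#if 0
void
example_sprintf_chk (void)
{
  char buf[32];
  __builtin___sprintf_chk (buf, 0, sizeof buf, "hello");
}
#endif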
13310 /* Fold a call STMT to __{,v}snprintf_chk. Return NULL_TREE if
13311 a normal call should be emitted rather than expanding the function
13312 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13313 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum length
13314 passed as the second argument. */
13316 tree
13317 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13318 enum built_in_function fcode)
13320 tree dest, size, len, fn, fmt, flag;
13321 const char *fmt_str;
13323 /* Verify the required arguments in the original call. */
13324 if (gimple_call_num_args (stmt) < 5)
13325 return NULL_TREE;
13326 dest = gimple_call_arg (stmt, 0);
13327 if (!validate_arg (dest, POINTER_TYPE))
13328 return NULL_TREE;
13329 len = gimple_call_arg (stmt, 1);
13330 if (!validate_arg (len, INTEGER_TYPE))
13331 return NULL_TREE;
13332 flag = gimple_call_arg (stmt, 2);
13333 if (!validate_arg (flag, INTEGER_TYPE))
13334 return NULL_TREE;
13335 size = gimple_call_arg (stmt, 3);
13336 if (!validate_arg (size, INTEGER_TYPE))
13337 return NULL_TREE;
13338 fmt = gimple_call_arg (stmt, 4);
13339 if (!validate_arg (fmt, POINTER_TYPE))
13340 return NULL_TREE;
13342 if (! host_integerp (size, 1))
13343 return NULL_TREE;
13345 if (! integer_all_onesp (size))
13347 if (! host_integerp (len, 1))
13349 /* If LEN is not constant, try MAXLEN too.
13350 For MAXLEN only allow optimizing into non-__chk function
13351 if SIZE is >= MAXLEN, never convert to __chk_fail (). */
13352 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13353 return NULL_TREE;
13355 else
13356 maxlen = len;
13358 if (tree_int_cst_lt (size, maxlen))
13359 return NULL_TREE;
13362 if (!init_target_chars ())
13363 return NULL_TREE;
13365 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13366 or if format doesn't contain % chars or is "%s". */
13367 if (! integer_zerop (flag))
13369 fmt_str = c_getstr (fmt);
13370 if (fmt_str == NULL)
13371 return NULL_TREE;
13372 if (strchr (fmt_str, target_percent) != NULL
13373 && strcmp (fmt_str, target_percent_s))
13374 return NULL_TREE;
13377 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13378 available. */
13379 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13380 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13381 if (!fn)
13382 return NULL_TREE;
13384 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
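/* For exposition only (kept under #if 0; example_snprintf_chk is a
   placeholder name): here the length argument must additionally be a
   constant no larger than the object size; 16 <= 32, FLAG is zero and the
   format contains no '%', so the call can be rewritten to
   snprintf (buf, 16, "hello").  */
#if 0
void
example_snprintf_chk (void)
{
  char buf[32];
  __builtin___snprintf_chk (buf, 16, 0, sizeof buf, "hello");
}
#endif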
13387 /* Builtins with folding operations that operate on "..." arguments
13388 need special handling; we need to store the arguments in a convenient
13389 data structure before attempting any folding. Fortunately there are
13390 only a few builtins that fall into this category. FNDECL is the
13391 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13392 result of the function call is ignored. */
13394 static tree
13395 gimple_fold_builtin_varargs (tree fndecl, gimple stmt, bool ignore ATTRIBUTE_UNUSED)
13397 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13398 tree ret = NULL_TREE;
13400 switch (fcode)
13402 case BUILT_IN_SPRINTF_CHK:
13403 case BUILT_IN_VSPRINTF_CHK:
13404 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13405 break;
13407 case BUILT_IN_SNPRINTF_CHK:
13408 case BUILT_IN_VSNPRINTF_CHK:
13409 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13411 default:
13412 break;
13414 if (ret)
13416 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13417 TREE_NO_WARNING (ret) = 1;
13418 return ret;
13420 return NULL_TREE;
13423 /* A wrapper function for builtin folding that prevents warnings for
13424 "statement without effect" and the like, caused by removing the
13425 call node before the warning is generated. */
13427 tree
13428 fold_call_stmt (gimple stmt, bool ignore)
13430 tree ret = NULL_TREE;
13431 tree fndecl = gimple_call_fndecl (stmt);
13432 if (fndecl
13433 && TREE_CODE (fndecl) == FUNCTION_DECL
13434 && DECL_BUILT_IN (fndecl)
13435 && !gimple_call_va_arg_pack_p (stmt))
13437 int nargs = gimple_call_num_args (stmt);
13439 /* FIXME: Don't use a list in this interface. */
13440 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13442 tree arglist = NULL_TREE;
13443 int i;
13444 for (i = nargs - 1; i >= 0; i--)
13445 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13446 return targetm.fold_builtin (fndecl, arglist, ignore);
13448 else
13450 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13452 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13453 int i;
13454 for (i = 0; i < nargs; i++)
13455 args[i] = gimple_call_arg (stmt, i);
13456 ret = fold_builtin_n (fndecl, args, nargs, ignore);
13458 if (!ret)
13459 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13460 if (ret)
13462 /* Propagate location information from the original call to the
13463 expansion of the builtin. Otherwise things like
13464 maybe_emit_chk_warning, which operate on the expansion
13465 of a builtin, will use the wrong location information. */
13466 if (gimple_has_location (stmt))
13468 tree realret = ret;
13469 if (TREE_CODE (ret) == NOP_EXPR)
13470 realret = TREE_OPERAND (ret, 0);
13471 if (CAN_HAVE_LOCATION_P (realret)
13472 && !EXPR_HAS_LOCATION (realret))
13473 SET_EXPR_LOCATION (realret, gimple_location (stmt));
13474 return realret;
13476 return ret;
13480 return NULL_TREE;