gcc/builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "tree-gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
56 #endif
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names[(int) END_BUILTINS] =
65 #include "builtins.def"
67 #undef DEF_BUILTIN
 69 /* Set up an array of _DECL trees, making sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls[(int) END_BUILTINS];
 72 /* Declarations used when constructing the builtin implicitly in the compiler.
 73 An entry may be NULL_TREE when the implicit use is invalid (for instance,
 74 when the runtime is not required to implement the function call in all cases). */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
85 #endif
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
132 static rtx expand_builtin_bzero (tree);
133 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_alloca (tree, rtx);
139 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static rtx expand_builtin_fputs (tree, rtx, bool);
142 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
143 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_expect (tree, tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (tree);
151 static tree fold_builtin_inf (tree, int);
152 static tree fold_builtin_nan (tree, tree, int);
153 static tree rewrite_call_expr (tree, int, tree, int, ...);
154 static bool validate_arg (const_tree, enum tree_code code);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (tree, tree);
157 static bool readonly_data_expr (tree);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_sqrt (tree, tree);
161 static tree fold_builtin_cbrt (tree, tree);
162 static tree fold_builtin_pow (tree, tree, tree, tree);
163 static tree fold_builtin_powi (tree, tree, tree, tree);
164 static tree fold_builtin_cos (tree, tree, tree);
165 static tree fold_builtin_cosh (tree, tree, tree);
166 static tree fold_builtin_tan (tree, tree);
167 static tree fold_builtin_trunc (tree, tree);
168 static tree fold_builtin_floor (tree, tree);
169 static tree fold_builtin_ceil (tree, tree);
170 static tree fold_builtin_round (tree, tree);
171 static tree fold_builtin_int_roundingfn (tree, tree);
172 static tree fold_builtin_bitop (tree, tree);
173 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
174 static tree fold_builtin_strchr (tree, tree, tree);
175 static tree fold_builtin_memchr (tree, tree, tree, tree);
176 static tree fold_builtin_memcmp (tree, tree, tree);
177 static tree fold_builtin_strcmp (tree, tree);
178 static tree fold_builtin_strncmp (tree, tree, tree);
179 static tree fold_builtin_signbit (tree, tree);
180 static tree fold_builtin_copysign (tree, tree, tree, tree);
181 static tree fold_builtin_isascii (tree);
182 static tree fold_builtin_toascii (tree);
183 static tree fold_builtin_isdigit (tree);
184 static tree fold_builtin_fabs (tree, tree);
185 static tree fold_builtin_abs (tree, tree);
186 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
187 enum tree_code);
188 static tree fold_builtin_n (tree, tree *, int, bool);
189 static tree fold_builtin_0 (tree, bool);
190 static tree fold_builtin_1 (tree, tree, bool);
191 static tree fold_builtin_2 (tree, tree, tree, bool);
192 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
193 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
194 static tree fold_builtin_varargs (tree, tree, bool);
196 static tree fold_builtin_strpbrk (tree, tree, tree);
197 static tree fold_builtin_strstr (tree, tree, tree);
198 static tree fold_builtin_strrchr (tree, tree, tree);
199 static tree fold_builtin_strcat (tree, tree);
200 static tree fold_builtin_strncat (tree, tree, tree);
201 static tree fold_builtin_strspn (tree, tree);
202 static tree fold_builtin_strcspn (tree, tree);
203 static tree fold_builtin_sprintf (tree, tree, tree, int);
205 static rtx expand_builtin_object_size (tree);
206 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
207 enum built_in_function);
208 static void maybe_emit_chk_warning (tree, enum built_in_function);
209 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
210 static tree fold_builtin_object_size (tree, tree);
211 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
212 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
213 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
214 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
215 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
216 enum built_in_function);
217 static bool init_target_chars (void);
219 static unsigned HOST_WIDE_INT target_newline;
220 static unsigned HOST_WIDE_INT target_percent;
221 static unsigned HOST_WIDE_INT target_c;
222 static unsigned HOST_WIDE_INT target_s;
223 static char target_percent_c[3];
224 static char target_percent_s[3];
225 static char target_percent_s_newline[4];
226 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_arg2 (tree, tree, tree,
229 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
230 static tree do_mpfr_arg3 (tree, tree, tree, tree,
231 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
232 static tree do_mpfr_sincos (tree, tree, tree);
233 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
234 static tree do_mpfr_bessel_n (tree, tree, tree,
235 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
236 const REAL_VALUE_TYPE *, bool);
237 static tree do_mpfr_remquo (tree, tree, tree);
238 static tree do_mpfr_lgamma_r (tree, tree, tree);
239 #endif
241 /* Return true if NODE should be considered for inline expansion regardless
 242 of the optimization level. This is the case whenever the function is invoked
 243 under its "internal" name, which normally contains the prefix "__builtin". */
245 static bool called_as_built_in (tree node)
247 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
248 if (strncmp (name, "__builtin_", 10) == 0)
249 return true;
250 if (strncmp (name, "__sync_", 7) == 0)
251 return true;
252 return false;
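/* For example (illustrative, with dst and src being arbitrary pointers):

     __builtin_memcpy (dst, src, 16);

   is invoked under its internal name and therefore stays a candidate for
   inline expansion even when not optimizing, whereas a plain
   memcpy (dst, src, 16) carries no such guarantee.  The __sync_* builtins
   recognized above are treated the same way.  */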
255 /* Return the alignment in bits of EXP, a pointer valued expression.
256 But don't return more than MAX_ALIGN no matter what.
257 The alignment returned is, by default, the alignment of the thing that
258 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
260 Otherwise, look at the expression to see if we can do better, i.e., if the
261 expression is actually pointing at an object whose alignment is tighter. */
 263 unsigned int
 264 get_pointer_alignment (tree exp, unsigned int max_align)
266 unsigned int align, inner;
268 /* We rely on TER to compute accurate alignment information. */
269 if (!(optimize && flag_tree_ter))
270 return 0;
272 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
273 return 0;
275 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
276 align = MIN (align, max_align);
278 while (1)
280 switch (TREE_CODE (exp))
282 CASE_CONVERT:
283 exp = TREE_OPERAND (exp, 0);
284 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
285 return align;
287 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
288 align = MIN (inner, max_align);
289 break;
291 case POINTER_PLUS_EXPR:
292 /* If sum of pointer + int, restrict our maximum alignment to that
293 imposed by the integer. If not, we can't do any better than
294 ALIGN. */
295 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
296 return align;
298 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
299 & (max_align / BITS_PER_UNIT - 1))
300 != 0)
301 max_align >>= 1;
303 exp = TREE_OPERAND (exp, 0);
304 break;
306 case ADDR_EXPR:
307 /* See what we are pointing at and look at its alignment. */
308 exp = TREE_OPERAND (exp, 0);
309 inner = max_align;
310 if (handled_component_p (exp))
312 HOST_WIDE_INT bitsize, bitpos;
313 tree offset;
314 enum machine_mode mode;
315 int unsignedp, volatilep;
317 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
318 &mode, &unsignedp, &volatilep, true);
319 if (bitpos)
320 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
321 if (offset && TREE_CODE (offset) == PLUS_EXPR
322 && host_integerp (TREE_OPERAND (offset, 1), 1))
324 /* Any overflow in calculating offset_bits won't change
325 the alignment. */
326 unsigned offset_bits
327 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
328 * BITS_PER_UNIT);
330 if (offset_bits)
331 inner = MIN (inner, (offset_bits & -offset_bits));
332 offset = TREE_OPERAND (offset, 0);
334 if (offset && TREE_CODE (offset) == MULT_EXPR
335 && host_integerp (TREE_OPERAND (offset, 1), 1))
337 /* Any overflow in calculating offset_factor won't change
338 the alignment. */
339 unsigned offset_factor
340 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
341 * BITS_PER_UNIT);
343 if (offset_factor)
344 inner = MIN (inner, (offset_factor & -offset_factor));
346 else if (offset)
347 inner = MIN (inner, BITS_PER_UNIT);
349 if (DECL_P (exp))
350 align = MIN (inner, DECL_ALIGN (exp));
351 #ifdef CONSTANT_ALIGNMENT
352 else if (CONSTANT_CLASS_P (exp))
353 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
354 #endif
355 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
356 || TREE_CODE (exp) == INDIRECT_REF)
357 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
358 else
359 align = MIN (align, inner);
360 return MIN (align, max_align);
362 default:
363 return align;
368 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
369 way, because it could contain a zero byte in the middle.
370 TREE_STRING_LENGTH is the size of the character array, not the string.
372 ONLY_VALUE should be nonzero if the result is not going to be emitted
373 into the instruction stream and zero if it is going to be expanded.
374 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
375 is returned, otherwise NULL, since
376 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
377 evaluate the side-effects.
379 The value returned is of type `ssizetype'.
381 Unfortunately, string_constant can't access the values of const char
382 arrays with initializers, so neither can we do so here. */
384 tree
385 c_strlen (tree src, int only_value)
387 tree offset_node;
388 HOST_WIDE_INT offset;
389 int max;
390 const char *ptr;
392 STRIP_NOPS (src);
393 if (TREE_CODE (src) == COND_EXPR
394 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
396 tree len1, len2;
398 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
399 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
400 if (tree_int_cst_equal (len1, len2))
401 return len1;
404 if (TREE_CODE (src) == COMPOUND_EXPR
405 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
406 return c_strlen (TREE_OPERAND (src, 1), only_value);
408 src = string_constant (src, &offset_node);
409 if (src == 0)
410 return NULL_TREE;
412 max = TREE_STRING_LENGTH (src) - 1;
413 ptr = TREE_STRING_POINTER (src);
415 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
417 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
418 compute the offset to the following null if we don't know where to
419 start searching for it. */
420 int i;
422 for (i = 0; i < max; i++)
423 if (ptr[i] == 0)
424 return NULL_TREE;
426 /* We don't know the starting offset, but we do know that the string
427 has no internal zero bytes. We can assume that the offset falls
428 within the bounds of the string; otherwise, the programmer deserves
429 what he gets. Subtract the offset from the length of the string,
430 and return that. This would perhaps not be valid if we were dealing
431 with named arrays in addition to literal string constants. */
433 return size_diffop (size_int (max), offset_node);
436 /* We have a known offset into the string. Start searching there for
437 a null character if we can represent it as a single HOST_WIDE_INT. */
438 if (offset_node == 0)
439 offset = 0;
440 else if (! host_integerp (offset_node, 0))
441 offset = -1;
442 else
443 offset = tree_low_cst (offset_node, 0);
445 /* If the offset is known to be out of bounds, warn, and call strlen at
446 runtime. */
447 if (offset < 0 || offset > max)
449 /* Suppress multiple warnings for propagated constant strings. */
450 if (! TREE_NO_WARNING (src))
452 warning (0, "offset outside bounds of constant string");
453 TREE_NO_WARNING (src) = 1;
455 return NULL_TREE;
458 /* Use strlen to search for the first zero byte. Since any strings
459 constructed with build_string will have nulls appended, we win even
460 if we get handed something like (char[4])"abcd".
462 Since OFFSET is our starting index into the string, no further
463 calculation is needed. */
464 return ssize_int (strlen (ptr + offset));
467 /* Return a char pointer for a C string if it is a string constant
468 or sum of string constant and integer constant. */
470 static const char *
471 c_getstr (tree src)
473 tree offset_node;
475 src = string_constant (src, &offset_node);
476 if (src == 0)
477 return 0;
479 if (offset_node == 0)
480 return TREE_STRING_POINTER (src);
481 else if (!host_integerp (offset_node, 1)
482 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
483 return 0;
485 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
488 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
489 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
491 static rtx
492 c_readstr (const char *str, enum machine_mode mode)
494 HOST_WIDE_INT c[2];
495 HOST_WIDE_INT ch;
496 unsigned int i, j;
498 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
500 c[0] = 0;
501 c[1] = 0;
502 ch = 1;
503 for (i = 0; i < GET_MODE_SIZE (mode); i++)
505 j = i;
506 if (WORDS_BIG_ENDIAN)
507 j = GET_MODE_SIZE (mode) - i - 1;
508 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
509 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
510 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
511 j *= BITS_PER_UNIT;
512 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
514 if (ch)
515 ch = (unsigned char) str[i];
516 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
518 return immed_double_const (c[0], c[1], mode);
 521 /* Cast a target constant CST to a target CHAR; if that value fits into the
 522 host char type, return zero and store the value in the variable pointed to
 523 by P. Otherwise return nonzero. */
525 static int
526 target_char_cast (tree cst, char *p)
528 unsigned HOST_WIDE_INT val, hostval;
530 if (!host_integerp (cst, 1)
531 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
532 return 1;
534 val = tree_low_cst (cst, 1);
535 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
536 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
538 hostval = val;
539 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
540 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
542 if (val != hostval)
543 return 1;
545 *p = hostval;
546 return 0;
549 /* Similar to save_expr, but assumes that arbitrary code is not executed
550 in between the multiple evaluations. In particular, we assume that a
551 non-addressable local variable will not be modified. */
553 static tree
554 builtin_save_expr (tree exp)
556 if (TREE_ADDRESSABLE (exp) == 0
557 && (TREE_CODE (exp) == PARM_DECL
558 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
559 return exp;
561 return save_expr (exp);
564 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
565 times to get the address of either a higher stack frame, or a return
566 address located within it (depending on FNDECL_CODE). */
568 static rtx
569 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
571 int i;
573 #ifdef INITIAL_FRAME_ADDRESS_RTX
574 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
575 #else
576 rtx tem;
578 /* For a zero count with __builtin_return_address, we don't care what
579 frame address we return, because target-specific definitions will
580 override us. Therefore frame pointer elimination is OK, and using
581 the soft frame pointer is OK.
583 For a nonzero count, or a zero count with __builtin_frame_address,
584 we require a stable offset from the current frame pointer to the
585 previous one, so we must use the hard frame pointer, and
586 we must disable frame pointer elimination. */
587 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
588 tem = frame_pointer_rtx;
589 else
591 tem = hard_frame_pointer_rtx;
593 /* Tell reload not to eliminate the frame pointer. */
594 crtl->accesses_prior_frames = 1;
596 #endif
598 /* Some machines need special handling before we can access
599 arbitrary frames. For example, on the SPARC, we must first flush
600 all register windows to the stack. */
601 #ifdef SETUP_FRAME_ADDRESSES
602 if (count > 0)
603 SETUP_FRAME_ADDRESSES ();
604 #endif
606 /* On the SPARC, the return address is not in the frame, it is in a
607 register. There is no way to access it off of the current frame
608 pointer, but it can be accessed off the previous frame pointer by
609 reading the value from the register window save area. */
610 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
611 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
612 count--;
613 #endif
615 /* Scan back COUNT frames to the specified frame. */
616 for (i = 0; i < count; i++)
618 /* Assume the dynamic chain pointer is in the word that the
619 frame address points to, unless otherwise specified. */
620 #ifdef DYNAMIC_CHAIN_ADDRESS
621 tem = DYNAMIC_CHAIN_ADDRESS (tem);
622 #endif
623 tem = memory_address (Pmode, tem);
624 tem = gen_frame_mem (Pmode, tem);
625 tem = copy_to_reg (tem);
628 /* For __builtin_frame_address, return what we've got. But, on
629 the SPARC for example, we may have to add a bias. */
630 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
631 #ifdef FRAME_ADDR_RTX
632 return FRAME_ADDR_RTX (tem);
633 #else
634 return tem;
635 #endif
637 /* For __builtin_return_address, get the return address from that frame. */
638 #ifdef RETURN_ADDR_RTX
639 tem = RETURN_ADDR_RTX (count, tem);
640 #else
641 tem = memory_address (Pmode,
642 plus_constant (tem, GET_MODE_SIZE (Pmode)));
643 tem = gen_frame_mem (Pmode, tem);
644 #endif
645 return tem;
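/* Illustrative source-level forms handled by the code above; the argument
   must be a small nonnegative integer constant:

     void *ra = __builtin_return_address (0);   // this frame's return address
     void *fa = __builtin_frame_address (1);    // one frame up, where supported

   A nonzero count walks the dynamic chain of frame pointers as coded above.  */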
648 /* Alias set used for setjmp buffer. */
649 static alias_set_type setjmp_alias_set = -1;
651 /* Construct the leading half of a __builtin_setjmp call. Control will
652 return to RECEIVER_LABEL. This is also called directly by the SJLJ
653 exception handling code. */
655 void
656 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
658 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
659 rtx stack_save;
660 rtx mem;
662 if (setjmp_alias_set == -1)
663 setjmp_alias_set = new_alias_set ();
665 buf_addr = convert_memory_address (Pmode, buf_addr);
667 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
669 /* We store the frame pointer and the address of receiver_label in
670 the buffer and use the rest of it for the stack save area, which
671 is machine-dependent. */
673 mem = gen_rtx_MEM (Pmode, buf_addr);
674 set_mem_alias_set (mem, setjmp_alias_set);
675 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
677 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
678 set_mem_alias_set (mem, setjmp_alias_set);
680 emit_move_insn (validize_mem (mem),
681 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
683 stack_save = gen_rtx_MEM (sa_mode,
684 plus_constant (buf_addr,
685 2 * GET_MODE_SIZE (Pmode)));
686 set_mem_alias_set (stack_save, setjmp_alias_set);
687 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
689 /* If there is further processing to do, do it. */
690 #ifdef HAVE_builtin_setjmp_setup
691 if (HAVE_builtin_setjmp_setup)
692 emit_insn (gen_builtin_setjmp_setup (buf_addr));
693 #endif
695 /* Tell optimize_save_area_alloca that extra work is going to
696 need to go on during alloca. */
697 cfun->calls_setjmp = 1;
699 /* We have a nonlocal label. */
700 cfun->has_nonlocal_label = 1;
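/* A sketch of the buffer layout established above, in Pmode-sized words
   (derived from the offsets used in this file):

     buf[0]    frame value from targetm.builtin_setjmp_frame_value ()
     buf[1]    address of RECEIVER_LABEL
     buf[2..]  stack save area, in STACK_SAVEAREA_MODE (SAVE_NONLOCAL)

   expand_builtin_longjmp and expand_builtin_update_setjmp_buf below read
   and update the same slots.  */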
703 /* Construct the trailing part of a __builtin_setjmp call. This is
704 also called directly by the SJLJ exception handling code. */
706 void
707 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
709 /* Clobber the FP when we get here, so we have to make sure it's
710 marked as used by this function. */
711 emit_use (hard_frame_pointer_rtx);
713 /* Mark the static chain as clobbered here so life information
714 doesn't get messed up for it. */
715 emit_clobber (static_chain_rtx);
717 /* Now put in the code to restore the frame pointer, and argument
718 pointer, if needed. */
719 #ifdef HAVE_nonlocal_goto
720 if (! HAVE_nonlocal_goto)
721 #endif
723 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
724 /* This might change the hard frame pointer in ways that aren't
725 apparent to early optimization passes, so force a clobber. */
726 emit_clobber (hard_frame_pointer_rtx);
729 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
730 if (fixed_regs[ARG_POINTER_REGNUM])
732 #ifdef ELIMINABLE_REGS
733 size_t i;
734 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
736 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
737 if (elim_regs[i].from == ARG_POINTER_REGNUM
738 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
739 break;
741 if (i == ARRAY_SIZE (elim_regs))
742 #endif
744 /* Now restore our arg pointer from the address at which it
745 was saved in our stack frame. */
746 emit_move_insn (virtual_incoming_args_rtx,
747 copy_to_reg (get_arg_pointer_save_area ()));
750 #endif
752 #ifdef HAVE_builtin_setjmp_receiver
753 if (HAVE_builtin_setjmp_receiver)
754 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
755 else
756 #endif
757 #ifdef HAVE_nonlocal_goto_receiver
758 if (HAVE_nonlocal_goto_receiver)
759 emit_insn (gen_nonlocal_goto_receiver ());
760 else
761 #endif
762 { /* Nothing */ }
764 /* We must not allow the code we just generated to be reordered by
765 scheduling. Specifically, the update of the frame pointer must
766 happen immediately, not later. */
767 emit_insn (gen_blockage ());
770 /* __builtin_longjmp is passed a pointer to an array of five words (not
771 all will be used on all machines). It operates similarly to the C
772 library function of the same name, but is more efficient. Much of
773 the code below is copied from the handling of non-local gotos. */
775 static void
776 expand_builtin_longjmp (rtx buf_addr, rtx value)
778 rtx fp, lab, stack, insn, last;
779 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
781 if (setjmp_alias_set == -1)
782 setjmp_alias_set = new_alias_set ();
784 buf_addr = convert_memory_address (Pmode, buf_addr);
786 buf_addr = force_reg (Pmode, buf_addr);
788 /* We used to store value in static_chain_rtx, but that fails if pointers
789 are smaller than integers. We instead require that the user must pass
790 a second argument of 1, because that is what builtin_setjmp will
791 return. This also makes EH slightly more efficient, since we are no
792 longer copying around a value that we don't care about. */
793 gcc_assert (value == const1_rtx);
795 last = get_last_insn ();
796 #ifdef HAVE_builtin_longjmp
797 if (HAVE_builtin_longjmp)
798 emit_insn (gen_builtin_longjmp (buf_addr));
799 else
800 #endif
802 fp = gen_rtx_MEM (Pmode, buf_addr);
803 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
804 GET_MODE_SIZE (Pmode)));
806 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
807 2 * GET_MODE_SIZE (Pmode)));
808 set_mem_alias_set (fp, setjmp_alias_set);
809 set_mem_alias_set (lab, setjmp_alias_set);
810 set_mem_alias_set (stack, setjmp_alias_set);
812 /* Pick up FP, label, and SP from the block and jump. This code is
813 from expand_goto in stmt.c; see there for detailed comments. */
814 #ifdef HAVE_nonlocal_goto
815 if (HAVE_nonlocal_goto)
816 /* We have to pass a value to the nonlocal_goto pattern that will
817 get copied into the static_chain pointer, but it does not matter
818 what that value is, because builtin_setjmp does not use it. */
819 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
820 else
821 #endif
823 lab = copy_to_reg (lab);
825 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
826 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
828 emit_move_insn (hard_frame_pointer_rtx, fp);
829 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
831 emit_use (hard_frame_pointer_rtx);
832 emit_use (stack_pointer_rtx);
833 emit_indirect_jump (lab);
837 /* Search backwards and mark the jump insn as a non-local goto.
838 Note that this precludes the use of __builtin_longjmp to a
839 __builtin_setjmp target in the same function. However, we've
840 already cautioned the user that these functions are for
841 internal exception handling use only. */
842 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
844 gcc_assert (insn != last);
846 if (JUMP_P (insn))
848 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
849 break;
851 else if (CALL_P (insn))
852 break;
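/* A minimal sketch of the only supported source-level pairing; do_work and
   handle_jump_back are placeholder names, and the second argument to
   __builtin_longjmp must be the constant 1, as asserted above:

     void *buf[5];

     if (__builtin_setjmp (buf) == 0)
       do_work (buf);               // may execute __builtin_longjmp (buf, 1)
     else
       handle_jump_back ();

   As cautioned above, these builtins are meant for the internal
   exception-handling machinery rather than for general use.  */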
856 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
857 and the address of the save area. */
859 static rtx
860 expand_builtin_nonlocal_goto (tree exp)
862 tree t_label, t_save_area;
863 rtx r_label, r_save_area, r_fp, r_sp, insn;
865 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
866 return NULL_RTX;
868 t_label = CALL_EXPR_ARG (exp, 0);
869 t_save_area = CALL_EXPR_ARG (exp, 1);
871 r_label = expand_normal (t_label);
872 r_label = convert_memory_address (Pmode, r_label);
873 r_save_area = expand_normal (t_save_area);
874 r_save_area = convert_memory_address (Pmode, r_save_area);
875 /* Copy the address of the save location to a register just in case it was based
876 on the frame pointer. */
877 r_save_area = copy_to_reg (r_save_area);
878 r_fp = gen_rtx_MEM (Pmode, r_save_area);
879 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
880 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
882 crtl->has_nonlocal_goto = 1;
884 #ifdef HAVE_nonlocal_goto
885 /* ??? We no longer need to pass the static chain value, afaik. */
886 if (HAVE_nonlocal_goto)
887 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
888 else
889 #endif
891 r_label = copy_to_reg (r_label);
893 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
894 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
896 /* Restore frame pointer for containing function.
897 This sets the actual hard register used for the frame pointer
898 to the location of the function's incoming static chain info.
899 The non-local goto handler will then adjust it to contain the
900 proper value and reload the argument pointer, if needed. */
901 emit_move_insn (hard_frame_pointer_rtx, r_fp);
902 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
904 /* USE of hard_frame_pointer_rtx added for consistency;
905 not clear if really needed. */
906 emit_use (hard_frame_pointer_rtx);
907 emit_use (stack_pointer_rtx);
909 /* If the architecture is using a GP register, we must
910 conservatively assume that the target function makes use of it.
911 The prologue of functions with nonlocal gotos must therefore
912 initialize the GP register to the appropriate value, and we
913 must then make sure that this value is live at the point
914 of the jump. (Note that this doesn't necessarily apply
915 to targets with a nonlocal_goto pattern; they are free
916 to implement it in their own way. Note also that this is
917 a no-op if the GP register is a global invariant.) */
918 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
919 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
920 emit_use (pic_offset_table_rtx);
922 emit_indirect_jump (r_label);
925 /* Search backwards to the jump insn and mark it as a
926 non-local goto. */
927 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
929 if (JUMP_P (insn))
931 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
932 break;
934 else if (CALL_P (insn))
935 break;
938 return const0_rtx;
941 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
942 (not all will be used on all machines) that was passed to __builtin_setjmp.
943 It updates the stack pointer in that block to correspond to the current
944 stack pointer. */
946 static void
947 expand_builtin_update_setjmp_buf (rtx buf_addr)
949 enum machine_mode sa_mode = Pmode;
950 rtx stack_save;
953 #ifdef HAVE_save_stack_nonlocal
954 if (HAVE_save_stack_nonlocal)
955 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
956 #endif
957 #ifdef STACK_SAVEAREA_MODE
958 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
959 #endif
961 stack_save
962 = gen_rtx_MEM (sa_mode,
963 memory_address
964 (sa_mode,
965 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
967 #ifdef HAVE_setjmp
968 if (HAVE_setjmp)
969 emit_insn (gen_setjmp ());
970 #endif
972 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
975 /* Expand a call to __builtin_prefetch. For a target that does not support
976 data prefetch, evaluate the memory address argument in case it has side
977 effects. */
979 static void
980 expand_builtin_prefetch (tree exp)
982 tree arg0, arg1, arg2;
983 int nargs;
984 rtx op0, op1, op2;
986 if (!validate_arglist (exp, POINTER_TYPE, 0))
987 return;
989 arg0 = CALL_EXPR_ARG (exp, 0);
991 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
992 zero (read) and argument 2 (locality) defaults to 3 (high degree of
993 locality). */
994 nargs = call_expr_nargs (exp);
995 if (nargs > 1)
996 arg1 = CALL_EXPR_ARG (exp, 1);
997 else
998 arg1 = integer_zero_node;
999 if (nargs > 2)
1000 arg2 = CALL_EXPR_ARG (exp, 2);
1001 else
1002 arg2 = build_int_cst (NULL_TREE, 3);
1004 /* Argument 0 is an address. */
1005 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1007 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1008 if (TREE_CODE (arg1) != INTEGER_CST)
1010 error ("second argument to %<__builtin_prefetch%> must be a constant");
1011 arg1 = integer_zero_node;
1013 op1 = expand_normal (arg1);
1014 /* Argument 1 must be either zero or one. */
1015 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1017 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1018 " using zero");
1019 op1 = const0_rtx;
1022 /* Argument 2 (locality) must be a compile-time constant int. */
1023 if (TREE_CODE (arg2) != INTEGER_CST)
1025 error ("third argument to %<__builtin_prefetch%> must be a constant");
1026 arg2 = integer_zero_node;
1028 op2 = expand_normal (arg2);
1029 /* Argument 2 must be 0, 1, 2, or 3. */
1030 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1032 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1033 op2 = const0_rtx;
1036 #ifdef HAVE_prefetch
1037 if (HAVE_prefetch)
1039 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1040 (op0,
1041 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1042 || (GET_MODE (op0) != Pmode))
1044 op0 = convert_memory_address (Pmode, op0);
1045 op0 = force_reg (Pmode, op0);
1047 emit_insn (gen_prefetch (op0, op1, op2));
1049 #endif
1051 /* Don't do anything with direct references to volatile memory, but
1052 generate code to handle other side effects. */
1053 if (!MEM_P (op0) && side_effects_p (op0))
1054 emit_insn (op0);
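/* Illustrative source-level calls accepted by the checks above, with p a
   placeholder pointer:

     __builtin_prefetch (p);          // rw defaults to 0 (read), locality to 3
     __builtin_prefetch (p, 1, 3);    // prefetch for write, high locality
     __builtin_prefetch (p, 0, 0);    // read, no temporal locality

   The second and third arguments must be compile-time integer constants,
   as enforced above.  */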
1057 /* Get a MEM rtx for expression EXP which is the address of an operand
1058 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1059 the maximum length of the block of memory that might be accessed or
1060 NULL if unknown. */
1062 static rtx
1063 get_memory_rtx (tree exp, tree len)
1065 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1066 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1068 /* Get an expression we can use to find the attributes to assign to MEM.
1069 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1070 we can. First remove any nops. */
1071 while (CONVERT_EXPR_P (exp)
1072 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1073 exp = TREE_OPERAND (exp, 0);
1075 if (TREE_CODE (exp) == ADDR_EXPR)
1076 exp = TREE_OPERAND (exp, 0);
1077 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1078 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1079 else
1080 exp = NULL;
1082 /* Honor attributes derived from exp, except for the alias set
1083 (as builtin stringops may alias with anything) and the size
1084 (as stringops may access multiple array elements). */
1085 if (exp)
1087 set_mem_attributes (mem, exp, 0);
1089 /* Allow the string and memory builtins to overflow from one
1090 field into another, see http://gcc.gnu.org/PR23561.
1091 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1092 memory accessed by the string or memory builtin will fit
1093 within the field. */
1094 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1096 tree mem_expr = MEM_EXPR (mem);
1097 HOST_WIDE_INT offset = -1, length = -1;
1098 tree inner = exp;
1100 while (TREE_CODE (inner) == ARRAY_REF
1101 || CONVERT_EXPR_P (inner)
1102 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1103 || TREE_CODE (inner) == SAVE_EXPR)
1104 inner = TREE_OPERAND (inner, 0);
1106 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1108 if (MEM_OFFSET (mem)
1109 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1110 offset = INTVAL (MEM_OFFSET (mem));
1112 if (offset >= 0 && len && host_integerp (len, 0))
1113 length = tree_low_cst (len, 0);
1115 while (TREE_CODE (inner) == COMPONENT_REF)
1117 tree field = TREE_OPERAND (inner, 1);
1118 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1119 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1121 /* Bitfields are generally not byte-addressable. */
1122 gcc_assert (!DECL_BIT_FIELD (field)
1123 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1124 % BITS_PER_UNIT) == 0
1125 && host_integerp (DECL_SIZE (field), 0)
1126 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1127 % BITS_PER_UNIT) == 0));
1129 /* If we can prove that the memory starting at XEXP (mem, 0) and
1130 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1131 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1132 fields without DECL_SIZE_UNIT like flexible array members. */
1133 if (length >= 0
1134 && DECL_SIZE_UNIT (field)
1135 && host_integerp (DECL_SIZE_UNIT (field), 0))
1137 HOST_WIDE_INT size
1138 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1139 if (offset <= size
1140 && length <= size
1141 && offset + length <= size)
1142 break;
1145 if (offset >= 0
1146 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1147 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1148 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1149 / BITS_PER_UNIT;
1150 else
1152 offset = -1;
1153 length = -1;
1156 mem_expr = TREE_OPERAND (mem_expr, 0);
1157 inner = TREE_OPERAND (inner, 0);
1160 if (mem_expr == NULL)
1161 offset = -1;
1162 if (mem_expr != MEM_EXPR (mem))
1164 set_mem_expr (mem, mem_expr);
1165 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1168 set_mem_alias_set (mem, 0);
1169 set_mem_size (mem, NULL_RTX);
1172 return mem;
1175 /* Built-in functions to perform an untyped call and return. */
1177 /* For each register that may be used for calling a function, this
1178 gives a mode used to copy the register's value. VOIDmode indicates
1179 the register is not used for calling a function. If the machine
1180 has register windows, this gives only the outbound registers.
1181 INCOMING_REGNO gives the corresponding inbound register. */
1182 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1184 /* For each register that may be used for returning values, this gives
1185 a mode used to copy the register's value. VOIDmode indicates the
1186 register is not used for returning values. If the machine has
1187 register windows, this gives only the outbound registers.
1188 INCOMING_REGNO gives the corresponding inbound register. */
1189 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1191 /* For each register that may be used for calling a function, this
1192 gives the offset of that register into the block returned by
1193 __builtin_apply_args. 0 indicates that the register is not
1194 used for calling a function. */
1195 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1197 /* Return the size required for the block returned by __builtin_apply_args,
1198 and initialize apply_args_mode. */
1200 static int
1201 apply_args_size (void)
1203 static int size = -1;
1204 int align;
1205 unsigned int regno;
1206 enum machine_mode mode;
1208 /* The values computed by this function never change. */
1209 if (size < 0)
1211 /* The first value is the incoming arg-pointer. */
1212 size = GET_MODE_SIZE (Pmode);
1214 /* The second value is the structure value address unless this is
1215 passed as an "invisible" first argument. */
1216 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1217 size += GET_MODE_SIZE (Pmode);
1219 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1220 if (FUNCTION_ARG_REGNO_P (regno))
1222 mode = reg_raw_mode[regno];
1224 gcc_assert (mode != VOIDmode);
1226 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1227 if (size % align != 0)
1228 size = CEIL (size, align) * align;
1229 apply_args_reg_offset[regno] = size;
1230 size += GET_MODE_SIZE (mode);
1231 apply_args_mode[regno] = mode;
1233 else
1235 apply_args_mode[regno] = VOIDmode;
1236 apply_args_reg_offset[regno] = 0;
1239 return size;
1242 /* Return the size required for the block returned by __builtin_apply,
1243 and initialize apply_result_mode. */
1245 static int
1246 apply_result_size (void)
1248 static int size = -1;
1249 int align, regno;
1250 enum machine_mode mode;
1252 /* The values computed by this function never change. */
1253 if (size < 0)
1255 size = 0;
1257 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1258 if (FUNCTION_VALUE_REGNO_P (regno))
1260 mode = reg_raw_mode[regno];
1262 gcc_assert (mode != VOIDmode);
1264 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1265 if (size % align != 0)
1266 size = CEIL (size, align) * align;
1267 size += GET_MODE_SIZE (mode);
1268 apply_result_mode[regno] = mode;
1270 else
1271 apply_result_mode[regno] = VOIDmode;
1273 /* Allow targets that use untyped_call and untyped_return to override
1274 the size so that machine-specific information can be stored here. */
1275 #ifdef APPLY_RESULT_SIZE
1276 size = APPLY_RESULT_SIZE;
1277 #endif
1279 return size;
1282 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1283 /* Create a vector describing the result block RESULT. If SAVEP is true,
1284 the result block is used to save the values; otherwise it is used to
1285 restore the values. */
1287 static rtx
1288 result_vector (int savep, rtx result)
1290 int regno, size, align, nelts;
1291 enum machine_mode mode;
1292 rtx reg, mem;
1293 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1295 size = nelts = 0;
1296 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1297 if ((mode = apply_result_mode[regno]) != VOIDmode)
1299 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1300 if (size % align != 0)
1301 size = CEIL (size, align) * align;
1302 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1303 mem = adjust_address (result, mode, size);
1304 savevec[nelts++] = (savep
1305 ? gen_rtx_SET (VOIDmode, mem, reg)
1306 : gen_rtx_SET (VOIDmode, reg, mem));
1307 size += GET_MODE_SIZE (mode);
1309 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1311 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1313 /* Save the state required to perform an untyped call with the same
1314 arguments as were passed to the current function. */
1316 static rtx
1317 expand_builtin_apply_args_1 (void)
1319 rtx registers, tem;
1320 int size, align, regno;
1321 enum machine_mode mode;
1322 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1324 /* Create a block where the arg-pointer, structure value address,
1325 and argument registers can be saved. */
1326 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1328 /* Walk past the arg-pointer and structure value address. */
1329 size = GET_MODE_SIZE (Pmode);
1330 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1331 size += GET_MODE_SIZE (Pmode);
1333 /* Save each register used in calling a function to the block. */
1334 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1335 if ((mode = apply_args_mode[regno]) != VOIDmode)
1337 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1338 if (size % align != 0)
1339 size = CEIL (size, align) * align;
1341 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1343 emit_move_insn (adjust_address (registers, mode, size), tem);
1344 size += GET_MODE_SIZE (mode);
1347 /* Save the arg pointer to the block. */
1348 tem = copy_to_reg (virtual_incoming_args_rtx);
1349 #ifdef STACK_GROWS_DOWNWARD
1350 /* We need the pointer as the caller actually passed them to us, not
1351 as we might have pretended they were passed. Make sure it's a valid
1352 operand, as emit_move_insn isn't expected to handle a PLUS. */
 1353 tem
 1354 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1355 NULL_RTX);
1356 #endif
1357 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1359 size = GET_MODE_SIZE (Pmode);
1361 /* Save the structure value address unless this is passed as an
1362 "invisible" first argument. */
1363 if (struct_incoming_value)
1365 emit_move_insn (adjust_address (registers, Pmode, size),
1366 copy_to_reg (struct_incoming_value));
1367 size += GET_MODE_SIZE (Pmode);
1370 /* Return the address of the block. */
1371 return copy_addr_to_reg (XEXP (registers, 0));
1374 /* __builtin_apply_args returns block of memory allocated on
1375 the stack into which is stored the arg pointer, structure
1376 value address, static chain, and all the registers that might
1377 possibly be used in performing a function call. The code is
1378 moved to the start of the function so the incoming values are
1379 saved. */
1381 static rtx
1382 expand_builtin_apply_args (void)
1384 /* Don't do __builtin_apply_args more than once in a function.
1385 Save the result of the first call and reuse it. */
1386 if (apply_args_value != 0)
1387 return apply_args_value;
1389 /* When this function is called, it means that registers must be
1390 saved on entry to this function. So we migrate the
1391 call to the first insn of this function. */
1392 rtx temp;
1393 rtx seq;
1395 start_sequence ();
1396 temp = expand_builtin_apply_args_1 ();
1397 seq = get_insns ();
1398 end_sequence ();
1400 apply_args_value = temp;
1402 /* Put the insns after the NOTE that starts the function.
1403 If this is inside a start_sequence, make the outer-level insn
1404 chain current, so the code is placed at the start of the
1405 function. */
1406 push_topmost_sequence ();
1407 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1408 pop_topmost_sequence ();
1409 return temp;
1413 /* Perform an untyped call and save the state required to perform an
1414 untyped return of whatever value was returned by the given function. */
1416 static rtx
1417 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1419 int size, align, regno;
1420 enum machine_mode mode;
1421 rtx incoming_args, result, reg, dest, src, call_insn;
1422 rtx old_stack_level = 0;
1423 rtx call_fusage = 0;
1424 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1426 arguments = convert_memory_address (Pmode, arguments);
1428 /* Create a block where the return registers can be saved. */
1429 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1431 /* Fetch the arg pointer from the ARGUMENTS block. */
1432 incoming_args = gen_reg_rtx (Pmode);
1433 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1434 #ifndef STACK_GROWS_DOWNWARD
1435 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1436 incoming_args, 0, OPTAB_LIB_WIDEN);
1437 #endif
1439 /* Push a new argument block and copy the arguments. Do not allow
1440 the (potential) memcpy call below to interfere with our stack
1441 manipulations. */
1442 do_pending_stack_adjust ();
1443 NO_DEFER_POP;
1445 /* Save the stack with nonlocal if available. */
1446 #ifdef HAVE_save_stack_nonlocal
1447 if (HAVE_save_stack_nonlocal)
1448 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1449 else
1450 #endif
1451 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1453 /* Allocate a block of memory onto the stack and copy the memory
1454 arguments to the outgoing arguments address. */
1455 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1456 dest = virtual_outgoing_args_rtx;
1457 #ifndef STACK_GROWS_DOWNWARD
1458 if (GET_CODE (argsize) == CONST_INT)
1459 dest = plus_constant (dest, -INTVAL (argsize));
1460 else
1461 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1462 #endif
1463 dest = gen_rtx_MEM (BLKmode, dest);
1464 set_mem_align (dest, PARM_BOUNDARY);
1465 src = gen_rtx_MEM (BLKmode, incoming_args);
1466 set_mem_align (src, PARM_BOUNDARY);
1467 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1469 /* Refer to the argument block. */
1470 apply_args_size ();
1471 arguments = gen_rtx_MEM (BLKmode, arguments);
1472 set_mem_align (arguments, PARM_BOUNDARY);
1474 /* Walk past the arg-pointer and structure value address. */
1475 size = GET_MODE_SIZE (Pmode);
1476 if (struct_value)
1477 size += GET_MODE_SIZE (Pmode);
1479 /* Restore each of the registers previously saved. Make USE insns
1480 for each of these registers for use in making the call. */
1481 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1482 if ((mode = apply_args_mode[regno]) != VOIDmode)
1484 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1485 if (size % align != 0)
1486 size = CEIL (size, align) * align;
1487 reg = gen_rtx_REG (mode, regno);
1488 emit_move_insn (reg, adjust_address (arguments, mode, size));
1489 use_reg (&call_fusage, reg);
1490 size += GET_MODE_SIZE (mode);
1493 /* Restore the structure value address unless this is passed as an
1494 "invisible" first argument. */
1495 size = GET_MODE_SIZE (Pmode);
1496 if (struct_value)
1498 rtx value = gen_reg_rtx (Pmode);
1499 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1500 emit_move_insn (struct_value, value);
1501 if (REG_P (struct_value))
1502 use_reg (&call_fusage, struct_value);
1503 size += GET_MODE_SIZE (Pmode);
1506 /* All arguments and registers used for the call are set up by now! */
1507 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1509 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1510 and we don't want to load it into a register as an optimization,
1511 because prepare_call_address already did it if it should be done. */
1512 if (GET_CODE (function) != SYMBOL_REF)
1513 function = memory_address (FUNCTION_MODE, function);
1515 /* Generate the actual call instruction and save the return value. */
1516 #ifdef HAVE_untyped_call
1517 if (HAVE_untyped_call)
1518 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1519 result, result_vector (1, result)));
1520 else
1521 #endif
1522 #ifdef HAVE_call_value
1523 if (HAVE_call_value)
1525 rtx valreg = 0;
1527 /* Locate the unique return register. It is not possible to
1528 express a call that sets more than one return register using
1529 call_value; use untyped_call for that. In fact, untyped_call
1530 only needs to save the return registers in the given block. */
1531 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1532 if ((mode = apply_result_mode[regno]) != VOIDmode)
1534 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1536 valreg = gen_rtx_REG (mode, regno);
1539 emit_call_insn (GEN_CALL_VALUE (valreg,
1540 gen_rtx_MEM (FUNCTION_MODE, function),
1541 const0_rtx, NULL_RTX, const0_rtx));
1543 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1545 else
1546 #endif
1547 gcc_unreachable ();
1549 /* Find the CALL insn we just emitted, and attach the register usage
1550 information. */
1551 call_insn = last_call_insn ();
1552 add_function_usage_to (call_insn, call_fusage);
1554 /* Restore the stack. */
1555 #ifdef HAVE_save_stack_nonlocal
1556 if (HAVE_save_stack_nonlocal)
1557 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1558 else
1559 #endif
1560 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1562 OK_DEFER_POP;
1564 /* Return the address of the result block. */
1565 result = copy_addr_to_reg (XEXP (result, 0));
1566 return convert_memory_address (ptr_mode, result);
1569 /* Perform an untyped return. */
1571 static void
1572 expand_builtin_return (rtx result)
1574 int size, align, regno;
1575 enum machine_mode mode;
1576 rtx reg;
1577 rtx call_fusage = 0;
1579 result = convert_memory_address (Pmode, result);
1581 apply_result_size ();
1582 result = gen_rtx_MEM (BLKmode, result);
1584 #ifdef HAVE_untyped_return
1585 if (HAVE_untyped_return)
1587 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1588 emit_barrier ();
1589 return;
1591 #endif
1593 /* Restore the return value and note that each value is used. */
1594 size = 0;
1595 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1596 if ((mode = apply_result_mode[regno]) != VOIDmode)
1598 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1599 if (size % align != 0)
1600 size = CEIL (size, align) * align;
1601 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1602 emit_move_insn (reg, adjust_address (result, mode, size));
1604 push_to_sequence (call_fusage);
1605 emit_use (reg);
1606 call_fusage = get_insns ();
1607 end_sequence ();
1608 size += GET_MODE_SIZE (mode);
1611 /* Put the USE insns before the return. */
1612 emit_insn (call_fusage);
1614 /* Return whatever values was restored by jumping directly to the end
1615 of the function. */
1616 expand_naked_return ();
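/* The three builtins expanded above are normally used together to forward
   an unknown argument list.  A minimal sketch, where target_fn and the
   128-byte argument-block size are placeholders (the size needed is
   target-dependent):

     void *args = __builtin_apply_args ();
     void *result = __builtin_apply ((void (*) ()) target_fn, args, 128);
     __builtin_return (result);

   (result here is the block returned by __builtin_apply describing the
   return value, not the value itself.)  */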
1619 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1621 static enum type_class
1622 type_to_class (tree type)
1624 switch (TREE_CODE (type))
1626 case VOID_TYPE: return void_type_class;
1627 case INTEGER_TYPE: return integer_type_class;
1628 case ENUMERAL_TYPE: return enumeral_type_class;
1629 case BOOLEAN_TYPE: return boolean_type_class;
1630 case POINTER_TYPE: return pointer_type_class;
1631 case REFERENCE_TYPE: return reference_type_class;
1632 case OFFSET_TYPE: return offset_type_class;
1633 case REAL_TYPE: return real_type_class;
1634 case COMPLEX_TYPE: return complex_type_class;
1635 case FUNCTION_TYPE: return function_type_class;
1636 case METHOD_TYPE: return method_type_class;
1637 case RECORD_TYPE: return record_type_class;
1638 case UNION_TYPE:
1639 case QUAL_UNION_TYPE: return union_type_class;
1640 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1641 ? string_type_class : array_type_class);
1642 case LANG_TYPE: return lang_type_class;
1643 default: return no_type_class;
1647 /* Expand a call EXP to __builtin_classify_type. */
1649 static rtx
1650 expand_builtin_classify_type (tree exp)
1652 if (call_expr_nargs (exp))
1653 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1654 return GEN_INT (no_type_class);
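/* As a rough illustration: __builtin_classify_type (1.0) evaluates to
   real_type_class and __builtin_classify_type ((void *) 0) to
   pointer_type_class, while a call given no arguments yields
   no_type_class.  */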
1657 /* This helper macro, meant to be used in mathfn_built_in below,
1658 determines which among a set of three builtin math functions is
1659 appropriate for a given type mode. The `F' and `L' cases are
1660 automatically generated from the `double' case. */
1661 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1662 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1663 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1664 fcodel = BUILT_IN_MATHFN##L ; break;
1665 /* Similar to above, but appends _R after any F/L suffix. */
1666 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1667 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1668 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1669 fcodel = BUILT_IN_MATHFN##L_R ; break;
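/* For instance, CASE_MATHFN (BUILT_IN_SIN) expands to roughly:

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so a single switch entry below covers the double, float and long
   double variants of each math builtin.  */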
1671 /* Return the mathematical function equivalent to FN but operating directly
1672 on TYPE, if available. If IMPLICIT is true find the function in
1673 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1674 can't do the conversion, return zero. */
1676 static tree
1677 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1679 tree const *const fn_arr
1680 = implicit ? implicit_built_in_decls : built_in_decls;
1681 enum built_in_function fcode, fcodef, fcodel;
1683 switch (fn)
1685 CASE_MATHFN (BUILT_IN_ACOS)
1686 CASE_MATHFN (BUILT_IN_ACOSH)
1687 CASE_MATHFN (BUILT_IN_ASIN)
1688 CASE_MATHFN (BUILT_IN_ASINH)
1689 CASE_MATHFN (BUILT_IN_ATAN)
1690 CASE_MATHFN (BUILT_IN_ATAN2)
1691 CASE_MATHFN (BUILT_IN_ATANH)
1692 CASE_MATHFN (BUILT_IN_CBRT)
1693 CASE_MATHFN (BUILT_IN_CEIL)
1694 CASE_MATHFN (BUILT_IN_CEXPI)
1695 CASE_MATHFN (BUILT_IN_COPYSIGN)
1696 CASE_MATHFN (BUILT_IN_COS)
1697 CASE_MATHFN (BUILT_IN_COSH)
1698 CASE_MATHFN (BUILT_IN_DREM)
1699 CASE_MATHFN (BUILT_IN_ERF)
1700 CASE_MATHFN (BUILT_IN_ERFC)
1701 CASE_MATHFN (BUILT_IN_EXP)
1702 CASE_MATHFN (BUILT_IN_EXP10)
1703 CASE_MATHFN (BUILT_IN_EXP2)
1704 CASE_MATHFN (BUILT_IN_EXPM1)
1705 CASE_MATHFN (BUILT_IN_FABS)
1706 CASE_MATHFN (BUILT_IN_FDIM)
1707 CASE_MATHFN (BUILT_IN_FLOOR)
1708 CASE_MATHFN (BUILT_IN_FMA)
1709 CASE_MATHFN (BUILT_IN_FMAX)
1710 CASE_MATHFN (BUILT_IN_FMIN)
1711 CASE_MATHFN (BUILT_IN_FMOD)
1712 CASE_MATHFN (BUILT_IN_FREXP)
1713 CASE_MATHFN (BUILT_IN_GAMMA)
1714 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1715 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1716 CASE_MATHFN (BUILT_IN_HYPOT)
1717 CASE_MATHFN (BUILT_IN_ILOGB)
1718 CASE_MATHFN (BUILT_IN_INF)
1719 CASE_MATHFN (BUILT_IN_ISINF)
1720 CASE_MATHFN (BUILT_IN_J0)
1721 CASE_MATHFN (BUILT_IN_J1)
1722 CASE_MATHFN (BUILT_IN_JN)
1723 CASE_MATHFN (BUILT_IN_LCEIL)
1724 CASE_MATHFN (BUILT_IN_LDEXP)
1725 CASE_MATHFN (BUILT_IN_LFLOOR)
1726 CASE_MATHFN (BUILT_IN_LGAMMA)
1727 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1728 CASE_MATHFN (BUILT_IN_LLCEIL)
1729 CASE_MATHFN (BUILT_IN_LLFLOOR)
1730 CASE_MATHFN (BUILT_IN_LLRINT)
1731 CASE_MATHFN (BUILT_IN_LLROUND)
1732 CASE_MATHFN (BUILT_IN_LOG)
1733 CASE_MATHFN (BUILT_IN_LOG10)
1734 CASE_MATHFN (BUILT_IN_LOG1P)
1735 CASE_MATHFN (BUILT_IN_LOG2)
1736 CASE_MATHFN (BUILT_IN_LOGB)
1737 CASE_MATHFN (BUILT_IN_LRINT)
1738 CASE_MATHFN (BUILT_IN_LROUND)
1739 CASE_MATHFN (BUILT_IN_MODF)
1740 CASE_MATHFN (BUILT_IN_NAN)
1741 CASE_MATHFN (BUILT_IN_NANS)
1742 CASE_MATHFN (BUILT_IN_NEARBYINT)
1743 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1744 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1745 CASE_MATHFN (BUILT_IN_POW)
1746 CASE_MATHFN (BUILT_IN_POWI)
1747 CASE_MATHFN (BUILT_IN_POW10)
1748 CASE_MATHFN (BUILT_IN_REMAINDER)
1749 CASE_MATHFN (BUILT_IN_REMQUO)
1750 CASE_MATHFN (BUILT_IN_RINT)
1751 CASE_MATHFN (BUILT_IN_ROUND)
1752 CASE_MATHFN (BUILT_IN_SCALB)
1753 CASE_MATHFN (BUILT_IN_SCALBLN)
1754 CASE_MATHFN (BUILT_IN_SCALBN)
1755 CASE_MATHFN (BUILT_IN_SIGNBIT)
1756 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1757 CASE_MATHFN (BUILT_IN_SIN)
1758 CASE_MATHFN (BUILT_IN_SINCOS)
1759 CASE_MATHFN (BUILT_IN_SINH)
1760 CASE_MATHFN (BUILT_IN_SQRT)
1761 CASE_MATHFN (BUILT_IN_TAN)
1762 CASE_MATHFN (BUILT_IN_TANH)
1763 CASE_MATHFN (BUILT_IN_TGAMMA)
1764 CASE_MATHFN (BUILT_IN_TRUNC)
1765 CASE_MATHFN (BUILT_IN_Y0)
1766 CASE_MATHFN (BUILT_IN_Y1)
1767 CASE_MATHFN (BUILT_IN_YN)
1769 default:
1770 return NULL_TREE;
1773 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1774 return fn_arr[fcode];
1775 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1776 return fn_arr[fcodef];
1777 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1778 return fn_arr[fcodel];
1779 else
1780 return NULL_TREE;
1783 /* Like mathfn_built_in_1(), but always use the implicit array. */
1785 tree
1786 mathfn_built_in (tree type, enum built_in_function fn)
1788 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
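/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) is
   expected to yield the implicit declaration of sinf, and passing
   long_double_type_node the declaration of sinl, assuming the runtime
   provides those entry points; otherwise NULL_TREE is returned.  */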
1791 /* If errno must be maintained, expand the RTL to check if the result,
1792 TARGET, of a built-in function call, EXP, is NaN, and if so set
1793 errno to EDOM. */
1795 static void
1796 expand_errno_check (tree exp, rtx target)
1798 rtx lab = gen_label_rtx ();
1800 /* Test the result; if it is NaN, set errno=EDOM because
1801 the argument was not in the domain. */
1802 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1803 0, lab);
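/* The self-comparison above relies on a NaN comparing unequal to
   itself: the jump to LAB is taken exactly when the result is not a
   NaN, so the fall-through path only runs when errno must become
   EDOM.  */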
1805 #ifdef TARGET_EDOM
1806 /* If this built-in doesn't throw an exception, set errno directly. */
1807 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1809 #ifdef GEN_ERRNO_RTX
1810 rtx errno_rtx = GEN_ERRNO_RTX;
1811 #else
1812 rtx errno_rtx
1813 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1814 #endif
1815 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1816 emit_label (lab);
1817 return;
1819 #endif
1821 /* Make sure the library call isn't expanded as a tail call. */
1822 CALL_EXPR_TAILCALL (exp) = 0;
1824 /* We can't set errno=EDOM directly; let the library call do it.
1825 Pop the arguments right away in case the call gets deleted. */
1826 NO_DEFER_POP;
1827 expand_call (exp, target, 0);
1828 OK_DEFER_POP;
1829 emit_label (lab);
1832 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1833 Return NULL_RTX if a normal call should be emitted rather than expanding
1834 the function in-line. EXP is the expression that is a call to the builtin
1835 function; if convenient, the result should be placed in TARGET.
1836 SUBTARGET may be used as the target for computing one of EXP's operands. */
1838 static rtx
1839 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1841 optab builtin_optab;
1842 rtx op0, insns, before_call;
1843 tree fndecl = get_callee_fndecl (exp);
1844 enum machine_mode mode;
1845 bool errno_set = false;
1846 tree arg;
1848 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1849 return NULL_RTX;
1851 arg = CALL_EXPR_ARG (exp, 0);
1853 switch (DECL_FUNCTION_CODE (fndecl))
1855 CASE_FLT_FN (BUILT_IN_SQRT):
1856 errno_set = ! tree_expr_nonnegative_p (arg);
1857 builtin_optab = sqrt_optab;
1858 break;
1859 CASE_FLT_FN (BUILT_IN_EXP):
1860 errno_set = true; builtin_optab = exp_optab; break;
1861 CASE_FLT_FN (BUILT_IN_EXP10):
1862 CASE_FLT_FN (BUILT_IN_POW10):
1863 errno_set = true; builtin_optab = exp10_optab; break;
1864 CASE_FLT_FN (BUILT_IN_EXP2):
1865 errno_set = true; builtin_optab = exp2_optab; break;
1866 CASE_FLT_FN (BUILT_IN_EXPM1):
1867 errno_set = true; builtin_optab = expm1_optab; break;
1868 CASE_FLT_FN (BUILT_IN_LOGB):
1869 errno_set = true; builtin_optab = logb_optab; break;
1870 CASE_FLT_FN (BUILT_IN_LOG):
1871 errno_set = true; builtin_optab = log_optab; break;
1872 CASE_FLT_FN (BUILT_IN_LOG10):
1873 errno_set = true; builtin_optab = log10_optab; break;
1874 CASE_FLT_FN (BUILT_IN_LOG2):
1875 errno_set = true; builtin_optab = log2_optab; break;
1876 CASE_FLT_FN (BUILT_IN_LOG1P):
1877 errno_set = true; builtin_optab = log1p_optab; break;
1878 CASE_FLT_FN (BUILT_IN_ASIN):
1879 builtin_optab = asin_optab; break;
1880 CASE_FLT_FN (BUILT_IN_ACOS):
1881 builtin_optab = acos_optab; break;
1882 CASE_FLT_FN (BUILT_IN_TAN):
1883 builtin_optab = tan_optab; break;
1884 CASE_FLT_FN (BUILT_IN_ATAN):
1885 builtin_optab = atan_optab; break;
1886 CASE_FLT_FN (BUILT_IN_FLOOR):
1887 builtin_optab = floor_optab; break;
1888 CASE_FLT_FN (BUILT_IN_CEIL):
1889 builtin_optab = ceil_optab; break;
1890 CASE_FLT_FN (BUILT_IN_TRUNC):
1891 builtin_optab = btrunc_optab; break;
1892 CASE_FLT_FN (BUILT_IN_ROUND):
1893 builtin_optab = round_optab; break;
1894 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1895 builtin_optab = nearbyint_optab;
1896 if (flag_trapping_math)
1897 break;
1898 /* Else fall through and expand as rint.  */
1899 CASE_FLT_FN (BUILT_IN_RINT):
1900 builtin_optab = rint_optab; break;
1901 default:
1902 gcc_unreachable ();
1905 /* Make a suitable register to place result in. */
1906 mode = TYPE_MODE (TREE_TYPE (exp));
1908 if (! flag_errno_math || ! HONOR_NANS (mode))
1909 errno_set = false;
1911 /* Before working hard, check whether the instruction is available. */
1912 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1914 target = gen_reg_rtx (mode);
1916 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1917 need to expand the argument again. This way, we will not perform
1918 side-effects more than once.  */
1919 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1921 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1923 start_sequence ();
1925 /* Compute into TARGET.
1926 Set TARGET to wherever the result comes back. */
1927 target = expand_unop (mode, builtin_optab, op0, target, 0);
1929 if (target != 0)
1931 if (errno_set)
1932 expand_errno_check (exp, target);
1934 /* Output the entire sequence. */
1935 insns = get_insns ();
1936 end_sequence ();
1937 emit_insn (insns);
1938 return target;
1941 /* If we were unable to expand via the builtin, stop the sequence
1942 (without outputting the insns) and call the library function
1943 with the stabilized argument list. */
1944 end_sequence ();
1947 before_call = get_last_insn ();
1949 return expand_call (exp, target, target == const0_rtx);
1952 /* Expand a call to the builtin binary math functions (pow and atan2).
1953 Return NULL_RTX if a normal call should be emitted rather than expanding the
1954 function in-line. EXP is the expression that is a call to the builtin
1955 function; if convenient, the result should be placed in TARGET.
1956 SUBTARGET may be used as the target for computing one of EXP's
1957 operands. */
1959 static rtx
1960 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1962 optab builtin_optab;
1963 rtx op0, op1, insns;
1964 int op1_type = REAL_TYPE;
1965 tree fndecl = get_callee_fndecl (exp);
1966 tree arg0, arg1;
1967 enum machine_mode mode;
1968 bool errno_set = true;
1970 switch (DECL_FUNCTION_CODE (fndecl))
1972 CASE_FLT_FN (BUILT_IN_SCALBN):
1973 CASE_FLT_FN (BUILT_IN_SCALBLN):
1974 CASE_FLT_FN (BUILT_IN_LDEXP):
1975 op1_type = INTEGER_TYPE;
1976 default:
1977 break;
1980 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
1981 return NULL_RTX;
1983 arg0 = CALL_EXPR_ARG (exp, 0);
1984 arg1 = CALL_EXPR_ARG (exp, 1);
1986 switch (DECL_FUNCTION_CODE (fndecl))
1988 CASE_FLT_FN (BUILT_IN_POW):
1989 builtin_optab = pow_optab; break;
1990 CASE_FLT_FN (BUILT_IN_ATAN2):
1991 builtin_optab = atan2_optab; break;
1992 CASE_FLT_FN (BUILT_IN_SCALB):
1993 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
1994 return 0;
1995 builtin_optab = scalb_optab; break;
1996 CASE_FLT_FN (BUILT_IN_SCALBN):
1997 CASE_FLT_FN (BUILT_IN_SCALBLN):
1998 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
1999 return 0;
2000 /* Fall through... */
2001 CASE_FLT_FN (BUILT_IN_LDEXP):
2002 builtin_optab = ldexp_optab; break;
2003 CASE_FLT_FN (BUILT_IN_FMOD):
2004 builtin_optab = fmod_optab; break;
2005 CASE_FLT_FN (BUILT_IN_REMAINDER):
2006 CASE_FLT_FN (BUILT_IN_DREM):
2007 builtin_optab = remainder_optab; break;
2008 default:
2009 gcc_unreachable ();
2012 /* Make a suitable register to place result in. */
2013 mode = TYPE_MODE (TREE_TYPE (exp));
2015 /* Before working hard, check whether the instruction is available. */
2016 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2017 return NULL_RTX;
2019 target = gen_reg_rtx (mode);
2021 if (! flag_errno_math || ! HONOR_NANS (mode))
2022 errno_set = false;
2024 /* Always stabilize the argument list. */
2025 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2026 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2028 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2029 op1 = expand_normal (arg1);
2031 start_sequence ();
2033 /* Compute into TARGET.
2034 Set TARGET to wherever the result comes back. */
2035 target = expand_binop (mode, builtin_optab, op0, op1,
2036 target, 0, OPTAB_DIRECT);
2038 /* If we were unable to expand via the builtin, stop the sequence
2039 (without outputting the insns) and call the library function
2040 with the stabilized argument list. */
2041 if (target == 0)
2043 end_sequence ();
2044 return expand_call (exp, target, target == const0_rtx);
2047 if (errno_set)
2048 expand_errno_check (exp, target);
2050 /* Output the entire sequence. */
2051 insns = get_insns ();
2052 end_sequence ();
2053 emit_insn (insns);
2055 return target;
2058 /* Expand a call to the builtin sin and cos math functions.
2059 Return NULL_RTX if a normal call should be emitted rather than expanding the
2060 function in-line. EXP is the expression that is a call to the builtin
2061 function; if convenient, the result should be placed in TARGET.
2062 SUBTARGET may be used as the target for computing one of EXP's
2063 operands. */
2065 static rtx
2066 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2068 optab builtin_optab;
2069 rtx op0, insns;
2070 tree fndecl = get_callee_fndecl (exp);
2071 enum machine_mode mode;
2072 tree arg;
2074 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2075 return NULL_RTX;
2077 arg = CALL_EXPR_ARG (exp, 0);
2079 switch (DECL_FUNCTION_CODE (fndecl))
2081 CASE_FLT_FN (BUILT_IN_SIN):
2082 CASE_FLT_FN (BUILT_IN_COS):
2083 builtin_optab = sincos_optab; break;
2084 default:
2085 gcc_unreachable ();
2088 /* Make a suitable register to place result in. */
2089 mode = TYPE_MODE (TREE_TYPE (exp));
2091 /* Check if the sincos insn is available; otherwise fall back
2092 to the sin or cos insn.  */
2093 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2094 switch (DECL_FUNCTION_CODE (fndecl))
2096 CASE_FLT_FN (BUILT_IN_SIN):
2097 builtin_optab = sin_optab; break;
2098 CASE_FLT_FN (BUILT_IN_COS):
2099 builtin_optab = cos_optab; break;
2100 default:
2101 gcc_unreachable ();
2104 /* Before working hard, check whether the instruction is available. */
2105 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2107 target = gen_reg_rtx (mode);
2109 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2110 need to expand the argument again. This way, we will not perform
2111 side-effects more than once.  */
2112 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2114 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2116 start_sequence ();
2118 /* Compute into TARGET.
2119 Set TARGET to wherever the result comes back. */
2120 if (builtin_optab == sincos_optab)
2122 int result;
2124 switch (DECL_FUNCTION_CODE (fndecl))
2126 CASE_FLT_FN (BUILT_IN_SIN):
2127 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2128 break;
2129 CASE_FLT_FN (BUILT_IN_COS):
2130 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2131 break;
2132 default:
2133 gcc_unreachable ();
2135 gcc_assert (result);
2137 else
2139 target = expand_unop (mode, builtin_optab, op0, target, 0);
2142 if (target != 0)
2144 /* Output the entire sequence. */
2145 insns = get_insns ();
2146 end_sequence ();
2147 emit_insn (insns);
2148 return target;
2151 /* If we were unable to expand via the builtin, stop the sequence
2152 (without outputting the insns) and call the library function
2153 with the stabilized argument list. */
2154 end_sequence ();
2157 target = expand_call (exp, target, target == const0_rtx);
2159 return target;
2162 /* Expand a call to one of the builtin math functions that operate on
2163 a floating point argument and output an integer result (ilogb, isinf,
2164 isnan, etc).
2165 Return 0 if a normal call should be emitted rather than expanding the
2166 function in-line. EXP is the expression that is a call to the builtin
2167 function; if convenient, the result should be placed in TARGET.
2168 SUBTARGET may be used as the target for computing one of EXP's operands. */
2170 static rtx
2171 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2173 optab builtin_optab = 0;
2174 enum insn_code icode = CODE_FOR_nothing;
2175 rtx op0;
2176 tree fndecl = get_callee_fndecl (exp);
2177 enum machine_mode mode;
2178 bool errno_set = false;
2179 tree arg;
2181 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2182 return NULL_RTX;
2184 arg = CALL_EXPR_ARG (exp, 0);
2186 switch (DECL_FUNCTION_CODE (fndecl))
2188 CASE_FLT_FN (BUILT_IN_ILOGB):
2189 errno_set = true; builtin_optab = ilogb_optab; break;
2190 CASE_FLT_FN (BUILT_IN_ISINF):
2191 builtin_optab = isinf_optab; break;
2192 case BUILT_IN_ISNORMAL:
2193 case BUILT_IN_ISFINITE:
2194 CASE_FLT_FN (BUILT_IN_FINITE):
2195 /* These builtins have no optabs (yet). */
2196 break;
2197 default:
2198 gcc_unreachable ();
2201 /* There's no easy way to detect the case where we need to set EDOM.  */
2202 if (flag_errno_math && errno_set)
2203 return NULL_RTX;
2205 /* Optab mode depends on the mode of the input argument. */
2206 mode = TYPE_MODE (TREE_TYPE (arg));
2208 if (builtin_optab)
2209 icode = optab_handler (builtin_optab, mode)->insn_code;
2211 /* Before working hard, check whether the instruction is available. */
2212 if (icode != CODE_FOR_nothing)
2214 /* Make a suitable register to place result in. */
2215 if (!target
2216 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2217 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2219 gcc_assert (insn_data[icode].operand[0].predicate
2220 (target, GET_MODE (target)));
2222 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2223 need to expand the argument again. This way, we will not perform
2224 side-effects more than once.  */
2225 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2227 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2229 if (mode != GET_MODE (op0))
2230 op0 = convert_to_mode (mode, op0, 0);
2232 /* Compute into TARGET.
2233 Set TARGET to wherever the result comes back. */
2234 emit_unop_insn (icode, target, op0, UNKNOWN);
2235 return target;
2238 /* If there is no optab, try generic code. */
2239 switch (DECL_FUNCTION_CODE (fndecl))
2241 tree result;
2243 CASE_FLT_FN (BUILT_IN_ISINF):
2245 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2246 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2247 tree const type = TREE_TYPE (arg);
2248 REAL_VALUE_TYPE r;
2249 char buf[128];
2251 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2252 real_from_string (&r, buf);
2253 result = build_call_expr (isgr_fn, 2,
2254 fold_build1 (ABS_EXPR, type, arg),
2255 build_real (type, r));
2256 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2258 CASE_FLT_FN (BUILT_IN_FINITE):
2259 case BUILT_IN_ISFINITE:
2261 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2262 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2263 tree const type = TREE_TYPE (arg);
2264 REAL_VALUE_TYPE r;
2265 char buf[128];
2267 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2268 real_from_string (&r, buf);
2269 result = build_call_expr (isle_fn, 2,
2270 fold_build1 (ABS_EXPR, type, arg),
2271 build_real (type, r));
2272 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2274 case BUILT_IN_ISNORMAL:
2276 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2277 islessequal(fabs(x),DBL_MAX). */
2278 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2279 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2280 tree const type = TREE_TYPE (arg);
2281 REAL_VALUE_TYPE rmax, rmin;
2282 char buf[128];
2284 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2285 real_from_string (&rmax, buf);
2286 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2287 real_from_string (&rmin, buf);
2288 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2289 result = build_call_expr (isle_fn, 2, arg,
2290 build_real (type, rmax));
2291 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2292 build_call_expr (isge_fn, 2, arg,
2293 build_real (type, rmin)));
2294 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2296 default:
2297 break;
2300 target = expand_call (exp, target, target == const0_rtx);
2302 return target;
2305 /* Expand a call to the builtin sincos math function.
2306 Return NULL_RTX if a normal call should be emitted rather than expanding the
2307 function in-line. EXP is the expression that is a call to the builtin
2308 function. */
2310 static rtx
2311 expand_builtin_sincos (tree exp)
2313 rtx op0, op1, op2, target1, target2;
2314 enum machine_mode mode;
2315 tree arg, sinp, cosp;
2316 int result;
2318 if (!validate_arglist (exp, REAL_TYPE,
2319 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2320 return NULL_RTX;
2322 arg = CALL_EXPR_ARG (exp, 0);
2323 sinp = CALL_EXPR_ARG (exp, 1);
2324 cosp = CALL_EXPR_ARG (exp, 2);
2326 /* Make a suitable register to place result in. */
2327 mode = TYPE_MODE (TREE_TYPE (arg));
2329 /* Check if the sincos insn is available; otherwise emit the call.  */
2330 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2331 return NULL_RTX;
2333 target1 = gen_reg_rtx (mode);
2334 target2 = gen_reg_rtx (mode);
2336 op0 = expand_normal (arg);
2337 op1 = expand_normal (build_fold_indirect_ref (sinp));
2338 op2 = expand_normal (build_fold_indirect_ref (cosp));
2340 /* Compute into target1 and target2.
2341 Set TARGET to wherever the result comes back. */
2342 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2343 gcc_assert (result);
2345 /* Move target1 and target2 to the memory locations indicated
2346 by op1 and op2. */
2347 emit_move_insn (op1, target1);
2348 emit_move_insn (op2, target2);
2350 return const0_rtx;
2353 /* Expand a call to the internal cexpi builtin to the sincos math function.
2354 EXP is the expression that is a call to the builtin function; if convenient,
2355 the result should be placed in TARGET. SUBTARGET may be used as the target
2356 for computing one of EXP's operands. */
2358 static rtx
2359 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2361 tree fndecl = get_callee_fndecl (exp);
2362 tree arg, type;
2363 enum machine_mode mode;
2364 rtx op0, op1, op2;
2366 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2367 return NULL_RTX;
2369 arg = CALL_EXPR_ARG (exp, 0);
2370 type = TREE_TYPE (arg);
2371 mode = TYPE_MODE (TREE_TYPE (arg));
2373 /* Try expanding via a sincos optab, fall back to emitting a libcall
2374 to sincos or cexp.  We are sure we have sincos or cexp because cexpi
2375 is only generated from sincos or cexp, or when either of them is available.  */
2376 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2378 op1 = gen_reg_rtx (mode);
2379 op2 = gen_reg_rtx (mode);
2381 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2383 /* Compute into op1 and op2. */
2384 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2386 else if (TARGET_HAS_SINCOS)
2388 tree call, fn = NULL_TREE;
2389 tree top1, top2;
2390 rtx op1a, op2a;
2392 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2393 fn = built_in_decls[BUILT_IN_SINCOSF];
2394 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2395 fn = built_in_decls[BUILT_IN_SINCOS];
2396 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2397 fn = built_in_decls[BUILT_IN_SINCOSL];
2398 else
2399 gcc_unreachable ();
2401 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2402 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2403 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2404 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2405 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2406 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2408 /* Make sure not to fold the sincos call again. */
2409 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2410 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2411 call, 3, arg, top1, top2));
2413 else
2415 tree call, fn = NULL_TREE, narg;
2416 tree ctype = build_complex_type (type);
2418 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2419 fn = built_in_decls[BUILT_IN_CEXPF];
2420 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2421 fn = built_in_decls[BUILT_IN_CEXP];
2422 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2423 fn = built_in_decls[BUILT_IN_CEXPL];
2424 else
2425 gcc_unreachable ();
2427 /* If we don't have a decl for cexp create one. This is the
2428 friendliest fallback if the user calls __builtin_cexpi
2429 without full C99 function support on the target.  */
2430 if (fn == NULL_TREE)
2432 tree fntype;
2433 const char *name = NULL;
2435 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2436 name = "cexpf";
2437 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2438 name = "cexp";
2439 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2440 name = "cexpl";
2442 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2443 fn = build_fn_decl (name, fntype);
2446 narg = fold_build2 (COMPLEX_EXPR, ctype,
2447 build_real (type, dconst0), arg);
2449 /* Make sure not to fold the cexp call again. */
2450 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2451 return expand_expr (build_call_nary (ctype, call, 1, narg),
2452 target, VOIDmode, EXPAND_NORMAL);
2455 /* Now build the result with the proper complex return type.  */
2456 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2457 make_tree (TREE_TYPE (arg), op2),
2458 make_tree (TREE_TYPE (arg), op1)),
2459 target, VOIDmode, EXPAND_NORMAL);
2462 /* Expand a call to one of the builtin rounding functions gcc defines
2463 as an extension (lfloor and lceil). As these are gcc extensions we
2464 do not need to worry about setting errno to EDOM.
2465 If expanding via optab fails, lower expression to (int)(floor(x)).
2466 EXP is the expression that is a call to the builtin function;
2467 if convenient, the result should be placed in TARGET. SUBTARGET may
2468 be used as the target for computing one of EXP's operands. */
2470 static rtx
2471 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2473 convert_optab builtin_optab;
2474 rtx op0, insns, tmp;
2475 tree fndecl = get_callee_fndecl (exp);
2476 enum built_in_function fallback_fn;
2477 tree fallback_fndecl;
2478 enum machine_mode mode;
2479 tree arg;
2481 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2482 gcc_unreachable ();
2484 arg = CALL_EXPR_ARG (exp, 0);
2486 switch (DECL_FUNCTION_CODE (fndecl))
2488 CASE_FLT_FN (BUILT_IN_LCEIL):
2489 CASE_FLT_FN (BUILT_IN_LLCEIL):
2490 builtin_optab = lceil_optab;
2491 fallback_fn = BUILT_IN_CEIL;
2492 break;
2494 CASE_FLT_FN (BUILT_IN_LFLOOR):
2495 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2496 builtin_optab = lfloor_optab;
2497 fallback_fn = BUILT_IN_FLOOR;
2498 break;
2500 default:
2501 gcc_unreachable ();
2504 /* Make a suitable register to place result in. */
2505 mode = TYPE_MODE (TREE_TYPE (exp));
2507 target = gen_reg_rtx (mode);
2509 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2510 need to expand the argument again. This way, we will not perform
2511 side-effects more than once.  */
2512 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2514 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2516 start_sequence ();
2518 /* Compute into TARGET. */
2519 if (expand_sfix_optab (target, op0, builtin_optab))
2521 /* Output the entire sequence. */
2522 insns = get_insns ();
2523 end_sequence ();
2524 emit_insn (insns);
2525 return target;
2528 /* If we were unable to expand via the builtin, stop the sequence
2529 (without outputting the insns). */
2530 end_sequence ();
2532 /* Fall back to floating point rounding optab. */
2533 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2535 /* For non-C99 targets we may end up without a fallback fndecl here
2536 if the user called __builtin_lfloor directly. In this case emit
2537 a call to the floor/ceil variants nevertheless. This should result
2538 in the best user experience for targets without full C99 support.  */
2539 if (fallback_fndecl == NULL_TREE)
2541 tree fntype;
2542 const char *name = NULL;
2544 switch (DECL_FUNCTION_CODE (fndecl))
2546 case BUILT_IN_LCEIL:
2547 case BUILT_IN_LLCEIL:
2548 name = "ceil";
2549 break;
2550 case BUILT_IN_LCEILF:
2551 case BUILT_IN_LLCEILF:
2552 name = "ceilf";
2553 break;
2554 case BUILT_IN_LCEILL:
2555 case BUILT_IN_LLCEILL:
2556 name = "ceill";
2557 break;
2558 case BUILT_IN_LFLOOR:
2559 case BUILT_IN_LLFLOOR:
2560 name = "floor";
2561 break;
2562 case BUILT_IN_LFLOORF:
2563 case BUILT_IN_LLFLOORF:
2564 name = "floorf";
2565 break;
2566 case BUILT_IN_LFLOORL:
2567 case BUILT_IN_LLFLOORL:
2568 name = "floorl";
2569 break;
2570 default:
2571 gcc_unreachable ();
2574 fntype = build_function_type_list (TREE_TYPE (arg),
2575 TREE_TYPE (arg), NULL_TREE);
2576 fallback_fndecl = build_fn_decl (name, fntype);
2579 exp = build_call_expr (fallback_fndecl, 1, arg);
2581 tmp = expand_normal (exp);
2583 /* Truncate the result of floating point optab to integer
2584 via expand_fix (). */
2585 target = gen_reg_rtx (mode);
2586 expand_fix (target, tmp, 0);
2588 return target;
2591 /* Expand a call to one of the builtin math functions doing integer
2592 conversion (lrint).
2593 Return 0 if a normal call should be emitted rather than expanding the
2594 function in-line. EXP is the expression that is a call to the builtin
2595 function; if convenient, the result should be placed in TARGET.
2596 SUBTARGET may be used as the target for computing one of EXP's operands. */
2598 static rtx
2599 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2601 convert_optab builtin_optab;
2602 rtx op0, insns;
2603 tree fndecl = get_callee_fndecl (exp);
2604 tree arg;
2605 enum machine_mode mode;
2607 /* There's no easy way to detect the case where we need to set EDOM.  */
2608 if (flag_errno_math)
2609 return NULL_RTX;
2611 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2612 gcc_unreachable ();
2614 arg = CALL_EXPR_ARG (exp, 0);
2616 switch (DECL_FUNCTION_CODE (fndecl))
2618 CASE_FLT_FN (BUILT_IN_LRINT):
2619 CASE_FLT_FN (BUILT_IN_LLRINT):
2620 builtin_optab = lrint_optab; break;
2621 CASE_FLT_FN (BUILT_IN_LROUND):
2622 CASE_FLT_FN (BUILT_IN_LLROUND):
2623 builtin_optab = lround_optab; break;
2624 default:
2625 gcc_unreachable ();
2628 /* Make a suitable register to place result in. */
2629 mode = TYPE_MODE (TREE_TYPE (exp));
2631 target = gen_reg_rtx (mode);
2633 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2634 need to expand the argument again. This way, we will not perform
2635 side-effects more than once.  */
2636 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2638 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2640 start_sequence ();
2642 if (expand_sfix_optab (target, op0, builtin_optab))
2644 /* Output the entire sequence. */
2645 insns = get_insns ();
2646 end_sequence ();
2647 emit_insn (insns);
2648 return target;
2651 /* If we were unable to expand via the builtin, stop the sequence
2652 (without outputting the insns) and call the library function
2653 with the stabilized argument list. */
2654 end_sequence ();
2656 target = expand_call (exp, target, target == const0_rtx);
2658 return target;
2661 /* To evaluate powi(x,n), the floating point value x raised to the
2662 constant integer exponent n, we use a hybrid algorithm that
2663 combines the "window method" with look-up tables. For an
2664 introduction to exponentiation algorithms and "addition chains",
2665 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2666 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2667 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2668 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2670 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2671 multiplications to inline before calling the system library's pow
2672 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2673 so this default never requires calling pow, powf or powl. */
2675 #ifndef POWI_MAX_MULTS
2676 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2677 #endif
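/* On a host where HOST_WIDE_INT is 64 bits wide, this default permits
   up to 2*64 - 2 == 126 multiplications, enough for any representable
   constant exponent.  */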
2679 /* The size of the "optimal power tree" lookup table. All
2680 exponents less than this value are simply looked up in the
2681 powi_table below. This threshold is also used to size the
2682 cache of pseudo registers that hold intermediate results. */
2683 #define POWI_TABLE_SIZE 256
2685 /* The size, in bits, of the window used in the "window method"
2686 exponentiation algorithm. This is equivalent to a radix of
2687 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2688 #define POWI_WINDOW_SIZE 3
2690 /* The following table is an efficient representation of an
2691 "optimal power tree". For each value, i, the corresponding
2692 value, j, in the table states that an optimal evaluation
2693 sequence for calculating pow(x,i) can be found by evaluating
2694 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2695 100 integers is given in Knuth's "Seminumerical algorithms". */
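/* For instance, powi_table[7] below is 4, so x**7 is evaluated as
   x**4 * x**3; recursing, x**4 = x**2 * x**2 and x**3 = x**2 * x,
   giving the whole power in four multiplications once x**2 is
   shared.  */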
2697 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2699 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2700 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2701 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2702 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2703 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2704 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2705 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2706 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2707 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2708 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2709 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2710 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2711 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2712 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2713 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2714 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2715 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2716 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2717 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2718 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2719 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2720 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2721 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2722 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2723 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2724 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2725 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2726 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2727 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2728 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2729 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2730 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2734 /* Return the number of multiplications required to calculate
2735 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2736 subroutine of powi_cost. CACHE is an array indicating
2737 which exponents have already been calculated. */
2739 static int
2740 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2742 /* If we've already calculated this exponent, then this evaluation
2743 doesn't require any additional multiplications. */
2744 if (cache[n])
2745 return 0;
2747 cache[n] = true;
2748 return powi_lookup_cost (n - powi_table[n], cache)
2749 + powi_lookup_cost (powi_table[n], cache) + 1;
2752 /* Return the number of multiplications required to calculate
2753 powi(x,n) for an arbitrary x, given the exponent N. This
2754 function needs to be kept in sync with expand_powi below. */
2756 static int
2757 powi_cost (HOST_WIDE_INT n)
2759 bool cache[POWI_TABLE_SIZE];
2760 unsigned HOST_WIDE_INT digit;
2761 unsigned HOST_WIDE_INT val;
2762 int result;
2764 if (n == 0)
2765 return 0;
2767 /* Ignore the reciprocal when calculating the cost. */
2768 val = (n < 0) ? -n : n;
2770 /* Initialize the exponent cache. */
2771 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2772 cache[1] = true;
2774 result = 0;
2776 while (val >= POWI_TABLE_SIZE)
2778 if (val & 1)
2780 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2781 result += powi_lookup_cost (digit, cache)
2782 + POWI_WINDOW_SIZE + 1;
2783 val >>= POWI_WINDOW_SIZE;
2785 else
2787 val >>= 1;
2788 result++;
2792 return result + powi_lookup_cost (val, cache);
2795 /* Recursive subroutine of expand_powi. This function takes the array,
2796 CACHE, of already calculated exponents and an exponent N and returns
2797 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
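/* Sketch of the recursion for an exponent above POWI_TABLE_SIZE, say
   N = 259: 259 is odd, so the low POWI_WINDOW_SIZE bits (259 & 7 == 3)
   are peeled off and x**259 becomes x**256 * x**3; 256 is even and is
   computed by squaring x**128, which is below POWI_TABLE_SIZE and
   handled by the lookup table.  */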
2799 static rtx
2800 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2802 unsigned HOST_WIDE_INT digit;
2803 rtx target, result;
2804 rtx op0, op1;
2806 if (n < POWI_TABLE_SIZE)
2808 if (cache[n])
2809 return cache[n];
2811 target = gen_reg_rtx (mode);
2812 cache[n] = target;
2814 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2815 op1 = expand_powi_1 (mode, powi_table[n], cache);
2817 else if (n & 1)
2819 target = gen_reg_rtx (mode);
2820 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2821 op0 = expand_powi_1 (mode, n - digit, cache);
2822 op1 = expand_powi_1 (mode, digit, cache);
2824 else
2826 target = gen_reg_rtx (mode);
2827 op0 = expand_powi_1 (mode, n >> 1, cache);
2828 op1 = op0;
2831 result = expand_mult (mode, op0, op1, target, 0);
2832 if (result != target)
2833 emit_move_insn (target, result);
2834 return target;
2837 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2838 floating point operand in mode MODE, and N is the exponent. This
2839 function needs to be kept in sync with powi_cost above. */
2841 static rtx
2842 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2844 unsigned HOST_WIDE_INT val;
2845 rtx cache[POWI_TABLE_SIZE];
2846 rtx result;
2848 if (n == 0)
2849 return CONST1_RTX (mode);
2851 val = (n < 0) ? -n : n;
2853 memset (cache, 0, sizeof (cache));
2854 cache[1] = x;
2856 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2858 /* If the original exponent was negative, reciprocate the result. */
2859 if (n < 0)
2860 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2861 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2863 return result;
2866 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2867 a normal call should be emitted rather than expanding the function
2868 in-line. EXP is the expression that is a call to the builtin
2869 function; if convenient, the result should be placed in TARGET. */
2871 static rtx
2872 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2874 tree arg0, arg1;
2875 tree fn, narg0;
2876 tree type = TREE_TYPE (exp);
2877 REAL_VALUE_TYPE cint, c, c2;
2878 HOST_WIDE_INT n;
2879 rtx op, op2;
2880 enum machine_mode mode = TYPE_MODE (type);
2882 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2883 return NULL_RTX;
2885 arg0 = CALL_EXPR_ARG (exp, 0);
2886 arg1 = CALL_EXPR_ARG (exp, 1);
2888 if (TREE_CODE (arg1) != REAL_CST
2889 || TREE_OVERFLOW (arg1))
2890 return expand_builtin_mathfn_2 (exp, target, subtarget);
2892 /* Handle constant exponents. */
2894 /* For integer valued exponents we can expand to an optimal multiplication
2895 sequence using expand_powi. */
2896 c = TREE_REAL_CST (arg1);
2897 n = real_to_integer (&c);
2898 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2899 if (real_identical (&c, &cint)
2900 && ((n >= -1 && n <= 2)
2901 || (flag_unsafe_math_optimizations
2902 && !optimize_size
2903 && powi_cost (n) <= POWI_MAX_MULTS)))
2905 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2906 if (n != 1)
2908 op = force_reg (mode, op);
2909 op = expand_powi (op, mode, n);
2911 return op;
2914 narg0 = builtin_save_expr (arg0);
2916 /* If the exponent is not integer valued, check if it is half of an integer.
2917 In this case we can expand to sqrt (x) * x**(n/2). */
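/* For example, pow (x, 3.5) gives n == 7 here, so it can be expanded
   as sqrt (x) * x**3; a negative half-integer exponent such as -3.5
   is handled by reciprocating that result afterwards.  */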
2918 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2919 if (fn != NULL_TREE)
2921 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2922 n = real_to_integer (&c2);
2923 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2924 if (real_identical (&c2, &cint)
2925 && ((flag_unsafe_math_optimizations
2926 && !optimize_size
2927 && powi_cost (n/2) <= POWI_MAX_MULTS)
2928 || n == 1))
2930 tree call_expr = build_call_expr (fn, 1, narg0);
2931 /* Use expand_expr in case the newly built call expression
2932 was folded to a non-call. */
2933 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2934 if (n != 1)
2936 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2937 op2 = force_reg (mode, op2);
2938 op2 = expand_powi (op2, mode, abs (n / 2));
2939 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2940 0, OPTAB_LIB_WIDEN);
2941 /* If the original exponent was negative, reciprocate the
2942 result. */
2943 if (n < 0)
2944 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2945 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2947 return op;
2951 /* Check whether the exponent is a third of an integer.  In this case
2952 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2953 different from pow (x, 1./3.) due to rounding and behavior
2954 with negative x, we need to constrain this transformation to
2955 unsafe math and positive x or finite math. */
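/* For example, pow (x, 5./3.) has n == 5, so under the conditions
   above it can be expanded as x * cbrt (x) * cbrt (x): the integer
   part n/3 == 1 contributes the plain x and cbrt (x)**(5%3) the
   squared cube root; a negative exponent again reciprocates the
   result.  */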
2956 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2957 if (fn != NULL_TREE
2958 && flag_unsafe_math_optimizations
2959 && (tree_expr_nonnegative_p (arg0)
2960 || !HONOR_NANS (mode)))
2962 REAL_VALUE_TYPE dconst3;
2963 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
2964 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2965 real_round (&c2, mode, &c2);
2966 n = real_to_integer (&c2);
2967 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2968 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
2969 real_convert (&c2, mode, &c2);
2970 if (real_identical (&c2, &c)
2971 && ((!optimize_size
2972 && powi_cost (n/3) <= POWI_MAX_MULTS)
2973 || n == 1))
2975 tree call_expr = build_call_expr (fn, 1, narg0);
2976 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2977 if (abs (n) % 3 == 2)
2978 op = expand_simple_binop (mode, MULT, op, op, op,
2979 0, OPTAB_LIB_WIDEN);
2980 if (n != 1)
2982 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2983 op2 = force_reg (mode, op2);
2984 op2 = expand_powi (op2, mode, abs (n / 3));
2985 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2986 0, OPTAB_LIB_WIDEN);
2987 /* If the original exponent was negative, reciprocate the
2988 result. */
2989 if (n < 0)
2990 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2991 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2993 return op;
2997 /* Fall back to optab expansion. */
2998 return expand_builtin_mathfn_2 (exp, target, subtarget);
3001 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3002 a normal call should be emitted rather than expanding the function
3003 in-line. EXP is the expression that is a call to the builtin
3004 function; if convenient, the result should be placed in TARGET. */
3006 static rtx
3007 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3009 tree arg0, arg1;
3010 rtx op0, op1;
3011 enum machine_mode mode;
3012 enum machine_mode mode2;
3014 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3015 return NULL_RTX;
3017 arg0 = CALL_EXPR_ARG (exp, 0);
3018 arg1 = CALL_EXPR_ARG (exp, 1);
3019 mode = TYPE_MODE (TREE_TYPE (exp));
3021 /* Handle constant power. */
3023 if (TREE_CODE (arg1) == INTEGER_CST
3024 && !TREE_OVERFLOW (arg1))
3026 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3028 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3029 Otherwise, check the number of multiplications required. */
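/* For instance, __builtin_powi (x, 6) costs three multiplications
   (t2 = x*x, t3 = t2*x, result = t3*t3), so it is expanded inline
   here unless optimizing for size.  */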
3030 if ((TREE_INT_CST_HIGH (arg1) == 0
3031 || TREE_INT_CST_HIGH (arg1) == -1)
3032 && ((n >= -1 && n <= 2)
3033 || (! optimize_size
3034 && powi_cost (n) <= POWI_MAX_MULTS)))
3036 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3037 op0 = force_reg (mode, op0);
3038 return expand_powi (op0, mode, n);
3042 /* Emit a libcall to libgcc. */
3044 /* Mode of the 2nd argument must match that of an int. */
3045 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3047 if (target == NULL_RTX)
3048 target = gen_reg_rtx (mode);
3050 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3051 if (GET_MODE (op0) != mode)
3052 op0 = convert_to_mode (mode, op0, 0);
3053 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3054 if (GET_MODE (op1) != mode2)
3055 op1 = convert_to_mode (mode2, op1, 0);
3057 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3058 target, LCT_CONST, mode, 2,
3059 op0, mode, op1, mode2);
3061 return target;
3064 /* Expand expression EXP, which is a call to the strlen builtin.  Return
3065 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3066 try to get the result in TARGET, if convenient. */
3068 static rtx
3069 expand_builtin_strlen (tree exp, rtx target,
3070 enum machine_mode target_mode)
3072 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3073 return NULL_RTX;
3074 else
3076 rtx pat;
3077 tree len;
3078 tree src = CALL_EXPR_ARG (exp, 0);
3079 rtx result, src_reg, char_rtx, before_strlen;
3080 enum machine_mode insn_mode = target_mode, char_mode;
3081 enum insn_code icode = CODE_FOR_nothing;
3082 int align;
3084 /* If the length can be computed at compile-time, return it. */
3085 len = c_strlen (src, 0);
3086 if (len)
3087 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3089 /* If the length can be computed at compile-time and is a constant
3090 integer, but there are side-effects in src, evaluate
3091 src for side-effects, then return len.
3092 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3093 can be optimized into: i++; x = 3; */
3094 len = c_strlen (src, 1);
3095 if (len && TREE_CODE (len) == INTEGER_CST)
3097 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3098 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3101 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3103 /* If SRC is not a pointer type, don't do this operation inline. */
3104 if (align == 0)
3105 return NULL_RTX;
3107 /* Bail out if we can't compute strlen in the right mode. */
3108 while (insn_mode != VOIDmode)
3110 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3111 if (icode != CODE_FOR_nothing)
3112 break;
3114 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3116 if (insn_mode == VOIDmode)
3117 return NULL_RTX;
3119 /* Make a place to write the result of the instruction. */
3120 result = target;
3121 if (! (result != 0
3122 && REG_P (result)
3123 && GET_MODE (result) == insn_mode
3124 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3125 result = gen_reg_rtx (insn_mode);
3127 /* Make a place to hold the source address. We will not expand
3128 the actual source until we are sure that the expansion will
3129 not fail -- there are trees that cannot be expanded twice. */
3130 src_reg = gen_reg_rtx (Pmode);
3132 /* Mark the beginning of the strlen sequence so we can emit the
3133 source operand later. */
3134 before_strlen = get_last_insn ();
3136 char_rtx = const0_rtx;
3137 char_mode = insn_data[(int) icode].operand[2].mode;
3138 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3139 char_mode))
3140 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3142 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3143 char_rtx, GEN_INT (align));
3144 if (! pat)
3145 return NULL_RTX;
3146 emit_insn (pat);
3148 /* Now that we are assured of success, expand the source. */
3149 start_sequence ();
3150 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3151 if (pat != src_reg)
3152 emit_move_insn (src_reg, pat);
3153 pat = get_insns ();
3154 end_sequence ();
3156 if (before_strlen)
3157 emit_insn_after (pat, before_strlen);
3158 else
3159 emit_insn_before (pat, get_insns ());
3161 /* Return the value in the proper mode for this function. */
3162 if (GET_MODE (result) == target_mode)
3163 target = result;
3164 else if (target != 0)
3165 convert_move (target, result, 0);
3166 else
3167 target = convert_to_mode (target_mode, result, 0);
3169 return target;
3173 /* Expand a call to the strstr builtin.  Return NULL_RTX if we failed; the
3174 caller should emit a normal call, otherwise try to get the result
3175 in TARGET, if convenient (and in mode MODE if that's convenient). */
3177 static rtx
3178 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3180 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3182 tree type = TREE_TYPE (exp);
3183 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3184 CALL_EXPR_ARG (exp, 1), type);
3185 if (result)
3186 return expand_expr (result, target, mode, EXPAND_NORMAL);
3188 return NULL_RTX;
3191 /* Expand a call to the strchr builtin.  Return NULL_RTX if we failed; the
3192 caller should emit a normal call, otherwise try to get the result
3193 in TARGET, if convenient (and in mode MODE if that's convenient). */
3195 static rtx
3196 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3198 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3200 tree type = TREE_TYPE (exp);
3201 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3202 CALL_EXPR_ARG (exp, 1), type);
3203 if (result)
3204 return expand_expr (result, target, mode, EXPAND_NORMAL);
3206 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3208 return NULL_RTX;
3211 /* Expand a call to the strrchr builtin.  Return NULL_RTX if we failed; the
3212 caller should emit a normal call, otherwise try to get the result
3213 in TARGET, if convenient (and in mode MODE if that's convenient). */
3215 static rtx
3216 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3218 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3220 tree type = TREE_TYPE (exp);
3221 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3222 CALL_EXPR_ARG (exp, 1), type);
3223 if (result)
3224 return expand_expr (result, target, mode, EXPAND_NORMAL);
3226 return NULL_RTX;
3229 /* Expand a call to the strpbrk builtin.  Return NULL_RTX if we failed; the
3230 caller should emit a normal call, otherwise try to get the result
3231 in TARGET, if convenient (and in mode MODE if that's convenient). */
3233 static rtx
3234 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3236 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3238 tree type = TREE_TYPE (exp);
3239 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3240 CALL_EXPR_ARG (exp, 1), type);
3241 if (result)
3242 return expand_expr (result, target, mode, EXPAND_NORMAL);
3244 return NULL_RTX;
3247 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3248 bytes from the constant string DATA + OFFSET and return it as a target
3249 constant.  */
3251 static rtx
3252 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3253 enum machine_mode mode)
3255 const char *str = (const char *) data;
3257 gcc_assert (offset >= 0
3258 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3259 <= strlen (str) + 1));
3261 return c_readstr (str + offset, mode);
3264 /* Expand a call EXP to the memcpy builtin.
3265 Return NULL_RTX if we failed; the caller should emit a normal call,
3266 otherwise try to get the result in TARGET, if convenient (and in
3267 mode MODE if that's convenient). */
3269 static rtx
3270 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3272 tree fndecl = get_callee_fndecl (exp);
3274 if (!validate_arglist (exp,
3275 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3276 return NULL_RTX;
3277 else
3279 tree dest = CALL_EXPR_ARG (exp, 0);
3280 tree src = CALL_EXPR_ARG (exp, 1);
3281 tree len = CALL_EXPR_ARG (exp, 2);
3282 const char *src_str;
3283 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3284 unsigned int dest_align
3285 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3286 rtx dest_mem, src_mem, dest_addr, len_rtx;
3287 tree result = fold_builtin_memory_op (dest, src, len,
3288 TREE_TYPE (TREE_TYPE (fndecl)),
3289 false, /*endp=*/0);
3290 HOST_WIDE_INT expected_size = -1;
3291 unsigned int expected_align = 0;
3293 if (result)
3295 while (TREE_CODE (result) == COMPOUND_EXPR)
3297 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3298 EXPAND_NORMAL);
3299 result = TREE_OPERAND (result, 1);
3301 return expand_expr (result, target, mode, EXPAND_NORMAL);
3304 /* If DEST is not a pointer type, call the normal function. */
3305 if (dest_align == 0)
3306 return NULL_RTX;
3308 /* If SRC is not a pointer type, don't do this
3309 operation in-line. */
3310 if (src_align == 0)
3311 return NULL_RTX;
3313 stringop_block_profile (exp, &expected_align, &expected_size);
3314 if (expected_align < dest_align)
3315 expected_align = dest_align;
3316 dest_mem = get_memory_rtx (dest, len);
3317 set_mem_align (dest_mem, dest_align);
3318 len_rtx = expand_normal (len);
3319 src_str = c_getstr (src);
3321 /* If SRC is a string constant and block move would be done
3322 by pieces, we can avoid loading the string from memory
3323 and only store the computed constants.  */
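/* For example, memcpy (dst, "hi", 3) with a suitably aligned DST can
   be emitted as a couple of constant stores instead of a block move,
   provided the target's store-by-pieces machinery accepts it.  */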
3324 if (src_str
3325 && GET_CODE (len_rtx) == CONST_INT
3326 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3327 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3328 CONST_CAST (char *, src_str),
3329 dest_align, false))
3331 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3332 builtin_memcpy_read_str,
3333 CONST_CAST (char *, src_str),
3334 dest_align, false, 0);
3335 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3336 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3337 return dest_mem;
3340 src_mem = get_memory_rtx (src, len);
3341 set_mem_align (src_mem, src_align);
3343 /* Copy word part most expediently. */
3344 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3345 CALL_EXPR_TAILCALL (exp)
3346 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3347 expected_align, expected_size);
3349 if (dest_addr == 0)
3351 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3352 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3354 return dest_addr;
3358 /* Expand a call EXP to the mempcpy builtin.
3359 Return NULL_RTX if we failed; the caller should emit a normal call,
3360 otherwise try to get the result in TARGET, if convenient (and in
3361 mode MODE if that's convenient). If ENDP is 0 return the
3362 destination pointer, if ENDP is 1 return the end pointer ala
3363 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3364 stpcpy. */
3366 static rtx
3367 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3369 if (!validate_arglist (exp,
3370 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3371 return NULL_RTX;
3372 else
3374 tree dest = CALL_EXPR_ARG (exp, 0);
3375 tree src = CALL_EXPR_ARG (exp, 1);
3376 tree len = CALL_EXPR_ARG (exp, 2);
3377 return expand_builtin_mempcpy_args (dest, src, len,
3378 TREE_TYPE (exp),
3379 target, mode, /*endp=*/ 1);
3383 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3384 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3385 so that this can also be called without constructing an actual CALL_EXPR.
3386 TYPE is the return type of the call. The other arguments and return value
3387 are the same as for expand_builtin_mempcpy. */
3389 static rtx
3390 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3391 rtx target, enum machine_mode mode, int endp)
3393 /* If return value is ignored, transform mempcpy into memcpy. */
3394 if (target == const0_rtx)
3396 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3398 if (!fn)
3399 return NULL_RTX;
3401 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3402 target, mode, EXPAND_NORMAL);
3404 else
3406 const char *src_str;
3407 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3408 unsigned int dest_align
3409 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3410 rtx dest_mem, src_mem, len_rtx;
3411 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3413 if (result)
3415 while (TREE_CODE (result) == COMPOUND_EXPR)
3417 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3418 EXPAND_NORMAL);
3419 result = TREE_OPERAND (result, 1);
3421 return expand_expr (result, target, mode, EXPAND_NORMAL);
3424 /* If either SRC or DEST is not a pointer type, don't do this
3425 operation in-line. */
3426 if (dest_align == 0 || src_align == 0)
3427 return NULL_RTX;
3429 /* If LEN is not constant, call the normal function. */
3430 if (! host_integerp (len, 1))
3431 return NULL_RTX;
3433 len_rtx = expand_normal (len);
3434 src_str = c_getstr (src);
3436 /* If SRC is a string constant and block move would be done
3437 by pieces, we can avoid loading the string from memory
3438 and only store the computed constants. */
3439 if (src_str
3440 && GET_CODE (len_rtx) == CONST_INT
3441 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3442 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3443 CONST_CAST (char *, src_str),
3444 dest_align, false))
3446 dest_mem = get_memory_rtx (dest, len);
3447 set_mem_align (dest_mem, dest_align);
3448 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3449 builtin_memcpy_read_str,
3450 CONST_CAST (char *, src_str),
3451 dest_align, false, endp);
3452 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3453 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3454 return dest_mem;
3457 if (GET_CODE (len_rtx) == CONST_INT
3458 && can_move_by_pieces (INTVAL (len_rtx),
3459 MIN (dest_align, src_align)))
3461 dest_mem = get_memory_rtx (dest, len);
3462 set_mem_align (dest_mem, dest_align);
3463 src_mem = get_memory_rtx (src, len);
3464 set_mem_align (src_mem, src_align);
3465 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3466 MIN (dest_align, src_align), endp);
3467 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3468 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3469 return dest_mem;
3472 return NULL_RTX;
3476 /* Expand expression EXP, which is a call to the memmove builtin. Return
3477 NULL_RTX if we failed; the caller should emit a normal call. */
3479 static rtx
3480 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3482 if (!validate_arglist (exp,
3483 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3484 return NULL_RTX;
3485 else
3487 tree dest = CALL_EXPR_ARG (exp, 0);
3488 tree src = CALL_EXPR_ARG (exp, 1);
3489 tree len = CALL_EXPR_ARG (exp, 2);
3490 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3491 target, mode, ignore);
3495 /* Helper function to do the actual work for expand_builtin_memmove. The
3496 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3497 so that this can also be called without constructing an actual CALL_EXPR.
3498 TYPE is the return type of the call. The other arguments and return value
3499 are the same as for expand_builtin_memmove. */
3501 static rtx
3502 expand_builtin_memmove_args (tree dest, tree src, tree len,
3503 tree type, rtx target, enum machine_mode mode,
3504 int ignore)
3506 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3508 if (result)
3510 STRIP_TYPE_NOPS (result);
3511 while (TREE_CODE (result) == COMPOUND_EXPR)
3513 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3514 EXPAND_NORMAL);
3515 result = TREE_OPERAND (result, 1);
3517 return expand_expr (result, target, mode, EXPAND_NORMAL);
3520 /* Otherwise, call the normal function. */
3521 return NULL_RTX;
3524 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3525 NULL_RTX if we failed; the caller should emit a normal call. */
3527 static rtx
3528 expand_builtin_bcopy (tree exp, int ignore)
3530 tree type = TREE_TYPE (exp);
3531 tree src, dest, size;
3533 if (!validate_arglist (exp,
3534 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3535 return NULL_RTX;
3537 src = CALL_EXPR_ARG (exp, 0);
3538 dest = CALL_EXPR_ARG (exp, 1);
3539 size = CALL_EXPR_ARG (exp, 2);
3541 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3542 This is done this way so that if it isn't expanded inline, we fall
3543 back to calling bcopy instead of memmove. */
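/* For example, bcopy (src, dst, n) is emitted as
   memmove (dst, src, (size_t) n); the value is discarded, hence
   const0_rtx as the target.  */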
3544 return expand_builtin_memmove_args (dest, src,
3545 fold_convert (sizetype, size),
3546 type, const0_rtx, VOIDmode,
3547 ignore);
3550 #ifndef HAVE_movstr
3551 # define HAVE_movstr 0
3552 # define CODE_FOR_movstr CODE_FOR_nothing
3553 #endif
3555 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3556 we failed; the caller should emit a normal call, otherwise try to
3557 get the result in TARGET, if convenient. If ENDP is 0 return the
3558 destination pointer, if ENDP is 1 return the end pointer ala
3559 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3560 stpcpy. */
3562 static rtx
3563 expand_movstr (tree dest, tree src, rtx target, int endp)
3565 rtx end;
3566 rtx dest_mem;
3567 rtx src_mem;
3568 rtx insn;
3569 const struct insn_data * data;
3571 if (!HAVE_movstr)
3572 return NULL_RTX;
3574 dest_mem = get_memory_rtx (dest, NULL);
3575 src_mem = get_memory_rtx (src, NULL);
3576 if (!endp)
3578 target = force_reg (Pmode, XEXP (dest_mem, 0));
3579 dest_mem = replace_equiv_address (dest_mem, target);
3580 end = gen_reg_rtx (Pmode);
3582 else
3584 if (target == 0 || target == const0_rtx)
3586 end = gen_reg_rtx (Pmode);
3587 if (target == 0)
3588 target = end;
3590 else
3591 end = target;
3594 data = insn_data + CODE_FOR_movstr;
3596 if (data->operand[0].mode != VOIDmode)
3597 end = gen_lowpart (data->operand[0].mode, end);
3599 insn = data->genfun (end, dest_mem, src_mem);
3601 gcc_assert (insn);
3603 emit_insn (insn);
3605 /* movstr is supposed to set end to the address of the NUL
3606 terminator. If the caller requested a mempcpy-like return value,
3607 adjust it. */
3608 if (endp == 1 && target != const0_rtx)
3610 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3611 emit_move_insn (target, force_operand (tem, NULL_RTX));
3614 return target;
3617 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3618 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3619 try to get the result in TARGET, if convenient (and in mode MODE if that's
3620 convenient). */
3622 static rtx
3623 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3625 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3627 tree dest = CALL_EXPR_ARG (exp, 0);
3628 tree src = CALL_EXPR_ARG (exp, 1);
3629 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3631 return NULL_RTX;
3634 /* Helper function to do the actual work for expand_builtin_strcpy. The
3635 arguments to the builtin_strcpy call DEST and SRC are broken out
3636 so that this can also be called without constructing an actual CALL_EXPR.
3637 The other arguments and return value are the same as for
3638 expand_builtin_strcpy. */
3640 static rtx
3641 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3642 rtx target, enum machine_mode mode)
3644 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3645 if (result)
3646 return expand_expr (result, target, mode, EXPAND_NORMAL);
3647 return expand_movstr (dest, src, target, /*endp=*/0);
3651 /* Expand a call EXP to the stpcpy builtin.
3652 Return NULL_RTX if we failed; the caller should emit a normal call,
3653 otherwise try to get the result in TARGET, if convenient (and in
3654 mode MODE if that's convenient). */
3656 static rtx
3657 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3659 tree dst, src;
3661 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3662 return NULL_RTX;
3664 dst = CALL_EXPR_ARG (exp, 0);
3665 src = CALL_EXPR_ARG (exp, 1);
3667 /* If return value is ignored, transform stpcpy into strcpy. */
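/* e.g. a plain `stpcpy (d, s);' whose value is unused is emitted as
   `strcpy (d, s);'.  */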
3668 if (target == const0_rtx)
3670 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3671 if (!fn)
3672 return NULL_RTX;
3674 return expand_expr (build_call_expr (fn, 2, dst, src),
3675 target, mode, EXPAND_NORMAL);
3677 else
3679 tree len, lenp1;
3680 rtx ret;
3682 /* Ensure we get an actual string whose length can be evaluated at
3683 compile-time, not an expression containing a string. This is
3684 because the latter will potentially produce pessimized code
3685 when used to produce the return value. */
3686 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3687 return expand_movstr (dst, src, target, /*endp=*/2);
3689 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3690 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3691 target, mode, /*endp=*/2);
3693 if (ret)
3694 return ret;
3696 if (TREE_CODE (len) == INTEGER_CST)
3698 rtx len_rtx = expand_normal (len);
3700 if (GET_CODE (len_rtx) == CONST_INT)
3702 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3703 dst, src, target, mode);
3705 if (ret)
3707 if (! target)
3709 if (mode != VOIDmode)
3710 target = gen_reg_rtx (mode);
3711 else
3712 target = gen_reg_rtx (GET_MODE (ret));
3714 if (GET_MODE (target) != GET_MODE (ret))
3715 ret = gen_lowpart (GET_MODE (target), ret);
3717 ret = plus_constant (ret, INTVAL (len_rtx));
3718 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3719 gcc_assert (ret);
3721 return target;
3726 return expand_movstr (dst, src, target, /*endp=*/2);
3730 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3731 bytes from constant string DATA + OFFSET and return it as target
3732 constant. */
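/* Offsets past the end of DATA yield zero, so the trailing padding
   that strncpy must write comes out as NUL bytes automatically.  */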
3734 static rtx
3735 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3736 enum machine_mode mode)
3738 const char *str = (const char *) data;
3740 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3741 return const0_rtx;
3743 return c_readstr (str + offset, mode);
3746 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3747 NULL_RTX if we failed; the caller should emit a normal call. */
3749 static rtx
3750 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3752 tree fndecl = get_callee_fndecl (exp);
3754 if (validate_arglist (exp,
3755 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3757 tree dest = CALL_EXPR_ARG (exp, 0);
3758 tree src = CALL_EXPR_ARG (exp, 1);
3759 tree len = CALL_EXPR_ARG (exp, 2);
3760 tree slen = c_strlen (src, 1);
3761 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3763 if (result)
3765 while (TREE_CODE (result) == COMPOUND_EXPR)
3767 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3768 EXPAND_NORMAL);
3769 result = TREE_OPERAND (result, 1);
3771 return expand_expr (result, target, mode, EXPAND_NORMAL);
3774 /* We must be passed a constant len and src parameter. */
3775 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3776 return NULL_RTX;
3778 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3780 /* We're required to pad with trailing zeros if the requested
3781 len is greater than strlen(s2)+1. In that case try to
3782 use store_by_pieces; if it fails, punt. */
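/* For example, strncpy (d, "ab", 5) must store 'a', 'b' and three
   NULs; builtin_strncpy_read_str supplies the zeros once OFFSET is
   past the end of "ab".  */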
3783 if (tree_int_cst_lt (slen, len))
3785 unsigned int dest_align
3786 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3787 const char *p = c_getstr (src);
3788 rtx dest_mem;
3790 if (!p || dest_align == 0 || !host_integerp (len, 1)
3791 || !can_store_by_pieces (tree_low_cst (len, 1),
3792 builtin_strncpy_read_str,
3793 CONST_CAST (char *, p),
3794 dest_align, false))
3795 return NULL_RTX;
3797 dest_mem = get_memory_rtx (dest, len);
3798 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3799 builtin_strncpy_read_str,
3800 CONST_CAST (char *, p), dest_align, false, 0);
3801 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3802 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3803 return dest_mem;
3806 return NULL_RTX;
3809 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3810 bytes from constant string DATA + OFFSET and return it as target
3811 constant. */
3813 static rtx
3814 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3815 enum machine_mode mode)
3817 const char *c = (const char *) data;
3818 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3820 memset (p, *c, GET_MODE_SIZE (mode));
3822 return c_readstr (p, mode);
3825 /* Callback routine for store_by_pieces. Return the RTL of a register
3826 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3827 char value given in the RTL register data. For example, if mode is
3828 4 bytes wide, return the RTL for 0x01010101*data. */
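/* The replication is done by multiplying the zero-extended byte by a
   constant whose bytes are all 1 (0x01010101 for a 4-byte mode),
   which c_readstr builds below from a buffer of 1s.  */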
3830 static rtx
3831 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3832 enum machine_mode mode)
3834 rtx target, coeff;
3835 size_t size;
3836 char *p;
3838 size = GET_MODE_SIZE (mode);
3839 if (size == 1)
3840 return (rtx) data;
3842 p = XALLOCAVEC (char, size);
3843 memset (p, 1, size);
3844 coeff = c_readstr (p, mode);
3846 target = convert_to_mode (mode, (rtx) data, 1);
3847 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3848 return force_reg (mode, target);
3851 /* Expand expression EXP, which is a call to the memset builtin. Return
3852 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3853 try to get the result in TARGET, if convenient (and in mode MODE if that's
3854 convenient). */
3856 static rtx
3857 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3859 if (!validate_arglist (exp,
3860 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3861 return NULL_RTX;
3862 else
3864 tree dest = CALL_EXPR_ARG (exp, 0);
3865 tree val = CALL_EXPR_ARG (exp, 1);
3866 tree len = CALL_EXPR_ARG (exp, 2);
3867 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3871 /* Helper function to do the actual work for expand_builtin_memset. The
3872 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3873 so that this can also be called without constructing an actual CALL_EXPR.
3874 The other arguments and return value are the same as for
3875 expand_builtin_memset. */
3877 static rtx
3878 expand_builtin_memset_args (tree dest, tree val, tree len,
3879 rtx target, enum machine_mode mode, tree orig_exp)
3881 tree fndecl, fn;
3882 enum built_in_function fcode;
3883 char c;
3884 unsigned int dest_align;
3885 rtx dest_mem, dest_addr, len_rtx;
3886 HOST_WIDE_INT expected_size = -1;
3887 unsigned int expected_align = 0;
3889 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3891 /* If DEST is not a pointer type, don't do this operation in-line. */
3892 if (dest_align == 0)
3893 return NULL_RTX;
3895 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3896 if (expected_align < dest_align)
3897 expected_align = dest_align;
3899 /* If the LEN parameter is zero, return DEST. */
3900 if (integer_zerop (len))
3902 /* Evaluate and ignore VAL in case it has side-effects. */
3903 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3904 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3907 /* Stabilize the arguments in case we fail. */
3908 dest = builtin_save_expr (dest);
3909 val = builtin_save_expr (val);
3910 len = builtin_save_expr (len);
3912 len_rtx = expand_normal (len);
3913 dest_mem = get_memory_rtx (dest, len);
3915 if (TREE_CODE (val) != INTEGER_CST)
3917 rtx val_rtx;
3919 val_rtx = expand_normal (val);
3920 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3921 val_rtx, 0);
3923 /* Assume that we can memset by pieces if we can store
3924 the coefficients by pieces (in the required modes).
3925 We can't pass builtin_memset_gen_str as that emits RTL. */
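/* So probe can_store_by_pieces with a dummy constant byte (C = 1
   below); if that succeeds, the real stores use builtin_memset_gen_str
   with the run-time value.  */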
3926 c = 1;
3927 if (host_integerp (len, 1)
3928 && can_store_by_pieces (tree_low_cst (len, 1),
3929 builtin_memset_read_str, &c, dest_align,
3930 true))
3932 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3933 val_rtx);
3934 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3935 builtin_memset_gen_str, val_rtx, dest_align,
3936 true, 0);
3938 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3939 dest_align, expected_align,
3940 expected_size))
3941 goto do_libcall;
3943 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3944 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3945 return dest_mem;
3948 if (target_char_cast (val, &c))
3949 goto do_libcall;
3951 if (c)
3953 if (host_integerp (len, 1)
3954 && can_store_by_pieces (tree_low_cst (len, 1),
3955 builtin_memset_read_str, &c, dest_align,
3956 true))
3957 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3958 builtin_memset_read_str, &c, dest_align, true, 0);
3959 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3960 dest_align, expected_align,
3961 expected_size))
3962 goto do_libcall;
3964 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3965 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3966 return dest_mem;
3969 set_mem_align (dest_mem, dest_align);
3970 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3971 CALL_EXPR_TAILCALL (orig_exp)
3972 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3973 expected_align, expected_size);
3975 if (dest_addr == 0)
3977 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3978 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3981 return dest_addr;
3983 do_libcall:
3984 fndecl = get_callee_fndecl (orig_exp);
3985 fcode = DECL_FUNCTION_CODE (fndecl);
3986 if (fcode == BUILT_IN_MEMSET)
3987 fn = build_call_expr (fndecl, 3, dest, val, len);
3988 else if (fcode == BUILT_IN_BZERO)
3989 fn = build_call_expr (fndecl, 2, dest, len);
3990 else
3991 gcc_unreachable ();
3992 if (TREE_CODE (fn) == CALL_EXPR)
3993 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3994 return expand_call (fn, target, target == const0_rtx);
3997 /* Expand expression EXP, which is a call to the bzero builtin. Return
3998 NULL_RTX if we failed; the caller should emit a normal call. */
4000 static rtx
4001 expand_builtin_bzero (tree exp)
4003 tree dest, size;
4005 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4006 return NULL_RTX;
4008 dest = CALL_EXPR_ARG (exp, 0);
4009 size = CALL_EXPR_ARG (exp, 1);
4011 /* New argument list transforming bzero(ptr x, int y) to
4012 memset(ptr x, int 0, size_t y). This is done this way
4013 so that if it isn't expanded inline, we fall back to
4014 calling bzero instead of memset. */
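/* In other words, bzero (p, n) is expanded as
   memset (p, 0, (size_t) n) with the result discarded.  */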
4016 return expand_builtin_memset_args (dest, integer_zero_node,
4017 fold_convert (sizetype, size),
4018 const0_rtx, VOIDmode, exp);
4021 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed; the
4022 caller should emit a normal call, otherwise try to get the result
4023 in TARGET, if convenient (and in mode MODE if that's convenient). */
4025 static rtx
4026 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4028 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4029 INTEGER_TYPE, VOID_TYPE))
4031 tree type = TREE_TYPE (exp);
4032 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4033 CALL_EXPR_ARG (exp, 1),
4034 CALL_EXPR_ARG (exp, 2), type);
4035 if (result)
4036 return expand_expr (result, target, mode, EXPAND_NORMAL);
4038 return NULL_RTX;
4041 /* Expand expression EXP, which is a call to the memcmp built-in function.
4042 Return NULL_RTX if we failed and the
4043 caller should emit a normal call, otherwise try to get the result in
4044 TARGET, if convenient (and in mode MODE, if that's convenient). */
4046 static rtx
4047 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4049 if (!validate_arglist (exp,
4050 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4051 return NULL_RTX;
4052 else
4054 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4055 CALL_EXPR_ARG (exp, 1),
4056 CALL_EXPR_ARG (exp, 2));
4057 if (result)
4058 return expand_expr (result, target, mode, EXPAND_NORMAL);
4061 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4063 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4064 rtx result;
4065 rtx insn;
4066 tree arg1 = CALL_EXPR_ARG (exp, 0);
4067 tree arg2 = CALL_EXPR_ARG (exp, 1);
4068 tree len = CALL_EXPR_ARG (exp, 2);
4070 int arg1_align
4071 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4072 int arg2_align
4073 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4074 enum machine_mode insn_mode;
4076 #ifdef HAVE_cmpmemsi
4077 if (HAVE_cmpmemsi)
4078 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4079 else
4080 #endif
4081 #ifdef HAVE_cmpstrnsi
4082 if (HAVE_cmpstrnsi)
4083 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4084 else
4085 #endif
4086 return NULL_RTX;
4088 /* If we don't know the alignment of either argument, call the function. */
4089 if (arg1_align == 0 || arg2_align == 0)
4090 return NULL_RTX;
4092 /* Make a place to write the result of the instruction. */
4093 result = target;
4094 if (! (result != 0
4095 && REG_P (result) && GET_MODE (result) == insn_mode
4096 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4097 result = gen_reg_rtx (insn_mode);
4099 arg1_rtx = get_memory_rtx (arg1, len);
4100 arg2_rtx = get_memory_rtx (arg2, len);
4101 arg3_rtx = expand_normal (len);
4103 /* Set MEM_SIZE as appropriate. */
4104 if (GET_CODE (arg3_rtx) == CONST_INT)
4106 set_mem_size (arg1_rtx, arg3_rtx);
4107 set_mem_size (arg2_rtx, arg3_rtx);
4110 #ifdef HAVE_cmpmemsi
4111 if (HAVE_cmpmemsi)
4112 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4113 GEN_INT (MIN (arg1_align, arg2_align)));
4114 else
4115 #endif
4116 #ifdef HAVE_cmpstrnsi
4117 if (HAVE_cmpstrnsi)
4118 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4119 GEN_INT (MIN (arg1_align, arg2_align)));
4120 else
4121 #endif
4122 gcc_unreachable ();
4124 if (insn)
4125 emit_insn (insn);
4126 else
4127 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4128 TYPE_MODE (integer_type_node), 3,
4129 XEXP (arg1_rtx, 0), Pmode,
4130 XEXP (arg2_rtx, 0), Pmode,
4131 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4132 TYPE_UNSIGNED (sizetype)),
4133 TYPE_MODE (sizetype));
4135 /* Return the value in the proper mode for this function. */
4136 mode = TYPE_MODE (TREE_TYPE (exp));
4137 if (GET_MODE (result) == mode)
4138 return result;
4139 else if (target != 0)
4141 convert_move (target, result, 0);
4142 return target;
4144 else
4145 return convert_to_mode (mode, result, 0);
4147 #endif
4149 return NULL_RTX;
4152 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4153 if we failed; the caller should emit a normal call, otherwise try to get
4154 the result in TARGET, if convenient. */
4156 static rtx
4157 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4159 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4160 return NULL_RTX;
4161 else
4163 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4164 CALL_EXPR_ARG (exp, 1));
4165 if (result)
4166 return expand_expr (result, target, mode, EXPAND_NORMAL);
4169 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4170 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4171 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4173 rtx arg1_rtx, arg2_rtx;
4174 rtx result, insn = NULL_RTX;
4175 tree fndecl, fn;
4176 tree arg1 = CALL_EXPR_ARG (exp, 0);
4177 tree arg2 = CALL_EXPR_ARG (exp, 1);
4179 int arg1_align
4180 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4181 int arg2_align
4182 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4184 /* If we don't know the alignment of either argument, call the function. */
4185 if (arg1_align == 0 || arg2_align == 0)
4186 return NULL_RTX;
4188 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4189 arg1 = builtin_save_expr (arg1);
4190 arg2 = builtin_save_expr (arg2);
4192 arg1_rtx = get_memory_rtx (arg1, NULL);
4193 arg2_rtx = get_memory_rtx (arg2, NULL);
4195 #ifdef HAVE_cmpstrsi
4196 /* Try to call cmpstrsi. */
4197 if (HAVE_cmpstrsi)
4199 enum machine_mode insn_mode
4200 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4202 /* Make a place to write the result of the instruction. */
4203 result = target;
4204 if (! (result != 0
4205 && REG_P (result) && GET_MODE (result) == insn_mode
4206 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4207 result = gen_reg_rtx (insn_mode);
4209 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4210 GEN_INT (MIN (arg1_align, arg2_align)));
4212 #endif
4213 #ifdef HAVE_cmpstrnsi
4214 /* Try to determine at least one length and call cmpstrnsi. */
4215 if (!insn && HAVE_cmpstrnsi)
4217 tree len;
4218 rtx arg3_rtx;
4220 enum machine_mode insn_mode
4221 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4222 tree len1 = c_strlen (arg1, 1);
4223 tree len2 = c_strlen (arg2, 1);
4225 if (len1)
4226 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4227 if (len2)
4228 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4230 /* If we don't have a constant length for the first, use the length
4231 of the second, if we know it. We don't require a constant for
4232 this case; some cost analysis could be done if both are available
4233 but neither is constant. For now, assume they're equally cheap,
4234 unless one has side effects. If both strings have constant lengths,
4235 use the smaller. */
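/* A constant strlen + 1 is a safe length bound here: e.g. for
   strcmp (s, "hi") comparing at most three bytes is enough, since a
   difference or the NUL of "hi" is reached by then.  */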
4237 if (!len1)
4238 len = len2;
4239 else if (!len2)
4240 len = len1;
4241 else if (TREE_SIDE_EFFECTS (len1))
4242 len = len2;
4243 else if (TREE_SIDE_EFFECTS (len2))
4244 len = len1;
4245 else if (TREE_CODE (len1) != INTEGER_CST)
4246 len = len2;
4247 else if (TREE_CODE (len2) != INTEGER_CST)
4248 len = len1;
4249 else if (tree_int_cst_lt (len1, len2))
4250 len = len1;
4251 else
4252 len = len2;
4254 /* If both arguments have side effects, we cannot optimize. */
4255 if (!len || TREE_SIDE_EFFECTS (len))
4256 goto do_libcall;
4258 arg3_rtx = expand_normal (len);
4260 /* Make a place to write the result of the instruction. */
4261 result = target;
4262 if (! (result != 0
4263 && REG_P (result) && GET_MODE (result) == insn_mode
4264 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4265 result = gen_reg_rtx (insn_mode);
4267 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4268 GEN_INT (MIN (arg1_align, arg2_align)));
4270 #endif
4272 if (insn)
4274 emit_insn (insn);
4276 /* Return the value in the proper mode for this function. */
4277 mode = TYPE_MODE (TREE_TYPE (exp));
4278 if (GET_MODE (result) == mode)
4279 return result;
4280 if (target == 0)
4281 return convert_to_mode (mode, result, 0);
4282 convert_move (target, result, 0);
4283 return target;
4286 /* Expand the library call ourselves using a stabilized argument
4287 list to avoid re-evaluating the function's arguments twice. */
4288 #ifdef HAVE_cmpstrnsi
4289 do_libcall:
4290 #endif
4291 fndecl = get_callee_fndecl (exp);
4292 fn = build_call_expr (fndecl, 2, arg1, arg2);
4293 if (TREE_CODE (fn) == CALL_EXPR)
4294 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4295 return expand_call (fn, target, target == const0_rtx);
4297 #endif
4298 return NULL_RTX;
4301 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4302 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4303 the result in TARGET, if convenient. */
4305 static rtx
4306 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4308 if (!validate_arglist (exp,
4309 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4310 return NULL_RTX;
4311 else
4313 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4314 CALL_EXPR_ARG (exp, 1),
4315 CALL_EXPR_ARG (exp, 2));
4316 if (result)
4317 return expand_expr (result, target, mode, EXPAND_NORMAL);
4320 /* If c_strlen can determine an expression for one of the string
4321 lengths, and it doesn't have side effects, then emit cmpstrnsi
4322 using length MIN(strlen(string)+1, arg3). */
4323 #ifdef HAVE_cmpstrnsi
4324 if (HAVE_cmpstrnsi)
4326 tree len, len1, len2;
4327 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4328 rtx result, insn;
4329 tree fndecl, fn;
4330 tree arg1 = CALL_EXPR_ARG (exp, 0);
4331 tree arg2 = CALL_EXPR_ARG (exp, 1);
4332 tree arg3 = CALL_EXPR_ARG (exp, 2);
4334 int arg1_align
4335 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4336 int arg2_align
4337 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4338 enum machine_mode insn_mode
4339 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4341 len1 = c_strlen (arg1, 1);
4342 len2 = c_strlen (arg2, 1);
4344 if (len1)
4345 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4346 if (len2)
4347 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4349 /* If we don't have a constant length for the first, use the length
4350 of the second, if we know it. We don't require a constant for
4351 this case; some cost analysis could be done if both are available
4352 but neither is constant. For now, assume they're equally cheap,
4353 unless one has side effects. If both strings have constant lengths,
4354 use the smaller. */
4356 if (!len1)
4357 len = len2;
4358 else if (!len2)
4359 len = len1;
4360 else if (TREE_SIDE_EFFECTS (len1))
4361 len = len2;
4362 else if (TREE_SIDE_EFFECTS (len2))
4363 len = len1;
4364 else if (TREE_CODE (len1) != INTEGER_CST)
4365 len = len2;
4366 else if (TREE_CODE (len2) != INTEGER_CST)
4367 len = len1;
4368 else if (tree_int_cst_lt (len1, len2))
4369 len = len1;
4370 else
4371 len = len2;
4373 /* If both arguments have side effects, we cannot optimize. */
4374 if (!len || TREE_SIDE_EFFECTS (len))
4375 return NULL_RTX;
4377 /* The actual new length parameter is MIN(len,arg3). */
4378 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4379 fold_convert (TREE_TYPE (len), arg3));
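/* e.g. for strncmp (s, "hi", 10) this yields MIN (3, 10) == 3, so at
   most three bytes are compared.  */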
4381 /* If we don't know the alignment of either argument, call the function. */
4382 if (arg1_align == 0 || arg2_align == 0)
4383 return NULL_RTX;
4385 /* Make a place to write the result of the instruction. */
4386 result = target;
4387 if (! (result != 0
4388 && REG_P (result) && GET_MODE (result) == insn_mode
4389 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4390 result = gen_reg_rtx (insn_mode);
4392 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4393 arg1 = builtin_save_expr (arg1);
4394 arg2 = builtin_save_expr (arg2);
4395 len = builtin_save_expr (len);
4397 arg1_rtx = get_memory_rtx (arg1, len);
4398 arg2_rtx = get_memory_rtx (arg2, len);
4399 arg3_rtx = expand_normal (len);
4400 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4401 GEN_INT (MIN (arg1_align, arg2_align)));
4402 if (insn)
4404 emit_insn (insn);
4406 /* Return the value in the proper mode for this function. */
4407 mode = TYPE_MODE (TREE_TYPE (exp));
4408 if (GET_MODE (result) == mode)
4409 return result;
4410 if (target == 0)
4411 return convert_to_mode (mode, result, 0);
4412 convert_move (target, result, 0);
4413 return target;
4416 /* Expand the library call ourselves using a stabilized argument
4417 list to avoid re-evaluating the function's arguments twice. */
4418 fndecl = get_callee_fndecl (exp);
4419 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4420 if (TREE_CODE (fn) == CALL_EXPR)
4421 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4422 return expand_call (fn, target, target == const0_rtx);
4424 #endif
4425 return NULL_RTX;
4428 /* Expand expression EXP, which is a call to the strcat builtin.
4429 Return NULL_RTX if we failed; the caller should emit a normal call,
4430 otherwise try to get the result in TARGET, if convenient. */
4432 static rtx
4433 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4435 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4436 return NULL_RTX;
4437 else
4439 tree dst = CALL_EXPR_ARG (exp, 0);
4440 tree src = CALL_EXPR_ARG (exp, 1);
4441 const char *p = c_getstr (src);
4443 /* If the string length is zero, return the dst parameter. */
4444 if (p && *p == '\0')
4445 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4447 if (!optimize_size)
4449 /* See if we can store by pieces into (dst + strlen(dst)). */
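/* In effect strcat (dst, src) is rewritten as
   strcpy (dst + strlen (dst), src), built inside a sequence so the
   whole thing can be discarded if the strcpy expansion fails.  */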
4450 tree newsrc, newdst,
4451 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4452 rtx insns;
4454 /* Stabilize the argument list. */
4455 newsrc = builtin_save_expr (src);
4456 dst = builtin_save_expr (dst);
4458 start_sequence ();
4460 /* Create strlen (dst). */
4461 newdst = build_call_expr (strlen_fn, 1, dst);
4462 /* Create (dst p+ strlen (dst)). */
4464 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4465 newdst = builtin_save_expr (newdst);
4467 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4469 end_sequence (); /* Stop sequence. */
4470 return NULL_RTX;
4473 /* Output the entire sequence. */
4474 insns = get_insns ();
4475 end_sequence ();
4476 emit_insn (insns);
4478 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4481 return NULL_RTX;
4485 /* Expand expression EXP, which is a call to the strncat builtin.
4486 Return NULL_RTX if we failed; the caller should emit a normal call,
4487 otherwise try to get the result in TARGET, if convenient. */
4489 static rtx
4490 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4492 if (validate_arglist (exp,
4493 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4495 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4496 CALL_EXPR_ARG (exp, 1),
4497 CALL_EXPR_ARG (exp, 2));
4498 if (result)
4499 return expand_expr (result, target, mode, EXPAND_NORMAL);
4501 return NULL_RTX;
4504 /* Expand expression EXP, which is a call to the strspn builtin.
4505 Return NULL_RTX if we failed; the caller should emit a normal call,
4506 otherwise try to get the result in TARGET, if convenient. */
4508 static rtx
4509 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4511 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4513 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4514 CALL_EXPR_ARG (exp, 1));
4515 if (result)
4516 return expand_expr (result, target, mode, EXPAND_NORMAL);
4518 return NULL_RTX;
4521 /* Expand expression EXP, which is a call to the strcspn builtin.
4522 Return NULL_RTX if we failed; the caller should emit a normal call,
4523 otherwise try to get the result in TARGET, if convenient. */
4525 static rtx
4526 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4528 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4530 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4531 CALL_EXPR_ARG (exp, 1));
4532 if (result)
4533 return expand_expr (result, target, mode, EXPAND_NORMAL);
4535 return NULL_RTX;
4538 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4539 if that's convenient. */
4541 rtx
4542 expand_builtin_saveregs (void)
4544 rtx val, seq;
4546 /* Don't do __builtin_saveregs more than once in a function.
4547 Save the result of the first call and reuse it. */
4548 if (saveregs_value != 0)
4549 return saveregs_value;
4551 /* When this function is called, it means that registers must be
4552 saved on entry to this function. So we migrate the call to the
4553 first insn of this function. */
4555 start_sequence ();
4557 /* Do whatever the machine needs done in this case. */
4558 val = targetm.calls.expand_builtin_saveregs ();
4560 seq = get_insns ();
4561 end_sequence ();
4563 saveregs_value = val;
4565 /* Put the insns after the NOTE that starts the function. If this
4566 is inside a start_sequence, make the outer-level insn chain current, so
4567 the code is placed at the start of the function. */
4568 push_topmost_sequence ();
4569 emit_insn_after (seq, entry_of_function ());
4570 pop_topmost_sequence ();
4572 return val;
4575 /* __builtin_args_info (N) returns word N of the arg space info
4576 for the current function. The number and meanings of words
4577 are controlled by the definition of CUMULATIVE_ARGS. */
4579 static rtx
4580 expand_builtin_args_info (tree exp)
4582 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4583 int *word_ptr = (int *) &crtl->args.info;
4585 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4587 if (call_expr_nargs (exp) != 0)
4589 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4590 error ("argument of %<__builtin_args_info%> must be constant");
4591 else
4593 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4595 if (wordnum < 0 || wordnum >= nwords)
4596 error ("argument of %<__builtin_args_info%> out of range");
4597 else
4598 return GEN_INT (word_ptr[wordnum]);
4601 else
4602 error ("missing argument in %<__builtin_args_info%>");
4604 return const0_rtx;
4607 /* Expand a call to __builtin_next_arg. */
4609 static rtx
4610 expand_builtin_next_arg (void)
4612 /* Checking arguments is already done in fold_builtin_next_arg
4613 that must be called before this function. */
4614 return expand_binop (ptr_mode, add_optab,
4615 crtl->args.internal_arg_pointer,
4616 crtl->args.arg_offset_rtx,
4617 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4620 /* Make it easier for the backends by protecting the valist argument
4621 from multiple evaluations. */
4623 static tree
4624 stabilize_va_list (tree valist, int needs_lvalue)
4626 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4628 if (TREE_SIDE_EFFECTS (valist))
4629 valist = save_expr (valist);
4631 /* For this case, the backends will be expecting a pointer to
4632 TREE_TYPE (va_list_type_node), but it's possible we've
4633 actually been given an array (an actual va_list_type_node).
4634 So fix it. */
4635 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4637 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4638 valist = build_fold_addr_expr_with_type (valist, p1);
4641 else
4643 tree pt;
4645 if (! needs_lvalue)
4647 if (! TREE_SIDE_EFFECTS (valist))
4648 return valist;
4650 pt = build_pointer_type (va_list_type_node);
4651 valist = fold_build1 (ADDR_EXPR, pt, valist);
4652 TREE_SIDE_EFFECTS (valist) = 1;
4655 if (TREE_SIDE_EFFECTS (valist))
4656 valist = save_expr (valist);
4657 valist = build_fold_indirect_ref (valist);
4660 return valist;
4663 /* The "standard" definition of va_list is void*. */
4665 tree
4666 std_build_builtin_va_list (void)
4668 return ptr_type_node;
4671 /* The "standard" implementation of va_start: just assign `nextarg' to
4672 the variable. */
4674 void
4675 std_expand_builtin_va_start (tree valist, rtx nextarg)
4677 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4678 convert_move (va_r, nextarg, 0);
4681 /* Expand EXP, a call to __builtin_va_start. */
4683 static rtx
4684 expand_builtin_va_start (tree exp)
4686 rtx nextarg;
4687 tree valist;
4689 if (call_expr_nargs (exp) < 2)
4691 error ("too few arguments to function %<va_start%>");
4692 return const0_rtx;
4695 if (fold_builtin_next_arg (exp, true))
4696 return const0_rtx;
4698 nextarg = expand_builtin_next_arg ();
4699 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4701 if (targetm.expand_builtin_va_start)
4702 targetm.expand_builtin_va_start (valist, nextarg);
4703 else
4704 std_expand_builtin_va_start (valist, nextarg);
4706 return const0_rtx;
4709 /* The "standard" implementation of va_arg: read the value from the
4710 current (padded) address and increment by the (padded) size. */
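/* Roughly: take the (possibly dynamically aligned) AP as the slot
   address, advance AP by round_up (sizeof (TYPE), the slot size), and
   return *(TYPE *) slot; with PAD_VARARGS_DOWN a small argument is
   read from the end of its slot rather than the start.  */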
4712 tree
4713 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4715 tree addr, t, type_size, rounded_size, valist_tmp;
4716 unsigned HOST_WIDE_INT align, boundary;
4717 bool indirect;
4719 #ifdef ARGS_GROW_DOWNWARD
4720 /* All of the alignment and movement below is for args-grow-up machines.
4721 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4722 implement their own specialized gimplify_va_arg_expr routines. */
4723 gcc_unreachable ();
4724 #endif
4726 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4727 if (indirect)
4728 type = build_pointer_type (type);
4730 align = PARM_BOUNDARY / BITS_PER_UNIT;
4731 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4733 /* Hoist the valist value into a temporary for the moment. */
4734 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4736 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4737 requires greater alignment, we must perform dynamic alignment. */
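/* i.e. valist_tmp = (valist_tmp + boundary - 1) & -boundary, emitted
   as a POINTER_PLUS_EXPR followed by a BIT_AND_EXPR in sizetype.  */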
4738 if (boundary > align
4739 && !integer_zerop (TYPE_SIZE (type)))
4741 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4742 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4743 valist_tmp, size_int (boundary - 1)));
4744 gimplify_and_add (t, pre_p);
4746 t = fold_convert (sizetype, valist_tmp);
4747 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4748 fold_convert (TREE_TYPE (valist),
4749 fold_build2 (BIT_AND_EXPR, sizetype, t,
4750 size_int (-boundary))));
4751 gimplify_and_add (t, pre_p);
4753 else
4754 boundary = align;
4756 /* If the actual alignment is less than the alignment of the type,
4757 adjust the type accordingly so that we don't assume strict alignment
4758 when dereferencing the pointer. */
4759 boundary *= BITS_PER_UNIT;
4760 if (boundary < TYPE_ALIGN (type))
4762 type = build_variant_type_copy (type);
4763 TYPE_ALIGN (type) = boundary;
4766 /* Compute the rounded size of the type. */
4767 type_size = size_in_bytes (type);
4768 rounded_size = round_up (type_size, align);
4770 /* Reduce rounded_size so it's sharable with the postqueue. */
4771 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4773 /* Get AP. */
4774 addr = valist_tmp;
4775 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4777 /* Small args are padded downward. */
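/* e.g. on a target with 4-byte argument slots (an illustrative
   assumption), a 1-byte argument is fetched from ADDR + 3; arguments
   wider than one slot get no adjustment.  */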
4778 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4779 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4780 size_binop (MINUS_EXPR, rounded_size, type_size));
4781 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4784 /* Compute new value for AP. */
4785 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4786 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4787 gimplify_and_add (t, pre_p);
4789 addr = fold_convert (build_pointer_type (type), addr);
4791 if (indirect)
4792 addr = build_va_arg_indirect_ref (addr);
4794 return build_va_arg_indirect_ref (addr);
4797 /* Build an indirect-ref expression over the given TREE, which represents a
4798 piece of a va_arg() expansion. */
4799 tree
4800 build_va_arg_indirect_ref (tree addr)
4802 addr = build_fold_indirect_ref (addr);
4804 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4805 mf_mark (addr);
4807 return addr;
4810 /* Return a dummy expression of type TYPE in order to keep going after an
4811 error. */
4813 static tree
4814 dummy_object (tree type)
4816 tree t = build_int_cst (build_pointer_type (type), 0);
4817 return build1 (INDIRECT_REF, type, t);
4820 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4821 builtin function, but a very special sort of operator. */
4823 enum gimplify_status
4824 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4826 tree promoted_type, want_va_type, have_va_type;
4827 tree valist = TREE_OPERAND (*expr_p, 0);
4828 tree type = TREE_TYPE (*expr_p);
4829 tree t;
4831 /* Verify that valist is of the proper type. */
4832 want_va_type = va_list_type_node;
4833 have_va_type = TREE_TYPE (valist);
4835 if (have_va_type == error_mark_node)
4836 return GS_ERROR;
4838 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4840 /* If va_list is an array type, the argument may have decayed
4841 to a pointer type, e.g. by being passed to another function.
4842 In that case, unwrap both types so that we can compare the
4843 underlying records. */
4844 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4845 || POINTER_TYPE_P (have_va_type))
4847 want_va_type = TREE_TYPE (want_va_type);
4848 have_va_type = TREE_TYPE (have_va_type);
4852 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4854 error ("first argument to %<va_arg%> not of type %<va_list%>");
4855 return GS_ERROR;
4858 /* Generate a diagnostic for requesting data of a type that cannot
4859 be passed through `...' due to type promotion at the call site. */
4860 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4861 != type)
4863 static bool gave_help;
4865 /* Unfortunately, this is merely undefined, rather than a constraint
4866 violation, so we cannot make this an error. If this call is never
4867 executed, the program is still strictly conforming. */
4868 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4869 type, promoted_type);
4870 if (! gave_help)
4872 gave_help = true;
4873 inform ("(so you should pass %qT not %qT to %<va_arg%>)",
4874 promoted_type, type);
4877 /* We can, however, treat "undefined" any way we please.
4878 Call abort to encourage the user to fix the program. */
4879 inform ("if this code is reached, the program will abort");
4880 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4881 append_to_statement_list (t, pre_p);
4883 /* This is dead code, but go ahead and finish so that the
4884 mode of the result comes out right. */
4885 *expr_p = dummy_object (type);
4886 return GS_ALL_DONE;
4888 else
4890 /* Make it easier for the backends by protecting the valist argument
4891 from multiple evaluations. */
4892 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4894 /* For this case, the backends will be expecting a pointer to
4895 TREE_TYPE (va_list_type_node), but it's possible we've
4896 actually been given an array (an actual va_list_type_node).
4897 So fix it. */
4898 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4900 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4901 valist = build_fold_addr_expr_with_type (valist, p1);
4903 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4905 else
4906 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4908 if (!targetm.gimplify_va_arg_expr)
4909 /* FIXME:Once most targets are converted we should merely
4910 assert this is non-null. */
4911 return GS_ALL_DONE;
4913 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4914 return GS_OK;
4918 /* Expand EXP, a call to __builtin_va_end. */
4920 static rtx
4921 expand_builtin_va_end (tree exp)
4923 tree valist = CALL_EXPR_ARG (exp, 0);
4925 /* Evaluate for side effects, if needed. I hate macros that don't
4926 do that. */
4927 if (TREE_SIDE_EFFECTS (valist))
4928 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4930 return const0_rtx;
4933 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4934 builtin rather than just as an assignment in stdarg.h because of the
4935 nastiness of array-type va_list types. */
4937 static rtx
4938 expand_builtin_va_copy (tree exp)
4940 tree dst, src, t;
4942 dst = CALL_EXPR_ARG (exp, 0);
4943 src = CALL_EXPR_ARG (exp, 1);
4945 dst = stabilize_va_list (dst, 1);
4946 src = stabilize_va_list (src, 0);
4948 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4950 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4951 TREE_SIDE_EFFECTS (t) = 1;
4952 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4954 else
4956 rtx dstb, srcb, size;
4958 /* Evaluate to pointers. */
4959 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4960 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4961 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4962 VOIDmode, EXPAND_NORMAL);
4964 dstb = convert_memory_address (Pmode, dstb);
4965 srcb = convert_memory_address (Pmode, srcb);
4967 /* "Dereference" to BLKmode memories. */
4968 dstb = gen_rtx_MEM (BLKmode, dstb);
4969 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4970 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4971 srcb = gen_rtx_MEM (BLKmode, srcb);
4972 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4973 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
4975 /* Copy. */
4976 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4979 return const0_rtx;
4982 /* Expand a call to one of the builtin functions __builtin_frame_address or
4983 __builtin_return_address. */
4985 static rtx
4986 expand_builtin_frame_address (tree fndecl, tree exp)
4988 /* The argument must be a nonnegative integer constant.
4989 It counts the number of frames to scan up the stack.
4990 The value is the return address saved in that frame. */
4991 if (call_expr_nargs (exp) == 0)
4992 /* Warning about missing arg was already issued. */
4993 return const0_rtx;
4994 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4996 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4997 error ("invalid argument to %<__builtin_frame_address%>");
4998 else
4999 error ("invalid argument to %<__builtin_return_address%>");
5000 return const0_rtx;
5002 else
5004 rtx tem
5005 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5006 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5008 /* Some ports cannot access arbitrary stack frames. */
5009 if (tem == NULL)
5011 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5012 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5013 else
5014 warning (0, "unsupported argument to %<__builtin_return_address%>");
5015 return const0_rtx;
5018 /* For __builtin_frame_address, return what we've got. */
5019 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5020 return tem;
5022 if (!REG_P (tem)
5023 && ! CONSTANT_P (tem))
5024 tem = copy_to_mode_reg (Pmode, tem);
5025 return tem;
5029 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5030 we failed and the caller should emit a normal call, otherwise try to get
5031 the result in TARGET, if convenient. */
5033 static rtx
5034 expand_builtin_alloca (tree exp, rtx target)
5036 rtx op0;
5037 rtx result;
5039 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5040 should always expand to function calls. These can be intercepted
5041 in libmudflap. */
5042 if (flag_mudflap)
5043 return NULL_RTX;
5045 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5046 return NULL_RTX;
5048 /* Compute the argument. */
5049 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5051 /* Allocate the desired space. */
5052 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5053 result = convert_memory_address (ptr_mode, result);
5055 return result;
5058 /* Expand a call to a bswap builtin with argument ARG0. MODE
5059 is the mode to expand with. */
5061 static rtx
5062 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5064 enum machine_mode mode;
5065 tree arg;
5066 rtx op0;
5068 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5069 return NULL_RTX;
5071 arg = CALL_EXPR_ARG (exp, 0);
5072 mode = TYPE_MODE (TREE_TYPE (arg));
5073 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5075 target = expand_unop (mode, bswap_optab, op0, target, 1);
5077 gcc_assert (target);
5079 return convert_to_mode (mode, target, 0);
5082 /* Expand a call to a unary builtin in EXP.
5083 Return NULL_RTX if a normal call should be emitted rather than expanding the
5084 function in-line. If convenient, the result should be placed in TARGET.
5085 SUBTARGET may be used as the target for computing one of EXP's operands. */
5087 static rtx
5088 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5089 rtx subtarget, optab op_optab)
5091 rtx op0;
5093 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5094 return NULL_RTX;
5096 /* Compute the argument. */
5097 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5098 VOIDmode, EXPAND_NORMAL);
5099 /* Compute op, into TARGET if possible.
5100 Set TARGET to wherever the result comes back. */
5101 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5102 op_optab, op0, target, 1);
5103 gcc_assert (target);
5105 return convert_to_mode (target_mode, target, 0);
5108 /* If the string passed to fputs is a constant and is one character
5109 long, we attempt to transform this call into __builtin_fputc(). */
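/* e.g. fputs ("\n", stream) can become fputc ('\n', stream); the
   actual rewriting is done by fold_builtin_fputs.  */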
5111 static rtx
5112 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5114 /* Verify the arguments in the original call. */
5115 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5117 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5118 CALL_EXPR_ARG (exp, 1),
5119 (target == const0_rtx),
5120 unlocked, NULL_TREE);
5121 if (result)
5122 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5124 return NULL_RTX;
5127 /* Expand a call to __builtin_expect. We just return our argument
5128 as the builtin_expect semantics should have already been applied by
5129 the tree branch prediction pass. */
5131 static rtx
5132 expand_builtin_expect (tree exp, rtx target)
5134 tree arg, c;
5136 if (call_expr_nargs (exp) < 2)
5137 return const0_rtx;
5138 arg = CALL_EXPR_ARG (exp, 0);
5139 c = CALL_EXPR_ARG (exp, 1);
5141 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5142 /* When guessing was done, the hints should be already stripped away. */
5143 gcc_assert (!flag_guess_branch_prob
5144 || optimize == 0 || errorcount || sorrycount);
5145 return target;
5148 void
5149 expand_builtin_trap (void)
5151 #ifdef HAVE_trap
5152 if (HAVE_trap)
5153 emit_insn (gen_trap ());
5154 else
5155 #endif
5156 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5157 emit_barrier ();
5160 /* Expand EXP, a call to fabs, fabsf or fabsl.
5161 Return NULL_RTX if a normal call should be emitted rather than expanding
5162 the function inline. If convenient, the result should be placed
5163 in TARGET. SUBTARGET may be used as the target for computing
5164 the operand. */
5166 static rtx
5167 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5169 enum machine_mode mode;
5170 tree arg;
5171 rtx op0;
5173 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5174 return NULL_RTX;
5176 arg = CALL_EXPR_ARG (exp, 0);
5177 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5178 mode = TYPE_MODE (TREE_TYPE (arg));
5179 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5180 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5183 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5184 Return NULL_RTX if a normal call should be emitted rather than expanding the
5185 function inline. If convenient, the result should be placed in TARGET.
5186 SUBTARGET may be used as the target for computing the operand. */
5188 static rtx
5189 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5191 rtx op0, op1;
5192 tree arg;
5194 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5195 return NULL_RTX;
5197 arg = CALL_EXPR_ARG (exp, 0);
5198 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5200 arg = CALL_EXPR_ARG (exp, 1);
5201 op1 = expand_normal (arg);
5203 return expand_copysign (op0, op1, target);
5206 /* Create a new constant string literal and return a char* pointer to it.
5207 The STRING_CST value is the LEN characters at STR. */
5208 tree
5209 build_string_literal (int len, const char *str)
5211 tree t, elem, index, type;
5213 t = build_string (len, str);
5214 elem = build_type_variant (char_type_node, 1, 0);
5215 index = build_index_type (size_int (len - 1));
5216 type = build_array_type (elem, index);
5217 TREE_TYPE (t) = type;
5218 TREE_CONSTANT (t) = 1;
5219 TREE_READONLY (t) = 1;
5220 TREE_STATIC (t) = 1;
5222 type = build_pointer_type (elem);
5223 t = build1 (ADDR_EXPR, type,
5224 build4 (ARRAY_REF, elem,
5225 t, integer_zero_node, NULL_TREE, NULL_TREE));
5226 return t;
5229 /* Expand EXP, a call to printf or printf_unlocked.
5230 Return NULL_RTX if a normal call should be emitted rather than transforming
5231 the function inline. If convenient, the result should be placed in
5232 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5233 call. */
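/* Cases handled below: "%s\n" becomes puts (arg), "%c" becomes
   putchar (arg), an empty format emits nothing, a one-character
   format becomes putchar of that character, and a %-free
   "string\n" becomes puts ("string"); everything else falls back
   to a normal printf call.  */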
5234 static rtx
5235 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5236 bool unlocked)
5238 /* If we're using an unlocked function, assume the other unlocked
5239 functions exist explicitly. */
5240 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5241 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5242 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5243 : implicit_built_in_decls[BUILT_IN_PUTS];
5244 const char *fmt_str;
5245 tree fn = 0;
5246 tree fmt, arg;
5247 int nargs = call_expr_nargs (exp);
5249 /* If the return value is used, don't do the transformation. */
5250 if (target != const0_rtx)
5251 return NULL_RTX;
5253 /* Verify the required arguments in the original call. */
5254 if (nargs == 0)
5255 return NULL_RTX;
5256 fmt = CALL_EXPR_ARG (exp, 0);
5257 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5258 return NULL_RTX;
5260 /* Check whether the format is a literal string constant. */
5261 fmt_str = c_getstr (fmt);
5262 if (fmt_str == NULL)
5263 return NULL_RTX;
5265 if (!init_target_chars ())
5266 return NULL_RTX;
5268 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5269 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5271 if ((nargs != 2)
5272 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5273 return NULL_RTX;
5274 if (fn_puts)
5275 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5277 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5278 else if (strcmp (fmt_str, target_percent_c) == 0)
5280 if ((nargs != 2)
5281 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5282 return NULL_RTX;
5283 if (fn_putchar)
5284 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5286 else
5288 /* We can't handle anything else with % args or %% ... yet. */
5289 if (strchr (fmt_str, target_percent))
5290 return NULL_RTX;
5292 if (nargs > 1)
5293 return NULL_RTX;
5295 /* If the format specifier was "", printf does nothing. */
5296 if (fmt_str[0] == '\0')
5297 return const0_rtx;
5298 /* If the format specifier has length of 1, call putchar. */
5299 if (fmt_str[1] == '\0')
5301 /* Given printf("c"), where c is any single character,
5302 convert "c"[0] to an int and pass that to the replacement
5303 function. */
5304 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5305 if (fn_putchar)
5306 fn = build_call_expr (fn_putchar, 1, arg);
5308 else
5310 /* If the format specifier was "string\n", call puts("string"). */
5311 size_t len = strlen (fmt_str);
5312 if ((unsigned char)fmt_str[len - 1] == target_newline)
5314 /* Create a NUL-terminated string that's one char shorter
5315 than the original, stripping off the trailing '\n'. */
5316 char *newstr = XALLOCAVEC (char, len);
5317 memcpy (newstr, fmt_str, len - 1);
5318 newstr[len - 1] = 0;
5319 arg = build_string_literal (len, newstr);
5320 if (fn_puts)
5321 fn = build_call_expr (fn_puts, 1, arg);
5323 else
5324 /* We'd like to arrange to call fputs(string,stdout) here,
5325 but we need stdout and don't have a way to get it yet. */
5326 return NULL_RTX;
5330 if (!fn)
5331 return NULL_RTX;
5332 if (TREE_CODE (fn) == CALL_EXPR)
5333 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5334 return expand_expr (fn, target, mode, EXPAND_NORMAL);
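/* A sketch of the rewrites performed above, assuming the printf return
   value is ignored (target == const0_rtx):

     printf ("%s\n", s);   =>  puts (s);
     printf ("%c", c);     =>  putchar (c);
     printf ("hi\n");      =>  puts ("hi");
     printf ("x");         =>  putchar ('x');
     printf ("");          =>  (nothing)

   Any other format containing '%' is left as a real printf call.  */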
5337 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5338 Return NULL_RTX if a normal call should be emitted rather than transforming
5339 the function inline. If convenient, the result should be placed in
5340 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5341 call. */
5342 static rtx
5343 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5344 bool unlocked)
5346 /* If we're using an unlocked function, assume the other unlocked
5347 functions exist explicitly. */
5348 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5349 : implicit_built_in_decls[BUILT_IN_FPUTC];
5350 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5351 : implicit_built_in_decls[BUILT_IN_FPUTS];
5352 const char *fmt_str;
5353 tree fn = 0;
5354 tree fmt, fp, arg;
5355 int nargs = call_expr_nargs (exp);
5357 /* If the return value is used, don't do the transformation. */
5358 if (target != const0_rtx)
5359 return NULL_RTX;
5361 /* Verify the required arguments in the original call. */
5362 if (nargs < 2)
5363 return NULL_RTX;
5364 fp = CALL_EXPR_ARG (exp, 0);
5365 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5366 return NULL_RTX;
5367 fmt = CALL_EXPR_ARG (exp, 1);
5368 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5369 return NULL_RTX;
5371 /* Check whether the format is a literal string constant. */
5372 fmt_str = c_getstr (fmt);
5373 if (fmt_str == NULL)
5374 return NULL_RTX;
5376 if (!init_target_chars ())
5377 return NULL_RTX;
5379 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5380 if (strcmp (fmt_str, target_percent_s) == 0)
5382 if ((nargs != 3)
5383 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5384 return NULL_RTX;
5385 arg = CALL_EXPR_ARG (exp, 2);
5386 if (fn_fputs)
5387 fn = build_call_expr (fn_fputs, 2, arg, fp);
5389 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5390 else if (strcmp (fmt_str, target_percent_c) == 0)
5392 if ((nargs != 3)
5393 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5394 return NULL_RTX;
5395 arg = CALL_EXPR_ARG (exp, 2);
5396 if (fn_fputc)
5397 fn = build_call_expr (fn_fputc, 2, arg, fp);
5399 else
5401 /* We can't handle anything else with % args or %% ... yet. */
5402 if (strchr (fmt_str, target_percent))
5403 return NULL_RTX;
5405 if (nargs > 2)
5406 return NULL_RTX;
5408 /* If the format specifier was "", fprintf does nothing. */
5409 if (fmt_str[0] == '\0')
5411 /* Evaluate and ignore FILE* argument for side-effects. */
5412 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5413 return const0_rtx;
5416 /* When "string" doesn't contain %, replace all cases of
5417 fprintf(stream,string) with fputs(string,stream). The fputs
5418 builtin will take care of special cases like length == 1. */
5419 if (fn_fputs)
5420 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5423 if (!fn)
5424 return NULL_RTX;
5425 if (TREE_CODE (fn) == CALL_EXPR)
5426 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5427 return expand_expr (fn, target, mode, EXPAND_NORMAL);
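/* Likewise for fprintf when its result is unused (illustrative sketch):

     fprintf (fp, "%s", s);   =>  fputs (s, fp);
     fprintf (fp, "%c", c);   =>  fputc (c, fp);
     fprintf (fp, "text");    =>  fputs ("text", fp);
     fprintf (fp, "");        =>  FP is evaluated for side effects only.

   Formats containing any other '%' directive fall back to a real fprintf.  */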
5430 /* Expand a call EXP to sprintf. Return NULL_RTX if
5431 a normal call should be emitted rather than expanding the function
5432 inline. If convenient, the result should be placed in TARGET with
5433 mode MODE. */
5435 static rtx
5436 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5438 tree dest, fmt;
5439 const char *fmt_str;
5440 int nargs = call_expr_nargs (exp);
5442 /* Verify the required arguments in the original call. */
5443 if (nargs < 2)
5444 return NULL_RTX;
5445 dest = CALL_EXPR_ARG (exp, 0);
5446 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5447 return NULL_RTX;
5448 fmt = CALL_EXPR_ARG (exp, 1);
5449 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5450 return NULL_RTX;
5452 /* Check whether the format is a literal string constant. */
5453 fmt_str = c_getstr (fmt);
5454 if (fmt_str == NULL)
5455 return NULL_RTX;
5457 if (!init_target_chars ())
5458 return NULL_RTX;
5460 /* If the format doesn't contain % args or %%, use strcpy. */
5461 if (strchr (fmt_str, target_percent) == 0)
5463 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5464 tree exp;
5466 if ((nargs > 2) || ! fn)
5467 return NULL_RTX;
5468 expand_expr (build_call_expr (fn, 2, dest, fmt),
5469 const0_rtx, VOIDmode, EXPAND_NORMAL);
5470 if (target == const0_rtx)
5471 return const0_rtx;
5472 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5473 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5475 /* If the format is "%s", use strcpy if the result isn't used. */
5476 else if (strcmp (fmt_str, target_percent_s) == 0)
5478 tree fn, arg, len;
5479 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5481 if (! fn)
5482 return NULL_RTX;
5483 if (nargs != 3)
5484 return NULL_RTX;
5485 arg = CALL_EXPR_ARG (exp, 2);
5486 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5487 return NULL_RTX;
5489 if (target != const0_rtx)
5491 len = c_strlen (arg, 1);
5492 if (! len || TREE_CODE (len) != INTEGER_CST)
5493 return NULL_RTX;
5495 else
5496 len = NULL_TREE;
5498 expand_expr (build_call_expr (fn, 2, dest, arg),
5499 const0_rtx, VOIDmode, EXPAND_NORMAL);
5501 if (target == const0_rtx)
5502 return const0_rtx;
5503 return expand_expr (len, target, mode, EXPAND_NORMAL);
5506 return NULL_RTX;
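/* Sketch of the two sprintf cases handled above (illustrative):

     n = sprintf (buf, "hello");   =>  strcpy (buf, "hello");  n = 5;
     sprintf (buf, "%s", s);       =>  strcpy (buf, s);
     n = sprintf (buf, "%s", s);   =>  strcpy (buf, s);  n = length of S,
                                       but only when that length is a
                                       compile-time constant.  */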
5509 /* Expand a call to either the entry or exit function profiler. */
5511 static rtx
5512 expand_builtin_profile_func (bool exitp)
5514 rtx this, which;
5516 this = DECL_RTL (current_function_decl);
5517 gcc_assert (MEM_P (this));
5518 this = XEXP (this, 0);
5520 if (exitp)
5521 which = profile_function_exit_libfunc;
5522 else
5523 which = profile_function_entry_libfunc;
5525 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5526 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5527 0, hard_frame_pointer_rtx),
5528 Pmode);
5530 return const0_rtx;
5533 /* Expand a call to __builtin___clear_cache. */
5535 static rtx
5536 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5538 #ifndef HAVE_clear_cache
5539 #ifdef CLEAR_INSN_CACHE
5540 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5541 does something. Just do the default expansion to a call to
5542 __clear_cache(). */
5543 return NULL_RTX;
5544 #else
5545 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5546 does nothing. There is no need to call it. Do nothing. */
5547 return const0_rtx;
5548 #endif /* CLEAR_INSN_CACHE */
5549 #else
5550 /* We have a "clear_cache" insn, and it will handle everything. */
5551 tree begin, end;
5552 rtx begin_rtx, end_rtx;
5553 enum insn_code icode;
5555 /* We must not expand to a library call. If we did, any
5556 fallback library function in libgcc that might contain a call to
5557 __builtin___clear_cache() would recurse infinitely. */
5558 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5560 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5561 return const0_rtx;
5564 if (HAVE_clear_cache)
5566 icode = CODE_FOR_clear_cache;
5568 begin = CALL_EXPR_ARG (exp, 0);
5569 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5570 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5571 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5572 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5574 end = CALL_EXPR_ARG (exp, 1);
5575 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5576 end_rtx = convert_memory_address (Pmode, end_rtx);
5577 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5578 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5580 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5582 return const0_rtx;
5583 #endif /* HAVE_clear_cache */
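/* Typical caller-level use of this builtin, e.g. in a JIT that has just
   written instructions into a buffer (illustrative sketch; BUF, INSNS and
   SIZE are made-up names):

     memcpy (buf, insns, size);
     __builtin___clear_cache (buf, (char *) buf + size);

   after which the code at BUF can be executed safely.  */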
5586 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5588 static rtx
5589 round_trampoline_addr (rtx tramp)
5591 rtx temp, addend, mask;
5593 /* If we don't need too much alignment, we'll have been guaranteed
5594 proper alignment by get_trampoline_type. */
5595 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5596 return tramp;
5598 /* Round address up to desired boundary. */
5599 temp = gen_reg_rtx (Pmode);
5600 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5601 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5603 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5604 temp, 0, OPTAB_LIB_WIDEN);
5605 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5606 temp, 0, OPTAB_LIB_WIDEN);
5608 return tramp;
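/* The two binops above compute the usual power-of-two round-up,
   (TRAMP + A - 1) & -A with A = TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT;
   e.g. with A == 16, an address of 0x1005 becomes 0x1010.  */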
5611 static rtx
5612 expand_builtin_init_trampoline (tree exp)
5614 tree t_tramp, t_func, t_chain;
5615 rtx r_tramp, r_func, r_chain;
5616 #ifdef TRAMPOLINE_TEMPLATE
5617 rtx blktramp;
5618 #endif
5620 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5621 POINTER_TYPE, VOID_TYPE))
5622 return NULL_RTX;
5624 t_tramp = CALL_EXPR_ARG (exp, 0);
5625 t_func = CALL_EXPR_ARG (exp, 1);
5626 t_chain = CALL_EXPR_ARG (exp, 2);
5628 r_tramp = expand_normal (t_tramp);
5629 r_func = expand_normal (t_func);
5630 r_chain = expand_normal (t_chain);
5632 /* Generate insns to initialize the trampoline. */
5633 r_tramp = round_trampoline_addr (r_tramp);
5634 #ifdef TRAMPOLINE_TEMPLATE
5635 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5636 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5637 emit_block_move (blktramp, assemble_trampoline_template (),
5638 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5639 #endif
5640 trampolines_created = 1;
5641 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5643 return const0_rtx;
5646 static rtx
5647 expand_builtin_adjust_trampoline (tree exp)
5649 rtx tramp;
5651 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5652 return NULL_RTX;
5654 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5655 tramp = round_trampoline_addr (tramp);
5656 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5657 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5658 #endif
5660 return tramp;
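/* Trampolines come into play when the address of a nested function that
   needs its enclosing frame escapes, e.g. (GNU C, illustrative; the callee
   name is made up):

     int outer (int x)
     {
       int inner (int y) { return x + y; }
       return call_through_pointer (inner);
     }

   Taking INNER's address forces a trampoline carrying OUTER's static chain.  */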
5663 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5664 function. The function first checks whether the back end provides
5665 an insn to implement signbit for the respective mode. If not, it
5666 checks whether the floating point format of the value is such that
5667 the sign bit can be extracted. If that is not the case, the
5668 function returns NULL_RTX to indicate that a normal call should be
5669 emitted rather than expanding the function in-line. EXP is the
5670 expression that is a call to the builtin function; if convenient,
5671 the result should be placed in TARGET. */
5672 static rtx
5673 expand_builtin_signbit (tree exp, rtx target)
5675 const struct real_format *fmt;
5676 enum machine_mode fmode, imode, rmode;
5677 HOST_WIDE_INT hi, lo;
5678 tree arg;
5679 int word, bitpos;
5680 enum insn_code icode;
5681 rtx temp;
5683 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5684 return NULL_RTX;
5686 arg = CALL_EXPR_ARG (exp, 0);
5687 fmode = TYPE_MODE (TREE_TYPE (arg));
5688 rmode = TYPE_MODE (TREE_TYPE (exp));
5689 fmt = REAL_MODE_FORMAT (fmode);
5691 arg = builtin_save_expr (arg);
5693 /* Expand the argument yielding a RTX expression. */
5694 temp = expand_normal (arg);
5696 /* Check if the back end provides an insn that handles signbit for the
5697 argument's mode. */
5698 icode = signbit_optab->handlers [(int) fmode].insn_code;
5699 if (icode != CODE_FOR_nothing)
5701 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5702 emit_unop_insn (icode, target, temp, UNKNOWN);
5703 return target;
5706 /* For floating point formats without a sign bit, implement signbit
5707 as "ARG < 0.0". */
5708 bitpos = fmt->signbit_ro;
5709 if (bitpos < 0)
5711 /* But we can't do this if the format supports signed zero. */
5712 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5713 return NULL_RTX;
5715 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5716 build_real (TREE_TYPE (arg), dconst0));
5717 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5720 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5722 imode = int_mode_for_mode (fmode);
5723 if (imode == BLKmode)
5724 return NULL_RTX;
5725 temp = gen_lowpart (imode, temp);
5727 else
5729 imode = word_mode;
5730 /* Handle targets with different FP word orders. */
5731 if (FLOAT_WORDS_BIG_ENDIAN)
5732 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5733 else
5734 word = bitpos / BITS_PER_WORD;
5735 temp = operand_subword_force (temp, word, fmode);
5736 bitpos = bitpos % BITS_PER_WORD;
5739 /* Force the intermediate word_mode (or narrower) result into a
5740 register. This avoids attempting to create paradoxical SUBREGs
5741 of floating point modes below. */
5742 temp = force_reg (imode, temp);
5744 /* If the bitpos is within the "result mode" lowpart, the operation
5745 can be implemented with a single bitwise AND. Otherwise, we need
5746 a right shift and an AND. */
5748 if (bitpos < GET_MODE_BITSIZE (rmode))
5750 if (bitpos < HOST_BITS_PER_WIDE_INT)
5752 hi = 0;
5753 lo = (HOST_WIDE_INT) 1 << bitpos;
5755 else
5757 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5758 lo = 0;
5761 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5762 temp = gen_lowpart (rmode, temp);
5763 temp = expand_binop (rmode, and_optab, temp,
5764 immed_double_const (lo, hi, rmode),
5765 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5767 else
5769 /* Perform a logical right shift to place the signbit in the least
5770 significant bit, then truncate the result to the desired mode
5771 and mask just this bit. */
5772 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5773 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5774 temp = gen_lowpart (rmode, temp);
5775 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5776 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5779 return temp;
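/* User-level sketch of the bit-twiddling fallback above, assuming a 32-bit
   IEEE single float whose sign occupies bit 31 on a typical target
   (illustrative only, not part of the expander):  */
#if 0
#include <string.h>
static int
signbit_float_sketch (float x)
{
  unsigned int bits;
  memcpy (&bits, &x, sizeof bits);   /* view the float's representation */
  return (bits >> 31) & 1;           /* shift the sign bit down and mask it */
}
#endif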
5782 /* Expand fork or exec calls. TARGET is the desired target of the
5783 call. EXP is the call. FN is the
5784 FUNCTION_DECL of the function being called. IGNORE is nonzero if the
5785 value is to be ignored. */
5787 static rtx
5788 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5790 tree id, decl;
5791 tree call;
5793 /* If we are not profiling, just call the function. */
5794 if (!profile_arc_flag)
5795 return NULL_RTX;
5797 /* Otherwise call the wrapper. This should be equivalent for the rest of
5798 the compiler, so the code does not diverge, and the wrapper may run the
5799 code necessary for keeping the profiling sane. */
5801 switch (DECL_FUNCTION_CODE (fn))
5803 case BUILT_IN_FORK:
5804 id = get_identifier ("__gcov_fork");
5805 break;
5807 case BUILT_IN_EXECL:
5808 id = get_identifier ("__gcov_execl");
5809 break;
5811 case BUILT_IN_EXECV:
5812 id = get_identifier ("__gcov_execv");
5813 break;
5815 case BUILT_IN_EXECLP:
5816 id = get_identifier ("__gcov_execlp");
5817 break;
5819 case BUILT_IN_EXECLE:
5820 id = get_identifier ("__gcov_execle");
5821 break;
5823 case BUILT_IN_EXECVP:
5824 id = get_identifier ("__gcov_execvp");
5825 break;
5827 case BUILT_IN_EXECVE:
5828 id = get_identifier ("__gcov_execve");
5829 break;
5831 default:
5832 gcc_unreachable ();
5835 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5836 DECL_EXTERNAL (decl) = 1;
5837 TREE_PUBLIC (decl) = 1;
5838 DECL_ARTIFICIAL (decl) = 1;
5839 TREE_NOTHROW (decl) = 1;
5840 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5841 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5842 call = rewrite_call_expr (exp, 0, decl, 0);
5843 return expand_call (call, target, ignore);
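/* Example of the rewrite when -fprofile-arcs is in effect (illustrative):

     fork ();                        =>  __gcov_fork ();
     execl (path, arg, (char *) 0);  =>  __gcov_execl (path, arg, (char *) 0);

   The libgcov wrappers flush the profile counters before handing control
   to the real fork/exec.  */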
5848 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5849 the pointer in these functions is void*, the tree optimizers may remove
5850 casts. The mode computed in expand_builtin isn't reliable either, due
5851 to __sync_bool_compare_and_swap.
5853 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5854 group of builtins. This gives us log2 of the mode size. */
5856 static inline enum machine_mode
5857 get_builtin_sync_mode (int fcode_diff)
5859 /* The size is not negotiable, so ask not to get BLKmode in return
5860 if the target indicates that a smaller size would be better. */
5861 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
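/* For example, with 8-bit units the FOO_1 .. FOO_16 variants give
   FCODE_DIFF 0..4, i.e. 8, 16, 32, 64 and 128 bit integer modes
   (QImode, HImode, SImode, DImode, TImode on typical targets).  */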
5864 /* Expand the memory expression LOC and return the appropriate memory operand
5865 for the builtin_sync operations. */
5867 static rtx
5868 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5870 rtx addr, mem;
5872 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5874 /* Note that we explicitly do not want any alias information for this
5875 memory, so that we kill all other live memories. Otherwise we don't
5876 satisfy the full barrier semantics of the intrinsic. */
5877 mem = validize_mem (gen_rtx_MEM (mode, addr));
5879 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5880 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5881 MEM_VOLATILE_P (mem) = 1;
5883 return mem;
5886 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5887 EXP is the CALL_EXPR. CODE is the rtx code
5888 that corresponds to the arithmetic or logical operation from the name;
5889 an exception here is that NOT actually means NAND. TARGET is an optional
5890 place for us to store the results; AFTER is true if this is the
5891 xxx_and_fetch form, i.e. the value after the operation is returned. IGNORE is true if we don't actually care about
5892 the result of the operation at all. */
5894 static rtx
5895 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5896 enum rtx_code code, bool after,
5897 rtx target, bool ignore)
5899 rtx val, mem;
5900 enum machine_mode old_mode;
5902 /* Expand the operands. */
5903 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5905 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5906 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5907 of CONST_INTs, where we know the old_mode only from the call argument. */
5908 old_mode = GET_MODE (val);
5909 if (old_mode == VOIDmode)
5910 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5911 val = convert_modes (mode, old_mode, val, 1);
5913 if (ignore)
5914 return expand_sync_operation (mem, val, code);
5915 else
5916 return expand_sync_fetch_operation (mem, val, code, after, target);
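/* Caller-level view of the AFTER/IGNORE combinations (illustrative sketch):  */
#if 0
static void
sync_op_sketch (int *counter)
{
  int old_val = __sync_fetch_and_add (counter, 4);  /* AFTER false: old value */
  int new_val = __sync_add_and_fetch (counter, 4);  /* AFTER true: new value  */
  __sync_fetch_and_or (counter, 1);                 /* result unused: IGNORE  */
  (void) old_val; (void) new_val;
}
#endif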
5919 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5920 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5921 true if this is the boolean form. TARGET is a place for us to store the
5922 results; this is NOT optional if IS_BOOL is true. */
5924 static rtx
5925 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5926 bool is_bool, rtx target)
5928 rtx old_val, new_val, mem;
5929 enum machine_mode old_mode;
5931 /* Expand the operands. */
5932 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5935 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5936 mode, EXPAND_NORMAL);
5937 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5938 of CONST_INTs, where we know the old_mode only from the call argument. */
5939 old_mode = GET_MODE (old_val);
5940 if (old_mode == VOIDmode)
5941 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5942 old_val = convert_modes (mode, old_mode, old_val, 1);
5944 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5945 mode, EXPAND_NORMAL);
5946 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5947 of CONST_INTs, where we know the old_mode only from the call argument. */
5948 old_mode = GET_MODE (new_val);
5949 if (old_mode == VOIDmode)
5950 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5951 new_val = convert_modes (mode, old_mode, new_val, 1);
5953 if (is_bool)
5954 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5955 else
5956 return expand_val_compare_and_swap (mem, old_val, new_val, target);
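/* The two flavours at the source level (illustrative sketch):  */
#if 0
static void
cas_sketch (int *p)
{
  /* Boolean form: nonzero iff *P was 0 and has been replaced by 1.  */
  int swapped = __sync_bool_compare_and_swap (p, 0, 1);
  /* Value form: returns the contents of *P before the attempt.  */
  int previous = __sync_val_compare_and_swap (p, 1, 2);
  (void) swapped; (void) previous;
}
#endif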
5959 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5960 general form is actually an atomic exchange, and some targets only
5961 support a reduced form with the second argument being a constant 1.
5962 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5963 the results. */
5965 static rtx
5966 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5967 rtx target)
5969 rtx val, mem;
5970 enum machine_mode old_mode;
5972 /* Expand the operands. */
5973 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5974 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5975 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5976 of CONST_INTs, where we know the old_mode only from the call argument. */
5977 old_mode = GET_MODE (val);
5978 if (old_mode == VOIDmode)
5979 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5980 val = convert_modes (mode, old_mode, val, 1);
5982 return expand_sync_lock_test_and_set (mem, val, target);
5985 /* Expand the __sync_synchronize intrinsic. */
5987 static void
5988 expand_builtin_synchronize (void)
5990 tree x;
5992 #ifdef HAVE_memory_barrier
5993 if (HAVE_memory_barrier)
5995 emit_insn (gen_memory_barrier ());
5996 return;
5998 #endif
6000 if (synchronize_libfunc != NULL_RTX)
6002 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6003 return;
6006 /* If no explicit memory barrier instruction is available, create an
6007 empty asm stmt with a memory clobber. */
6008 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6009 tree_cons (NULL, build_string (6, "memory"), NULL));
6010 ASM_VOLATILE_P (x) = 1;
6011 expand_asm_expr (x);
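/* The fallback built just above corresponds to the classic compiler-level
   barrier (illustrative):

     __asm__ __volatile__ ("" : : : "memory");

   i.e. an empty volatile asm whose only effect is the "memory" clobber.  */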
6014 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6016 static void
6017 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6019 enum insn_code icode;
6020 rtx mem, insn;
6021 rtx val = const0_rtx;
6023 /* Expand the operands. */
6024 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6026 /* If there is an explicit operation in the md file, use it. */
6027 icode = sync_lock_release[mode];
6028 if (icode != CODE_FOR_nothing)
6030 if (!insn_data[icode].operand[1].predicate (val, mode))
6031 val = force_reg (mode, val);
6033 insn = GEN_FCN (icode) (mem, val);
6034 if (insn)
6036 emit_insn (insn);
6037 return;
6041 /* Otherwise we can implement this operation by emitting a barrier
6042 followed by a store of zero. */
6043 expand_builtin_synchronize ();
6044 emit_move_insn (mem, val);
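/* How the two lock builtins pair up in user code (illustrative sketch):  */
#if 0
static void
spinlock_sketch (int *lock)
{
  while (__sync_lock_test_and_set (lock, 1))  /* acquire: atomic exchange of 1 */
    ;                                         /* spin while the old value was 1 */
  /* ... critical section ... */
  __sync_lock_release (lock);                 /* release: barrier, then store 0 */
}
#endif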
6047 /* Expand an expression EXP that calls a built-in function,
6048 with result going to TARGET if that's convenient
6049 (and in mode MODE if that's convenient).
6050 SUBTARGET may be used as the target for computing one of EXP's operands.
6051 IGNORE is nonzero if the value is to be ignored. */
6053 rtx
6054 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6055 int ignore)
6057 tree fndecl = get_callee_fndecl (exp);
6058 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6059 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6061 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6062 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6064 /* When not optimizing, generate calls to library functions for a certain
6065 set of builtins. */
6066 if (!optimize
6067 && !called_as_built_in (fndecl)
6068 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6069 && fcode != BUILT_IN_ALLOCA)
6070 return expand_call (exp, target, ignore);
6072 /* The built-in function expanders test for target == const0_rtx
6073 to determine whether the function's result will be ignored. */
6074 if (ignore)
6075 target = const0_rtx;
6077 /* If the result of a pure or const built-in function is ignored, and
6078 none of its arguments are volatile, we can avoid expanding the
6079 built-in call and just evaluate the arguments for side-effects. */
6080 if (target == const0_rtx
6081 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6083 bool volatilep = false;
6084 tree arg;
6085 call_expr_arg_iterator iter;
6087 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6088 if (TREE_THIS_VOLATILE (arg))
6090 volatilep = true;
6091 break;
6094 if (! volatilep)
6096 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6097 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6098 return const0_rtx;
6102 switch (fcode)
6104 CASE_FLT_FN (BUILT_IN_FABS):
6105 target = expand_builtin_fabs (exp, target, subtarget);
6106 if (target)
6107 return target;
6108 break;
6110 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6111 target = expand_builtin_copysign (exp, target, subtarget);
6112 if (target)
6113 return target;
6114 break;
6116 /* Just do a normal library call if we were unable to fold
6117 the values. */
6118 CASE_FLT_FN (BUILT_IN_CABS):
6119 break;
6121 CASE_FLT_FN (BUILT_IN_EXP):
6122 CASE_FLT_FN (BUILT_IN_EXP10):
6123 CASE_FLT_FN (BUILT_IN_POW10):
6124 CASE_FLT_FN (BUILT_IN_EXP2):
6125 CASE_FLT_FN (BUILT_IN_EXPM1):
6126 CASE_FLT_FN (BUILT_IN_LOGB):
6127 CASE_FLT_FN (BUILT_IN_LOG):
6128 CASE_FLT_FN (BUILT_IN_LOG10):
6129 CASE_FLT_FN (BUILT_IN_LOG2):
6130 CASE_FLT_FN (BUILT_IN_LOG1P):
6131 CASE_FLT_FN (BUILT_IN_TAN):
6132 CASE_FLT_FN (BUILT_IN_ASIN):
6133 CASE_FLT_FN (BUILT_IN_ACOS):
6134 CASE_FLT_FN (BUILT_IN_ATAN):
6135 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6136 because of possible accuracy problems. */
6137 if (! flag_unsafe_math_optimizations)
6138 break;
6139 CASE_FLT_FN (BUILT_IN_SQRT):
6140 CASE_FLT_FN (BUILT_IN_FLOOR):
6141 CASE_FLT_FN (BUILT_IN_CEIL):
6142 CASE_FLT_FN (BUILT_IN_TRUNC):
6143 CASE_FLT_FN (BUILT_IN_ROUND):
6144 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6145 CASE_FLT_FN (BUILT_IN_RINT):
6146 target = expand_builtin_mathfn (exp, target, subtarget);
6147 if (target)
6148 return target;
6149 break;
6151 CASE_FLT_FN (BUILT_IN_ILOGB):
6152 if (! flag_unsafe_math_optimizations)
6153 break;
6154 CASE_FLT_FN (BUILT_IN_ISINF):
6155 CASE_FLT_FN (BUILT_IN_FINITE):
6156 case BUILT_IN_ISFINITE:
6157 case BUILT_IN_ISNORMAL:
6158 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6159 if (target)
6160 return target;
6161 break;
6163 CASE_FLT_FN (BUILT_IN_LCEIL):
6164 CASE_FLT_FN (BUILT_IN_LLCEIL):
6165 CASE_FLT_FN (BUILT_IN_LFLOOR):
6166 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6167 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6168 if (target)
6169 return target;
6170 break;
6172 CASE_FLT_FN (BUILT_IN_LRINT):
6173 CASE_FLT_FN (BUILT_IN_LLRINT):
6174 CASE_FLT_FN (BUILT_IN_LROUND):
6175 CASE_FLT_FN (BUILT_IN_LLROUND):
6176 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6177 if (target)
6178 return target;
6179 break;
6181 CASE_FLT_FN (BUILT_IN_POW):
6182 target = expand_builtin_pow (exp, target, subtarget);
6183 if (target)
6184 return target;
6185 break;
6187 CASE_FLT_FN (BUILT_IN_POWI):
6188 target = expand_builtin_powi (exp, target, subtarget);
6189 if (target)
6190 return target;
6191 break;
6193 CASE_FLT_FN (BUILT_IN_ATAN2):
6194 CASE_FLT_FN (BUILT_IN_LDEXP):
6195 CASE_FLT_FN (BUILT_IN_SCALB):
6196 CASE_FLT_FN (BUILT_IN_SCALBN):
6197 CASE_FLT_FN (BUILT_IN_SCALBLN):
6198 if (! flag_unsafe_math_optimizations)
6199 break;
6201 CASE_FLT_FN (BUILT_IN_FMOD):
6202 CASE_FLT_FN (BUILT_IN_REMAINDER):
6203 CASE_FLT_FN (BUILT_IN_DREM):
6204 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6205 if (target)
6206 return target;
6207 break;
6209 CASE_FLT_FN (BUILT_IN_CEXPI):
6210 target = expand_builtin_cexpi (exp, target, subtarget);
6211 gcc_assert (target);
6212 return target;
6214 CASE_FLT_FN (BUILT_IN_SIN):
6215 CASE_FLT_FN (BUILT_IN_COS):
6216 if (! flag_unsafe_math_optimizations)
6217 break;
6218 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6219 if (target)
6220 return target;
6221 break;
6223 CASE_FLT_FN (BUILT_IN_SINCOS):
6224 if (! flag_unsafe_math_optimizations)
6225 break;
6226 target = expand_builtin_sincos (exp);
6227 if (target)
6228 return target;
6229 break;
6231 case BUILT_IN_APPLY_ARGS:
6232 return expand_builtin_apply_args ();
6234 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6235 FUNCTION with a copy of the parameters described by
6236 ARGUMENTS, and ARGSIZE. It returns a block of memory
6237 allocated on the stack into which is stored all the registers
6238 that might possibly be used for returning the result of a
6239 function. ARGUMENTS is the value returned by
6240 __builtin_apply_args. ARGSIZE is the number of bytes of
6241 arguments that must be copied. ??? How should this value be
6242 computed? We'll also need a safe worst case value for varargs
6243 functions. */
6244 case BUILT_IN_APPLY:
6245 if (!validate_arglist (exp, POINTER_TYPE,
6246 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6247 && !validate_arglist (exp, REFERENCE_TYPE,
6248 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6249 return const0_rtx;
6250 else
6252 rtx ops[3];
6254 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6255 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6256 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6258 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6261 /* __builtin_return (RESULT) causes the function to return the
6262 value described by RESULT. RESULT is address of the block of
6263 memory returned by __builtin_apply. */
6264 case BUILT_IN_RETURN:
6265 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6266 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6267 return const0_rtx;
6269 case BUILT_IN_SAVEREGS:
6270 return expand_builtin_saveregs ();
6272 case BUILT_IN_ARGS_INFO:
6273 return expand_builtin_args_info (exp);
6275 case BUILT_IN_VA_ARG_PACK:
6276 /* All valid uses of __builtin_va_arg_pack () are removed during
6277 inlining. */
6278 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6279 return const0_rtx;
6281 case BUILT_IN_VA_ARG_PACK_LEN:
6282 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6283 inlining. */
6284 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6285 return const0_rtx;
6287 /* Return the address of the first anonymous stack arg. */
6288 case BUILT_IN_NEXT_ARG:
6289 if (fold_builtin_next_arg (exp, false))
6290 return const0_rtx;
6291 return expand_builtin_next_arg ();
6293 case BUILT_IN_CLEAR_CACHE:
6294 target = expand_builtin___clear_cache (exp);
6295 if (target)
6296 return target;
6297 break;
6299 case BUILT_IN_CLASSIFY_TYPE:
6300 return expand_builtin_classify_type (exp);
6302 case BUILT_IN_CONSTANT_P:
6303 return const0_rtx;
6305 case BUILT_IN_FRAME_ADDRESS:
6306 case BUILT_IN_RETURN_ADDRESS:
6307 return expand_builtin_frame_address (fndecl, exp);
6309 /* Returns the address of the area where the structure is returned.
6310 0 otherwise. */
6311 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6312 if (call_expr_nargs (exp) != 0
6313 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6314 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6315 return const0_rtx;
6316 else
6317 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6319 case BUILT_IN_ALLOCA:
6320 target = expand_builtin_alloca (exp, target);
6321 if (target)
6322 return target;
6323 break;
6325 case BUILT_IN_STACK_SAVE:
6326 return expand_stack_save ();
6328 case BUILT_IN_STACK_RESTORE:
6329 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6330 return const0_rtx;
6332 case BUILT_IN_BSWAP32:
6333 case BUILT_IN_BSWAP64:
6334 target = expand_builtin_bswap (exp, target, subtarget);
6336 if (target)
6337 return target;
6338 break;
6340 CASE_INT_FN (BUILT_IN_FFS):
6341 case BUILT_IN_FFSIMAX:
6342 target = expand_builtin_unop (target_mode, exp, target,
6343 subtarget, ffs_optab);
6344 if (target)
6345 return target;
6346 break;
6348 CASE_INT_FN (BUILT_IN_CLZ):
6349 case BUILT_IN_CLZIMAX:
6350 target = expand_builtin_unop (target_mode, exp, target,
6351 subtarget, clz_optab);
6352 if (target)
6353 return target;
6354 break;
6356 CASE_INT_FN (BUILT_IN_CTZ):
6357 case BUILT_IN_CTZIMAX:
6358 target = expand_builtin_unop (target_mode, exp, target,
6359 subtarget, ctz_optab);
6360 if (target)
6361 return target;
6362 break;
6364 CASE_INT_FN (BUILT_IN_POPCOUNT):
6365 case BUILT_IN_POPCOUNTIMAX:
6366 target = expand_builtin_unop (target_mode, exp, target,
6367 subtarget, popcount_optab);
6368 if (target)
6369 return target;
6370 break;
6372 CASE_INT_FN (BUILT_IN_PARITY):
6373 case BUILT_IN_PARITYIMAX:
6374 target = expand_builtin_unop (target_mode, exp, target,
6375 subtarget, parity_optab);
6376 if (target)
6377 return target;
6378 break;
6380 case BUILT_IN_STRLEN:
6381 target = expand_builtin_strlen (exp, target, target_mode);
6382 if (target)
6383 return target;
6384 break;
6386 case BUILT_IN_STRCPY:
6387 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6388 if (target)
6389 return target;
6390 break;
6392 case BUILT_IN_STRNCPY:
6393 target = expand_builtin_strncpy (exp, target, mode);
6394 if (target)
6395 return target;
6396 break;
6398 case BUILT_IN_STPCPY:
6399 target = expand_builtin_stpcpy (exp, target, mode);
6400 if (target)
6401 return target;
6402 break;
6404 case BUILT_IN_STRCAT:
6405 target = expand_builtin_strcat (fndecl, exp, target, mode);
6406 if (target)
6407 return target;
6408 break;
6410 case BUILT_IN_STRNCAT:
6411 target = expand_builtin_strncat (exp, target, mode);
6412 if (target)
6413 return target;
6414 break;
6416 case BUILT_IN_STRSPN:
6417 target = expand_builtin_strspn (exp, target, mode);
6418 if (target)
6419 return target;
6420 break;
6422 case BUILT_IN_STRCSPN:
6423 target = expand_builtin_strcspn (exp, target, mode);
6424 if (target)
6425 return target;
6426 break;
6428 case BUILT_IN_STRSTR:
6429 target = expand_builtin_strstr (exp, target, mode);
6430 if (target)
6431 return target;
6432 break;
6434 case BUILT_IN_STRPBRK:
6435 target = expand_builtin_strpbrk (exp, target, mode);
6436 if (target)
6437 return target;
6438 break;
6440 case BUILT_IN_INDEX:
6441 case BUILT_IN_STRCHR:
6442 target = expand_builtin_strchr (exp, target, mode);
6443 if (target)
6444 return target;
6445 break;
6447 case BUILT_IN_RINDEX:
6448 case BUILT_IN_STRRCHR:
6449 target = expand_builtin_strrchr (exp, target, mode);
6450 if (target)
6451 return target;
6452 break;
6454 case BUILT_IN_MEMCPY:
6455 target = expand_builtin_memcpy (exp, target, mode);
6456 if (target)
6457 return target;
6458 break;
6460 case BUILT_IN_MEMPCPY:
6461 target = expand_builtin_mempcpy (exp, target, mode);
6462 if (target)
6463 return target;
6464 break;
6466 case BUILT_IN_MEMMOVE:
6467 target = expand_builtin_memmove (exp, target, mode, ignore);
6468 if (target)
6469 return target;
6470 break;
6472 case BUILT_IN_BCOPY:
6473 target = expand_builtin_bcopy (exp, ignore);
6474 if (target)
6475 return target;
6476 break;
6478 case BUILT_IN_MEMSET:
6479 target = expand_builtin_memset (exp, target, mode);
6480 if (target)
6481 return target;
6482 break;
6484 case BUILT_IN_BZERO:
6485 target = expand_builtin_bzero (exp);
6486 if (target)
6487 return target;
6488 break;
6490 case BUILT_IN_STRCMP:
6491 target = expand_builtin_strcmp (exp, target, mode);
6492 if (target)
6493 return target;
6494 break;
6496 case BUILT_IN_STRNCMP:
6497 target = expand_builtin_strncmp (exp, target, mode);
6498 if (target)
6499 return target;
6500 break;
6502 case BUILT_IN_MEMCHR:
6503 target = expand_builtin_memchr (exp, target, mode);
6504 if (target)
6505 return target;
6506 break;
6508 case BUILT_IN_BCMP:
6509 case BUILT_IN_MEMCMP:
6510 target = expand_builtin_memcmp (exp, target, mode);
6511 if (target)
6512 return target;
6513 break;
6515 case BUILT_IN_SETJMP:
6516 /* This should have been lowered to the builtins below. */
6517 gcc_unreachable ();
6519 case BUILT_IN_SETJMP_SETUP:
6520 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6521 and the receiver label. */
6522 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6524 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6525 VOIDmode, EXPAND_NORMAL);
6526 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6527 rtx label_r = label_rtx (label);
6529 /* This is copied from the handling of non-local gotos. */
6530 expand_builtin_setjmp_setup (buf_addr, label_r);
6531 nonlocal_goto_handler_labels
6532 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6533 nonlocal_goto_handler_labels);
6534 /* ??? Do not let expand_label treat us as such since we would
6535 not want to be both on the list of non-local labels and on
6536 the list of forced labels. */
6537 FORCED_LABEL (label) = 0;
6538 return const0_rtx;
6540 break;
6542 case BUILT_IN_SETJMP_DISPATCHER:
6543 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6544 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6546 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6547 rtx label_r = label_rtx (label);
6549 /* Remove the dispatcher label from the list of non-local labels
6550 since the receiver labels have been added to it above. */
6551 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6552 return const0_rtx;
6554 break;
6556 case BUILT_IN_SETJMP_RECEIVER:
6557 /* __builtin_setjmp_receiver is passed the receiver label. */
6558 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6560 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6561 rtx label_r = label_rtx (label);
6563 expand_builtin_setjmp_receiver (label_r);
6564 return const0_rtx;
6566 break;
6568 /* __builtin_longjmp is passed a pointer to an array of five words.
6569 It's similar to the C library longjmp function but works with
6570 __builtin_setjmp above. */
6571 case BUILT_IN_LONGJMP:
6572 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6574 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6575 VOIDmode, EXPAND_NORMAL);
6576 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6578 if (value != const1_rtx)
6580 error ("%<__builtin_longjmp%> second argument must be 1");
6581 return const0_rtx;
6584 expand_builtin_longjmp (buf_addr, value);
6585 return const0_rtx;
6587 break;
6589 case BUILT_IN_NONLOCAL_GOTO:
6590 target = expand_builtin_nonlocal_goto (exp);
6591 if (target)
6592 return target;
6593 break;
6595 /* This updates the setjmp buffer that is its argument with the value
6596 of the current stack pointer. */
6597 case BUILT_IN_UPDATE_SETJMP_BUF:
6598 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6600 rtx buf_addr
6601 = expand_normal (CALL_EXPR_ARG (exp, 0));
6603 expand_builtin_update_setjmp_buf (buf_addr);
6604 return const0_rtx;
6606 break;
6608 case BUILT_IN_TRAP:
6609 expand_builtin_trap ();
6610 return const0_rtx;
6612 case BUILT_IN_PRINTF:
6613 target = expand_builtin_printf (exp, target, mode, false);
6614 if (target)
6615 return target;
6616 break;
6618 case BUILT_IN_PRINTF_UNLOCKED:
6619 target = expand_builtin_printf (exp, target, mode, true);
6620 if (target)
6621 return target;
6622 break;
6624 case BUILT_IN_FPUTS:
6625 target = expand_builtin_fputs (exp, target, false);
6626 if (target)
6627 return target;
6628 break;
6629 case BUILT_IN_FPUTS_UNLOCKED:
6630 target = expand_builtin_fputs (exp, target, true);
6631 if (target)
6632 return target;
6633 break;
6635 case BUILT_IN_FPRINTF:
6636 target = expand_builtin_fprintf (exp, target, mode, false);
6637 if (target)
6638 return target;
6639 break;
6641 case BUILT_IN_FPRINTF_UNLOCKED:
6642 target = expand_builtin_fprintf (exp, target, mode, true);
6643 if (target)
6644 return target;
6645 break;
6647 case BUILT_IN_SPRINTF:
6648 target = expand_builtin_sprintf (exp, target, mode);
6649 if (target)
6650 return target;
6651 break;
6653 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6654 case BUILT_IN_SIGNBITD32:
6655 case BUILT_IN_SIGNBITD64:
6656 case BUILT_IN_SIGNBITD128:
6657 target = expand_builtin_signbit (exp, target);
6658 if (target)
6659 return target;
6660 break;
6662 /* Various hooks for the DWARF 2 __throw routine. */
6663 case BUILT_IN_UNWIND_INIT:
6664 expand_builtin_unwind_init ();
6665 return const0_rtx;
6666 case BUILT_IN_DWARF_CFA:
6667 return virtual_cfa_rtx;
6668 #ifdef DWARF2_UNWIND_INFO
6669 case BUILT_IN_DWARF_SP_COLUMN:
6670 return expand_builtin_dwarf_sp_column ();
6671 case BUILT_IN_INIT_DWARF_REG_SIZES:
6672 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6673 return const0_rtx;
6674 #endif
6675 case BUILT_IN_FROB_RETURN_ADDR:
6676 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6677 case BUILT_IN_EXTRACT_RETURN_ADDR:
6678 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6679 case BUILT_IN_EH_RETURN:
6680 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6681 CALL_EXPR_ARG (exp, 1));
6682 return const0_rtx;
6683 #ifdef EH_RETURN_DATA_REGNO
6684 case BUILT_IN_EH_RETURN_DATA_REGNO:
6685 return expand_builtin_eh_return_data_regno (exp);
6686 #endif
6687 case BUILT_IN_EXTEND_POINTER:
6688 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6690 case BUILT_IN_VA_START:
6691 return expand_builtin_va_start (exp);
6692 case BUILT_IN_VA_END:
6693 return expand_builtin_va_end (exp);
6694 case BUILT_IN_VA_COPY:
6695 return expand_builtin_va_copy (exp);
6696 case BUILT_IN_EXPECT:
6697 return expand_builtin_expect (exp, target);
6698 case BUILT_IN_PREFETCH:
6699 expand_builtin_prefetch (exp);
6700 return const0_rtx;
6702 case BUILT_IN_PROFILE_FUNC_ENTER:
6703 return expand_builtin_profile_func (false);
6704 case BUILT_IN_PROFILE_FUNC_EXIT:
6705 return expand_builtin_profile_func (true);
6707 case BUILT_IN_INIT_TRAMPOLINE:
6708 return expand_builtin_init_trampoline (exp);
6709 case BUILT_IN_ADJUST_TRAMPOLINE:
6710 return expand_builtin_adjust_trampoline (exp);
6712 case BUILT_IN_FORK:
6713 case BUILT_IN_EXECL:
6714 case BUILT_IN_EXECV:
6715 case BUILT_IN_EXECLP:
6716 case BUILT_IN_EXECLE:
6717 case BUILT_IN_EXECVP:
6718 case BUILT_IN_EXECVE:
6719 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6720 if (target)
6721 return target;
6722 break;
6724 case BUILT_IN_FETCH_AND_ADD_1:
6725 case BUILT_IN_FETCH_AND_ADD_2:
6726 case BUILT_IN_FETCH_AND_ADD_4:
6727 case BUILT_IN_FETCH_AND_ADD_8:
6728 case BUILT_IN_FETCH_AND_ADD_16:
6729 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6730 target = expand_builtin_sync_operation (mode, exp, PLUS,
6731 false, target, ignore);
6732 if (target)
6733 return target;
6734 break;
6736 case BUILT_IN_FETCH_AND_SUB_1:
6737 case BUILT_IN_FETCH_AND_SUB_2:
6738 case BUILT_IN_FETCH_AND_SUB_4:
6739 case BUILT_IN_FETCH_AND_SUB_8:
6740 case BUILT_IN_FETCH_AND_SUB_16:
6741 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6742 target = expand_builtin_sync_operation (mode, exp, MINUS,
6743 false, target, ignore);
6744 if (target)
6745 return target;
6746 break;
6748 case BUILT_IN_FETCH_AND_OR_1:
6749 case BUILT_IN_FETCH_AND_OR_2:
6750 case BUILT_IN_FETCH_AND_OR_4:
6751 case BUILT_IN_FETCH_AND_OR_8:
6752 case BUILT_IN_FETCH_AND_OR_16:
6753 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6754 target = expand_builtin_sync_operation (mode, exp, IOR,
6755 false, target, ignore);
6756 if (target)
6757 return target;
6758 break;
6760 case BUILT_IN_FETCH_AND_AND_1:
6761 case BUILT_IN_FETCH_AND_AND_2:
6762 case BUILT_IN_FETCH_AND_AND_4:
6763 case BUILT_IN_FETCH_AND_AND_8:
6764 case BUILT_IN_FETCH_AND_AND_16:
6765 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6766 target = expand_builtin_sync_operation (mode, exp, AND,
6767 false, target, ignore);
6768 if (target)
6769 return target;
6770 break;
6772 case BUILT_IN_FETCH_AND_XOR_1:
6773 case BUILT_IN_FETCH_AND_XOR_2:
6774 case BUILT_IN_FETCH_AND_XOR_4:
6775 case BUILT_IN_FETCH_AND_XOR_8:
6776 case BUILT_IN_FETCH_AND_XOR_16:
6777 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6778 target = expand_builtin_sync_operation (mode, exp, XOR,
6779 false, target, ignore);
6780 if (target)
6781 return target;
6782 break;
6784 case BUILT_IN_FETCH_AND_NAND_1:
6785 case BUILT_IN_FETCH_AND_NAND_2:
6786 case BUILT_IN_FETCH_AND_NAND_4:
6787 case BUILT_IN_FETCH_AND_NAND_8:
6788 case BUILT_IN_FETCH_AND_NAND_16:
6789 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6790 target = expand_builtin_sync_operation (mode, exp, NOT,
6791 false, target, ignore);
6792 if (target)
6793 return target;
6794 break;
6796 case BUILT_IN_ADD_AND_FETCH_1:
6797 case BUILT_IN_ADD_AND_FETCH_2:
6798 case BUILT_IN_ADD_AND_FETCH_4:
6799 case BUILT_IN_ADD_AND_FETCH_8:
6800 case BUILT_IN_ADD_AND_FETCH_16:
6801 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6802 target = expand_builtin_sync_operation (mode, exp, PLUS,
6803 true, target, ignore);
6804 if (target)
6805 return target;
6806 break;
6808 case BUILT_IN_SUB_AND_FETCH_1:
6809 case BUILT_IN_SUB_AND_FETCH_2:
6810 case BUILT_IN_SUB_AND_FETCH_4:
6811 case BUILT_IN_SUB_AND_FETCH_8:
6812 case BUILT_IN_SUB_AND_FETCH_16:
6813 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6814 target = expand_builtin_sync_operation (mode, exp, MINUS,
6815 true, target, ignore);
6816 if (target)
6817 return target;
6818 break;
6820 case BUILT_IN_OR_AND_FETCH_1:
6821 case BUILT_IN_OR_AND_FETCH_2:
6822 case BUILT_IN_OR_AND_FETCH_4:
6823 case BUILT_IN_OR_AND_FETCH_8:
6824 case BUILT_IN_OR_AND_FETCH_16:
6825 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6826 target = expand_builtin_sync_operation (mode, exp, IOR,
6827 true, target, ignore);
6828 if (target)
6829 return target;
6830 break;
6832 case BUILT_IN_AND_AND_FETCH_1:
6833 case BUILT_IN_AND_AND_FETCH_2:
6834 case BUILT_IN_AND_AND_FETCH_4:
6835 case BUILT_IN_AND_AND_FETCH_8:
6836 case BUILT_IN_AND_AND_FETCH_16:
6837 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6838 target = expand_builtin_sync_operation (mode, exp, AND,
6839 true, target, ignore);
6840 if (target)
6841 return target;
6842 break;
6844 case BUILT_IN_XOR_AND_FETCH_1:
6845 case BUILT_IN_XOR_AND_FETCH_2:
6846 case BUILT_IN_XOR_AND_FETCH_4:
6847 case BUILT_IN_XOR_AND_FETCH_8:
6848 case BUILT_IN_XOR_AND_FETCH_16:
6849 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6850 target = expand_builtin_sync_operation (mode, exp, XOR,
6851 true, target, ignore);
6852 if (target)
6853 return target;
6854 break;
6856 case BUILT_IN_NAND_AND_FETCH_1:
6857 case BUILT_IN_NAND_AND_FETCH_2:
6858 case BUILT_IN_NAND_AND_FETCH_4:
6859 case BUILT_IN_NAND_AND_FETCH_8:
6860 case BUILT_IN_NAND_AND_FETCH_16:
6861 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6862 target = expand_builtin_sync_operation (mode, exp, NOT,
6863 true, target, ignore);
6864 if (target)
6865 return target;
6866 break;
6868 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6869 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6870 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6871 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6872 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6873 if (mode == VOIDmode)
6874 mode = TYPE_MODE (boolean_type_node);
6875 if (!target || !register_operand (target, mode))
6876 target = gen_reg_rtx (mode);
6878 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6879 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6880 if (target)
6881 return target;
6882 break;
6884 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6885 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6886 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6887 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6888 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6889 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6890 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6891 if (target)
6892 return target;
6893 break;
6895 case BUILT_IN_LOCK_TEST_AND_SET_1:
6896 case BUILT_IN_LOCK_TEST_AND_SET_2:
6897 case BUILT_IN_LOCK_TEST_AND_SET_4:
6898 case BUILT_IN_LOCK_TEST_AND_SET_8:
6899 case BUILT_IN_LOCK_TEST_AND_SET_16:
6900 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6901 target = expand_builtin_lock_test_and_set (mode, exp, target);
6902 if (target)
6903 return target;
6904 break;
6906 case BUILT_IN_LOCK_RELEASE_1:
6907 case BUILT_IN_LOCK_RELEASE_2:
6908 case BUILT_IN_LOCK_RELEASE_4:
6909 case BUILT_IN_LOCK_RELEASE_8:
6910 case BUILT_IN_LOCK_RELEASE_16:
6911 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6912 expand_builtin_lock_release (mode, exp);
6913 return const0_rtx;
6915 case BUILT_IN_SYNCHRONIZE:
6916 expand_builtin_synchronize ();
6917 return const0_rtx;
6919 case BUILT_IN_OBJECT_SIZE:
6920 return expand_builtin_object_size (exp);
6922 case BUILT_IN_MEMCPY_CHK:
6923 case BUILT_IN_MEMPCPY_CHK:
6924 case BUILT_IN_MEMMOVE_CHK:
6925 case BUILT_IN_MEMSET_CHK:
6926 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6927 if (target)
6928 return target;
6929 break;
6931 case BUILT_IN_STRCPY_CHK:
6932 case BUILT_IN_STPCPY_CHK:
6933 case BUILT_IN_STRNCPY_CHK:
6934 case BUILT_IN_STRCAT_CHK:
6935 case BUILT_IN_STRNCAT_CHK:
6936 case BUILT_IN_SNPRINTF_CHK:
6937 case BUILT_IN_VSNPRINTF_CHK:
6938 maybe_emit_chk_warning (exp, fcode);
6939 break;
6941 case BUILT_IN_SPRINTF_CHK:
6942 case BUILT_IN_VSPRINTF_CHK:
6943 maybe_emit_sprintf_chk_warning (exp, fcode);
6944 break;
6946 default: /* just do library call, if unknown builtin */
6947 break;
6950 /* The switch statement above can drop through to cause the function
6951 to be called normally. */
6952 return expand_call (exp, target, ignore);
6955 /* Determine whether a tree node represents a call to a built-in
6956 function. If the tree T is a call to a built-in function with
6957 the right number of arguments of the appropriate types, return
6958 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6959 Otherwise the return value is END_BUILTINS. */
6961 enum built_in_function
6962 builtin_mathfn_code (const_tree t)
6964 const_tree fndecl, arg, parmlist;
6965 const_tree argtype, parmtype;
6966 const_call_expr_arg_iterator iter;
6968 if (TREE_CODE (t) != CALL_EXPR
6969 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6970 return END_BUILTINS;
6972 fndecl = get_callee_fndecl (t);
6973 if (fndecl == NULL_TREE
6974 || TREE_CODE (fndecl) != FUNCTION_DECL
6975 || ! DECL_BUILT_IN (fndecl)
6976 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6977 return END_BUILTINS;
6979 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6980 init_const_call_expr_arg_iterator (t, &iter);
6981 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6983 /* If a function doesn't take a variable number of arguments,
6984 the last element in the list will have type `void'. */
6985 parmtype = TREE_VALUE (parmlist);
6986 if (VOID_TYPE_P (parmtype))
6988 if (more_const_call_expr_args_p (&iter))
6989 return END_BUILTINS;
6990 return DECL_FUNCTION_CODE (fndecl);
6993 if (! more_const_call_expr_args_p (&iter))
6994 return END_BUILTINS;
6996 arg = next_const_call_expr_arg (&iter);
6997 argtype = TREE_TYPE (arg);
6999 if (SCALAR_FLOAT_TYPE_P (parmtype))
7001 if (! SCALAR_FLOAT_TYPE_P (argtype))
7002 return END_BUILTINS;
7004 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7006 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7007 return END_BUILTINS;
7009 else if (POINTER_TYPE_P (parmtype))
7011 if (! POINTER_TYPE_P (argtype))
7012 return END_BUILTINS;
7014 else if (INTEGRAL_TYPE_P (parmtype))
7016 if (! INTEGRAL_TYPE_P (argtype))
7017 return END_BUILTINS;
7019 else
7020 return END_BUILTINS;
7023 /* Variable-length argument list. */
7024 return DECL_FUNCTION_CODE (fndecl);
7027 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7028 evaluate to a constant. */
7030 static tree
7031 fold_builtin_constant_p (tree arg)
7033 /* We return 1 for a numeric type that's known to be a constant
7034 value at compile-time or for an aggregate type that's a
7035 literal constant. */
7036 STRIP_NOPS (arg);
7038 /* If we know this is a constant, return the constant one. */
7039 if (CONSTANT_CLASS_P (arg)
7040 || (TREE_CODE (arg) == CONSTRUCTOR
7041 && TREE_CONSTANT (arg)))
7042 return integer_one_node;
7043 if (TREE_CODE (arg) == ADDR_EXPR)
7045 tree op = TREE_OPERAND (arg, 0);
7046 if (TREE_CODE (op) == STRING_CST
7047 || (TREE_CODE (op) == ARRAY_REF
7048 && integer_zerop (TREE_OPERAND (op, 1))
7049 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7050 return integer_one_node;
7053 /* If this expression has side effects, show we don't know it to be a
7054 constant. Likewise if it's a pointer or aggregate type since in
7055 those cases we only want literals, since those are only optimized
7056 when generating RTL, not later.
7057 And finally, if we are compiling an initializer, not code, we
7058 need to return a definite result now; there's not going to be any
7059 more optimization done. */
7060 if (TREE_SIDE_EFFECTS (arg)
7061 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7062 || POINTER_TYPE_P (TREE_TYPE (arg))
7063 || cfun == 0
7064 || folding_initializer)
7065 return integer_zero_node;
7067 return NULL_TREE;
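/* A few concrete outcomes of the rules above (illustrative):

     __builtin_constant_p (42)     -> 1   (a constant)
     __builtin_constant_p ("abc")  -> 1   (address of a string literal)
     __builtin_constant_p (i++)    -> 0   (side effects)
     __builtin_constant_p (i)      ->     left alone (NULL_TREE) inside a
                                          function, so later passes may
                                          still resolve it.  */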
7070 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7071 return it as a truthvalue. */
7073 static tree
7074 build_builtin_expect_predicate (tree pred, tree expected)
7076 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7078 fn = built_in_decls[BUILT_IN_EXPECT];
7079 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7080 ret_type = TREE_TYPE (TREE_TYPE (fn));
7081 pred_type = TREE_VALUE (arg_types);
7082 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7084 pred = fold_convert (pred_type, pred);
7085 expected = fold_convert (expected_type, expected);
7086 call_expr = build_call_expr (fn, 2, pred, expected);
7088 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7089 build_int_cst (ret_type, 0));
7092 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7093 NULL_TREE if no simplification is possible. */
7095 static tree
7096 fold_builtin_expect (tree arg0, tree arg1)
7098 tree inner, fndecl;
7099 enum tree_code code;
7101 /* If this is a builtin_expect within a builtin_expect keep the
7102 inner one. See through a comparison against a constant. It
7103 might have been added to create a truthvalue. */
7104 inner = arg0;
7105 if (COMPARISON_CLASS_P (inner)
7106 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7107 inner = TREE_OPERAND (inner, 0);
7109 if (TREE_CODE (inner) == CALL_EXPR
7110 && (fndecl = get_callee_fndecl (inner))
7111 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7112 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7113 return arg0;
7115 /* Distribute the expected value over short-circuiting operators.
7116 See through the cast from truthvalue_type_node to long. */
7117 inner = arg0;
7118 while (TREE_CODE (inner) == NOP_EXPR
7119 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7120 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7121 inner = TREE_OPERAND (inner, 0);
7123 code = TREE_CODE (inner);
7124 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7126 tree op0 = TREE_OPERAND (inner, 0);
7127 tree op1 = TREE_OPERAND (inner, 1);
7129 op0 = build_builtin_expect_predicate (op0, arg1);
7130 op1 = build_builtin_expect_predicate (op1, arg1);
7131 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7133 return fold_convert (TREE_TYPE (arg0), inner);
7136 /* If the argument isn't invariant then there's nothing else we can do. */
7137 if (!TREE_CONSTANT (arg0))
7138 return NULL_TREE;
7140 /* If we expect that a comparison against the argument will fold to
7141 a constant return the constant. In practice, this means a true
7142 constant or the address of a non-weak symbol. */
7143 inner = arg0;
7144 STRIP_NOPS (inner);
7145 if (TREE_CODE (inner) == ADDR_EXPR)
7149 inner = TREE_OPERAND (inner, 0);
7151 while (TREE_CODE (inner) == COMPONENT_REF
7152 || TREE_CODE (inner) == ARRAY_REF);
7153 if (DECL_P (inner) && DECL_WEAK (inner))
7154 return NULL_TREE;
7157 /* Otherwise, ARG0 already has the proper type for the return value. */
7158 return arg0;
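/* For illustration (an expository sketch, not part of the original
   source; the variable names are placeholders): given something like

     if (__builtin_expect (a > 0 && b > 0, 1))
       ...

   the distribution above rewrites the hint roughly as

     if ((__builtin_expect ((long) (a > 0), 1) != 0)
         && (__builtin_expect ((long) (b > 0), 1) != 0))
       ...

   so each short-circuited operand carries its own expectation.  */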
7161 /* Fold a call to __builtin_classify_type with argument ARG. */
7163 static tree
7164 fold_builtin_classify_type (tree arg)
7166 if (arg == 0)
7167 return build_int_cst (NULL_TREE, no_type_class);
7169 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7172 /* Fold a call to __builtin_strlen with argument ARG. */
7174 static tree
7175 fold_builtin_strlen (tree arg)
7177 if (!validate_arg (arg, POINTER_TYPE))
7178 return NULL_TREE;
7179 else
7181 tree len = c_strlen (arg, 0);
7183 if (len)
7185 /* Convert from the internal "sizetype" type to "size_t". */
7186 if (size_type_node)
7187 len = fold_convert (size_type_node, len);
7188 return len;
7191 return NULL_TREE;
7195 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7197 static tree
7198 fold_builtin_inf (tree type, int warn)
7200 REAL_VALUE_TYPE real;
7202 /* __builtin_inff is intended to be usable to define INFINITY on all
7203 targets. If an infinity is not available, INFINITY expands "to a
7204 positive constant of type float that overflows at translation
7205 time", footnote "In this case, using INFINITY will violate the
7206 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7207 Thus we pedwarn to ensure this constraint violation is
7208 diagnosed. */
7209 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7210 pedwarn ("target format does not support infinity");
7212 real_inf (&real);
7213 return build_real (type, real);
7216 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7218 static tree
7219 fold_builtin_nan (tree arg, tree type, int quiet)
7221 REAL_VALUE_TYPE real;
7222 const char *str;
7224 if (!validate_arg (arg, POINTER_TYPE))
7225 return NULL_TREE;
7226 str = c_getstr (arg);
7227 if (!str)
7228 return NULL_TREE;
7230 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7231 return NULL_TREE;
7233 return build_real (type, real);
7236 /* Return true if the floating point expression T has an integer value.
7237 We also allow +Inf, -Inf and NaN to be considered integer values. */
7239 static bool
7240 integer_valued_real_p (tree t)
7242 switch (TREE_CODE (t))
7244 case FLOAT_EXPR:
7245 return true;
7247 case ABS_EXPR:
7248 case SAVE_EXPR:
7249 return integer_valued_real_p (TREE_OPERAND (t, 0));
7251 case COMPOUND_EXPR:
7252 case MODIFY_EXPR:
7253 case BIND_EXPR:
7254 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
7256 case PLUS_EXPR:
7257 case MINUS_EXPR:
7258 case MULT_EXPR:
7259 case MIN_EXPR:
7260 case MAX_EXPR:
7261 return integer_valued_real_p (TREE_OPERAND (t, 0))
7262 && integer_valued_real_p (TREE_OPERAND (t, 1));
7264 case COND_EXPR:
7265 return integer_valued_real_p (TREE_OPERAND (t, 1))
7266 && integer_valued_real_p (TREE_OPERAND (t, 2));
7268 case REAL_CST:
7269 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7271 case NOP_EXPR:
7273 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7274 if (TREE_CODE (type) == INTEGER_TYPE)
7275 return true;
7276 if (TREE_CODE (type) == REAL_TYPE)
7277 return integer_valued_real_p (TREE_OPERAND (t, 0));
7278 break;
7281 case CALL_EXPR:
7282 switch (builtin_mathfn_code (t))
7284 CASE_FLT_FN (BUILT_IN_CEIL):
7285 CASE_FLT_FN (BUILT_IN_FLOOR):
7286 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7287 CASE_FLT_FN (BUILT_IN_RINT):
7288 CASE_FLT_FN (BUILT_IN_ROUND):
7289 CASE_FLT_FN (BUILT_IN_TRUNC):
7290 return true;
7292 CASE_FLT_FN (BUILT_IN_FMIN):
7293 CASE_FLT_FN (BUILT_IN_FMAX):
7294 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7295 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7297 default:
7298 break;
7300 break;
7302 default:
7303 break;
7305 return false;
7308 /* FNDECL is assumed to be a builtin where truncation can be propagated
7309 across (for instance floor((double)f) == (double)floorf (f)).
7310 Do the transformation for a call with argument ARG. */
7312 static tree
7313 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7315 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7317 if (!validate_arg (arg, REAL_TYPE))
7318 return NULL_TREE;
7320 /* Integer rounding functions are idempotent. */
7321 if (fcode == builtin_mathfn_code (arg))
7322 return arg;
7324 /* If argument is already integer valued, and we don't need to worry
7325 about setting errno, there's no need to perform rounding. */
7326 if (! flag_errno_math && integer_valued_real_p (arg))
7327 return arg;
7329 if (optimize)
7331 tree arg0 = strip_float_extensions (arg);
7332 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7333 tree newtype = TREE_TYPE (arg0);
7334 tree decl;
7336 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7337 && (decl = mathfn_built_in (newtype, fcode)))
7338 return fold_convert (ftype,
7339 build_call_expr (decl, 1,
7340 fold_convert (newtype, arg0)));
7342 return NULL_TREE;
7345 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7346 the argument, for instance lround((double)f) -> lroundf (f).
7347 Do the transformation for a call with argument ARG. */
7349 static tree
7350 fold_fixed_mathfn (tree fndecl, tree arg)
7352 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7354 if (!validate_arg (arg, REAL_TYPE))
7355 return NULL_TREE;
7357 /* If argument is already integer valued, and we don't need to worry
7358 about setting errno, there's no need to perform rounding. */
7359 if (! flag_errno_math && integer_valued_real_p (arg))
7360 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7362 if (optimize)
7364 tree ftype = TREE_TYPE (arg);
7365 tree arg0 = strip_float_extensions (arg);
7366 tree newtype = TREE_TYPE (arg0);
7367 tree decl;
7369 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7370 && (decl = mathfn_built_in (newtype, fcode)))
7371 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7374 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7375 sizeof (long long) == sizeof (long). */
7376 if (TYPE_PRECISION (long_long_integer_type_node)
7377 == TYPE_PRECISION (long_integer_type_node))
7379 tree newfn = NULL_TREE;
7380 switch (fcode)
7382 CASE_FLT_FN (BUILT_IN_LLCEIL):
7383 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7384 break;
7386 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7387 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7388 break;
7390 CASE_FLT_FN (BUILT_IN_LLROUND):
7391 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7392 break;
7394 CASE_FLT_FN (BUILT_IN_LLRINT):
7395 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7396 break;
7398 default:
7399 break;
7402 if (newfn)
7404 tree newcall = build_call_expr (newfn, 1, arg);
7405 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7409 return NULL_TREE;
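/* Illustrative sketch, added for exposition (not in the original file):
   on an LP64 target, where long and long long have the same precision,
   a call such as

     long long r = llround (x);

   is canonicalized by the code above as if it were

     long long r = (long long) lround (x);

   and likewise for llceil, llfloor and llrint.  */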
7412 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7413 return type. Return NULL_TREE if no simplification can be made. */
7415 static tree
7416 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7418 tree res;
7420 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7421 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7422 return NULL_TREE;
7424 /* Calculate the result when the argument is a constant. */
7425 if (TREE_CODE (arg) == COMPLEX_CST
7426 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7427 type, mpfr_hypot)))
7428 return res;
7430 if (TREE_CODE (arg) == COMPLEX_EXPR)
7432 tree real = TREE_OPERAND (arg, 0);
7433 tree imag = TREE_OPERAND (arg, 1);
7435 /* If either part is zero, cabs is fabs of the other. */
7436 if (real_zerop (real))
7437 return fold_build1 (ABS_EXPR, type, imag);
7438 if (real_zerop (imag))
7439 return fold_build1 (ABS_EXPR, type, real);
7441 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7442 if (flag_unsafe_math_optimizations
7443 && operand_equal_p (real, imag, OEP_PURE_SAME))
7445 const REAL_VALUE_TYPE sqrt2_trunc
7446 = real_value_truncate (TYPE_MODE (type),
7447 *get_real_const (rv_sqrt2));
7448 STRIP_NOPS (real);
7449 return fold_build2 (MULT_EXPR, type,
7450 fold_build1 (ABS_EXPR, type, real),
7451 build_real (type, sqrt2_trunc));
7455 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7456 if (TREE_CODE (arg) == NEGATE_EXPR
7457 || TREE_CODE (arg) == CONJ_EXPR)
7458 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7460 /* Don't do this when optimizing for size. */
7461 if (flag_unsafe_math_optimizations
7462 && optimize && !optimize_size)
7464 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7466 if (sqrtfn != NULL_TREE)
7468 tree rpart, ipart, result;
7470 arg = builtin_save_expr (arg);
7472 rpart = fold_build1 (REALPART_EXPR, type, arg);
7473 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7475 rpart = builtin_save_expr (rpart);
7476 ipart = builtin_save_expr (ipart);
7478 result = fold_build2 (PLUS_EXPR, type,
7479 fold_build2 (MULT_EXPR, type,
7480 rpart, rpart),
7481 fold_build2 (MULT_EXPR, type,
7482 ipart, ipart));
7484 return build_call_expr (sqrtfn, 1, result);
7488 return NULL_TREE;
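/* For illustration (an expository sketch, not part of the original
   source): with -funsafe-math-optimizations, when optimizing but not
   for size, a call

     double d = cabs (z);

   on a complex double Z with no special structure is lowered roughly to

     double r = __real__ z, i = __imag__ z;
     double d = sqrt (r * r + i * i);

   while cabs (-z) and cabs (conj (z)) simply become cabs (z).  */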
7491 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7492 Return NULL_TREE if no simplification can be made. */
7494 static tree
7495 fold_builtin_sqrt (tree arg, tree type)
7498 enum built_in_function fcode;
7499 tree res;
7501 if (!validate_arg (arg, REAL_TYPE))
7502 return NULL_TREE;
7504 /* Calculate the result when the argument is a constant. */
7505 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7506 return res;
7508 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7509 fcode = builtin_mathfn_code (arg);
7510 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7512 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7513 arg = fold_build2 (MULT_EXPR, type,
7514 CALL_EXPR_ARG (arg, 0),
7515 build_real (type, dconsthalf));
7516 return build_call_expr (expfn, 1, arg);
7519 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7520 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7522 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7524 if (powfn)
7526 tree arg0 = CALL_EXPR_ARG (arg, 0);
7527 tree tree_root;
7528 /* The inner root was either sqrt or cbrt. */
7529 REAL_VALUE_TYPE dconstroot =
7530 BUILTIN_SQRT_P (fcode) ? dconsthalf : *get_real_const (rv_third);
7532 /* Adjust for the outer root. */
7533 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7534 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7535 tree_root = build_real (type, dconstroot);
7536 return build_call_expr (powfn, 2, arg0, tree_root);
7540 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7541 if (flag_unsafe_math_optimizations
7542 && (fcode == BUILT_IN_POW
7543 || fcode == BUILT_IN_POWF
7544 || fcode == BUILT_IN_POWL))
7546 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7547 tree arg0 = CALL_EXPR_ARG (arg, 0);
7548 tree arg1 = CALL_EXPR_ARG (arg, 1);
7549 tree narg1;
7550 if (!tree_expr_nonnegative_p (arg0))
7551 arg0 = build1 (ABS_EXPR, type, arg0);
7552 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7553 build_real (type, dconsthalf));
7554 return build_call_expr (powfn, 2, arg0, narg1);
7557 return NULL_TREE;
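/* Illustrative sketch, added for exposition (not in the original file):
   under -funsafe-math-optimizations the rules above rewrite, for
   example,

     sqrt (exp (x))      into  exp (x * 0.5)
     sqrt (cbrt (x))     into  pow (x, 1.0/6.0)
     sqrt (pow (x, y))   into  pow (fabs (x), y * 0.5)

   where the fabs is omitted when x is known to be nonnegative.  */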
7560 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7561 Return NULL_TREE if no simplification can be made. */
7563 static tree
7564 fold_builtin_cbrt (tree arg, tree type)
7566 const enum built_in_function fcode = builtin_mathfn_code (arg);
7567 tree res;
7569 if (!validate_arg (arg, REAL_TYPE))
7570 return NULL_TREE;
7572 /* Calculate the result when the argument is a constant. */
7573 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7574 return res;
7576 if (flag_unsafe_math_optimizations)
7578 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7579 if (BUILTIN_EXPONENT_P (fcode))
7581 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7582 const REAL_VALUE_TYPE third_trunc =
7583 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_third));
7584 arg = fold_build2 (MULT_EXPR, type,
7585 CALL_EXPR_ARG (arg, 0),
7586 build_real (type, third_trunc));
7587 return build_call_expr (expfn, 1, arg);
7590 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7591 if (BUILTIN_SQRT_P (fcode))
7593 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7595 if (powfn)
7597 tree arg0 = CALL_EXPR_ARG (arg, 0);
7598 tree tree_root;
7599 REAL_VALUE_TYPE dconstroot = *get_real_const (rv_third);
7601 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7602 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7603 tree_root = build_real (type, dconstroot);
7604 return build_call_expr (powfn, 2, arg0, tree_root);
7608 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7609 if (BUILTIN_CBRT_P (fcode))
7611 tree arg0 = CALL_EXPR_ARG (arg, 0);
7612 if (tree_expr_nonnegative_p (arg0))
7614 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7616 if (powfn)
7618 tree tree_root;
7619 REAL_VALUE_TYPE dconstroot;
7621 real_arithmetic (&dconstroot, MULT_EXPR,
7622 get_real_const (rv_third),
7623 get_real_const (rv_third));
7624 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7625 tree_root = build_real (type, dconstroot);
7626 return build_call_expr (powfn, 2, arg0, tree_root);
7631 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7632 if (fcode == BUILT_IN_POW
7633 || fcode == BUILT_IN_POWF
7634 || fcode == BUILT_IN_POWL)
7636 tree arg00 = CALL_EXPR_ARG (arg, 0);
7637 tree arg01 = CALL_EXPR_ARG (arg, 1);
7638 if (tree_expr_nonnegative_p (arg00))
7640 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7641 const REAL_VALUE_TYPE dconstroot
7642 = real_value_truncate (TYPE_MODE (type),
7643 *get_real_const (rv_third));
7644 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7645 build_real (type, dconstroot));
7646 return build_call_expr (powfn, 2, arg00, narg01);
7650 return NULL_TREE;
7653 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7654 TYPE is the type of the return value. Return NULL_TREE if no
7655 simplification can be made. */
7657 static tree
7658 fold_builtin_cos (tree arg, tree type, tree fndecl)
7660 tree res, narg;
7662 if (!validate_arg (arg, REAL_TYPE))
7663 return NULL_TREE;
7665 /* Calculate the result when the argument is a constant. */
7666 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7667 return res;
7669 /* Optimize cos(-x) into cos (x). */
7670 if ((narg = fold_strip_sign_ops (arg)))
7671 return build_call_expr (fndecl, 1, narg);
7673 return NULL_TREE;
7676 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7677 Return NULL_TREE if no simplification can be made. */
7679 static tree
7680 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7682 if (validate_arg (arg, REAL_TYPE))
7684 tree res, narg;
7686 /* Calculate the result when the argument is a constant. */
7687 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7688 return res;
7690 /* Optimize cosh(-x) into cosh (x). */
7691 if ((narg = fold_strip_sign_ops (arg)))
7692 return build_call_expr (fndecl, 1, narg);
7695 return NULL_TREE;
7698 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7699 Return NULL_TREE if no simplification can be made. */
7701 static tree
7702 fold_builtin_tan (tree arg, tree type)
7704 enum built_in_function fcode;
7705 tree res;
7707 if (!validate_arg (arg, REAL_TYPE))
7708 return NULL_TREE;
7710 /* Calculate the result when the argument is a constant. */
7711 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7712 return res;
7714 /* Optimize tan(atan(x)) = x. */
7715 fcode = builtin_mathfn_code (arg);
7716 if (flag_unsafe_math_optimizations
7717 && (fcode == BUILT_IN_ATAN
7718 || fcode == BUILT_IN_ATANF
7719 || fcode == BUILT_IN_ATANL))
7720 return CALL_EXPR_ARG (arg, 0);
7722 return NULL_TREE;
7725 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7726 NULL_TREE if no simplification can be made. */
7728 static tree
7729 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7731 tree type;
7732 tree res, fn, call;
7734 if (!validate_arg (arg0, REAL_TYPE)
7735 || !validate_arg (arg1, POINTER_TYPE)
7736 || !validate_arg (arg2, POINTER_TYPE))
7737 return NULL_TREE;
7739 type = TREE_TYPE (arg0);
7741 /* Calculate the result when the argument is a constant. */
7742 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7743 return res;
7745 /* Canonicalize sincos to cexpi. */
7746 if (!TARGET_C99_FUNCTIONS)
7747 return NULL_TREE;
7748 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7749 if (!fn)
7750 return NULL_TREE;
7752 call = build_call_expr (fn, 1, arg0);
7753 call = builtin_save_expr (call);
7755 return build2 (COMPOUND_EXPR, type,
7756 build2 (MODIFY_EXPR, void_type_node,
7757 build_fold_indirect_ref (arg1),
7758 build1 (IMAGPART_EXPR, type, call)),
7759 build2 (MODIFY_EXPR, void_type_node,
7760 build_fold_indirect_ref (arg2),
7761 build1 (REALPART_EXPR, type, call)));
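/* For illustration (an expository sketch, not part of the original
   source): on targets with a C99 runtime, a call

     sincos (x, &s, &c);

   is canonicalized by the code above roughly into

     __complex__ double t = __builtin_cexpi (x);
     s = __imag__ t;
     c = __real__ t;

   so later passes only need to handle the single cexpi form.  */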
7764 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7765 NULL_TREE if no simplification can be made. */
7767 static tree
7768 fold_builtin_cexp (tree arg0, tree type)
7770 tree rtype;
7771 tree realp, imagp, ifn;
7773 if (!validate_arg (arg0, COMPLEX_TYPE))
7774 return NULL_TREE;
7776 rtype = TREE_TYPE (TREE_TYPE (arg0));
7778 /* In case we can figure out the real part of arg0 and it is constant zero,
7779 fold to cexpi. */
7780 if (!TARGET_C99_FUNCTIONS)
7781 return NULL_TREE;
7782 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7783 if (!ifn)
7784 return NULL_TREE;
7786 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7787 && real_zerop (realp))
7789 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7790 return build_call_expr (ifn, 1, narg);
7793 /* In case we can easily decompose the real and imaginary parts, split cexp
7794 into exp (r) * cexpi (i). */
7795 if (flag_unsafe_math_optimizations
7796 && realp)
7798 tree rfn, rcall, icall;
7800 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7801 if (!rfn)
7802 return NULL_TREE;
7804 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7805 if (!imagp)
7806 return NULL_TREE;
7808 icall = build_call_expr (ifn, 1, imagp);
7809 icall = builtin_save_expr (icall);
7810 rcall = build_call_expr (rfn, 1, realp);
7811 rcall = builtin_save_expr (rcall);
7812 return fold_build2 (COMPLEX_EXPR, type,
7813 fold_build2 (MULT_EXPR, rtype,
7814 rcall,
7815 fold_build1 (REALPART_EXPR, rtype, icall)),
7816 fold_build2 (MULT_EXPR, rtype,
7817 rcall,
7818 fold_build1 (IMAGPART_EXPR, rtype, icall)));
7821 return NULL_TREE;
7824 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7825 Return NULL_TREE if no simplification can be made. */
7827 static tree
7828 fold_builtin_trunc (tree fndecl, tree arg)
7830 if (!validate_arg (arg, REAL_TYPE))
7831 return NULL_TREE;
7833 /* Optimize trunc of constant value. */
7834 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7836 REAL_VALUE_TYPE r, x;
7837 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7839 x = TREE_REAL_CST (arg);
7840 real_trunc (&r, TYPE_MODE (type), &x);
7841 return build_real (type, r);
7844 return fold_trunc_transparent_mathfn (fndecl, arg);
7847 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7848 Return NULL_TREE if no simplification can be made. */
7850 static tree
7851 fold_builtin_floor (tree fndecl, tree arg)
7853 if (!validate_arg (arg, REAL_TYPE))
7854 return NULL_TREE;
7856 /* Optimize floor of constant value. */
7857 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7859 REAL_VALUE_TYPE x;
7861 x = TREE_REAL_CST (arg);
7862 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7864 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7865 REAL_VALUE_TYPE r;
7867 real_floor (&r, TYPE_MODE (type), &x);
7868 return build_real (type, r);
7872 /* Fold floor (x) where x is nonnegative to trunc (x). */
7873 if (tree_expr_nonnegative_p (arg))
7875 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7876 if (truncfn)
7877 return build_call_expr (truncfn, 1, arg);
7880 return fold_trunc_transparent_mathfn (fndecl, arg);
7883 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7884 Return NULL_TREE if no simplification can be made. */
7886 static tree
7887 fold_builtin_ceil (tree fndecl, tree arg)
7889 if (!validate_arg (arg, REAL_TYPE))
7890 return NULL_TREE;
7892 /* Optimize ceil of constant value. */
7893 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7895 REAL_VALUE_TYPE x;
7897 x = TREE_REAL_CST (arg);
7898 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7900 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7901 REAL_VALUE_TYPE r;
7903 real_ceil (&r, TYPE_MODE (type), &x);
7904 return build_real (type, r);
7908 return fold_trunc_transparent_mathfn (fndecl, arg);
7911 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7912 Return NULL_TREE if no simplification can be made. */
7914 static tree
7915 fold_builtin_round (tree fndecl, tree arg)
7917 if (!validate_arg (arg, REAL_TYPE))
7918 return NULL_TREE;
7920 /* Optimize round of constant value. */
7921 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7923 REAL_VALUE_TYPE x;
7925 x = TREE_REAL_CST (arg);
7926 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7928 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7929 REAL_VALUE_TYPE r;
7931 real_round (&r, TYPE_MODE (type), &x);
7932 return build_real (type, r);
7936 return fold_trunc_transparent_mathfn (fndecl, arg);
7939 /* Fold function call to builtin lround, lroundf or lroundl (or the
7940 corresponding long long versions) and other rounding functions. ARG
7941 is the argument to the call. Return NULL_TREE if no simplification
7942 can be made. */
7944 static tree
7945 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7947 if (!validate_arg (arg, REAL_TYPE))
7948 return NULL_TREE;
7950 /* Optimize lround of constant value. */
7951 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7953 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7955 if (real_isfinite (&x))
7957 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7958 tree ftype = TREE_TYPE (arg);
7959 unsigned HOST_WIDE_INT lo2;
7960 HOST_WIDE_INT hi, lo;
7961 REAL_VALUE_TYPE r;
7963 switch (DECL_FUNCTION_CODE (fndecl))
7965 CASE_FLT_FN (BUILT_IN_LFLOOR):
7966 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7967 real_floor (&r, TYPE_MODE (ftype), &x);
7968 break;
7970 CASE_FLT_FN (BUILT_IN_LCEIL):
7971 CASE_FLT_FN (BUILT_IN_LLCEIL):
7972 real_ceil (&r, TYPE_MODE (ftype), &x);
7973 break;
7975 CASE_FLT_FN (BUILT_IN_LROUND):
7976 CASE_FLT_FN (BUILT_IN_LLROUND):
7977 real_round (&r, TYPE_MODE (ftype), &x);
7978 break;
7980 default:
7981 gcc_unreachable ();
7984 REAL_VALUE_TO_INT (&lo, &hi, r);
7985 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7986 return build_int_cst_wide (itype, lo2, hi);
7990 switch (DECL_FUNCTION_CODE (fndecl))
7992 CASE_FLT_FN (BUILT_IN_LFLOOR):
7993 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7994 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7995 if (tree_expr_nonnegative_p (arg))
7996 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
7997 arg);
7998 break;
7999 default:;
8002 return fold_fixed_mathfn (fndecl, arg);
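/* For illustration (an expository sketch, not part of the original
   source): with a finite constant argument the switch above evaluates
   the call at compile time, for example

     lround (2.6)   -- folds to the integer constant 3
     lceil (-1.2)   -- folds to the integer constant -1
     lfloor (2.9)   -- folds to the integer constant 2

   and lfloor (x) for provably nonnegative x becomes a plain
   float-to-integer truncation (FIX_TRUNC_EXPR).  */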
8005 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8006 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8007 the argument to the call. Return NULL_TREE if no simplification can
8008 be made. */
8010 static tree
8011 fold_builtin_bitop (tree fndecl, tree arg)
8013 if (!validate_arg (arg, INTEGER_TYPE))
8014 return NULL_TREE;
8016 /* Optimize for constant argument. */
8017 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8019 HOST_WIDE_INT hi, width, result;
8020 unsigned HOST_WIDE_INT lo;
8021 tree type;
8023 type = TREE_TYPE (arg);
8024 width = TYPE_PRECISION (type);
8025 lo = TREE_INT_CST_LOW (arg);
8027 /* Clear all the bits that are beyond the type's precision. */
8028 if (width > HOST_BITS_PER_WIDE_INT)
8030 hi = TREE_INT_CST_HIGH (arg);
8031 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8032 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8034 else
8036 hi = 0;
8037 if (width < HOST_BITS_PER_WIDE_INT)
8038 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8041 switch (DECL_FUNCTION_CODE (fndecl))
8043 CASE_INT_FN (BUILT_IN_FFS):
8044 if (lo != 0)
8045 result = exact_log2 (lo & -lo) + 1;
8046 else if (hi != 0)
8047 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8048 else
8049 result = 0;
8050 break;
8052 CASE_INT_FN (BUILT_IN_CLZ):
8053 if (hi != 0)
8054 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8055 else if (lo != 0)
8056 result = width - floor_log2 (lo) - 1;
8057 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8058 result = width;
8059 break;
8061 CASE_INT_FN (BUILT_IN_CTZ):
8062 if (lo != 0)
8063 result = exact_log2 (lo & -lo);
8064 else if (hi != 0)
8065 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8066 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8067 result = width;
8068 break;
8070 CASE_INT_FN (BUILT_IN_POPCOUNT):
8071 result = 0;
8072 while (lo)
8073 result++, lo &= lo - 1;
8074 while (hi)
8075 result++, hi &= hi - 1;
8076 break;
8078 CASE_INT_FN (BUILT_IN_PARITY):
8079 result = 0;
8080 while (lo)
8081 result++, lo &= lo - 1;
8082 while (hi)
8083 result++, hi &= hi - 1;
8084 result &= 1;
8085 break;
8087 default:
8088 gcc_unreachable ();
8091 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8094 return NULL_TREE;
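/* Illustrative sketch, added for exposition (not in the original file):
   with constant operands the switch above computes the result directly,
   for example (assuming 32-bit int)

     __builtin_ffs (0x8)         -- folds to 4
     __builtin_ctz (0x8)         -- folds to 3
     __builtin_clz (1)           -- folds to 31
     __builtin_popcount (0xf0)   -- folds to 4
     __builtin_parity (0x7)      -- folds to 1  */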
8097 /* Fold function call to builtin_bswap and the long and long long
8098 variants. Return NULL_TREE if no simplification can be made. */
8099 static tree
8100 fold_builtin_bswap (tree fndecl, tree arg)
8102 if (! validate_arg (arg, INTEGER_TYPE))
8103 return NULL_TREE;
8105 /* Optimize constant value. */
8106 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8108 HOST_WIDE_INT hi, width, r_hi = 0;
8109 unsigned HOST_WIDE_INT lo, r_lo = 0;
8110 tree type;
8112 type = TREE_TYPE (arg);
8113 width = TYPE_PRECISION (type);
8114 lo = TREE_INT_CST_LOW (arg);
8115 hi = TREE_INT_CST_HIGH (arg);
8117 switch (DECL_FUNCTION_CODE (fndecl))
8119 case BUILT_IN_BSWAP32:
8120 case BUILT_IN_BSWAP64:
8122 int s;
8124 for (s = 0; s < width; s += 8)
8126 int d = width - s - 8;
8127 unsigned HOST_WIDE_INT byte;
8129 if (s < HOST_BITS_PER_WIDE_INT)
8130 byte = (lo >> s) & 0xff;
8131 else
8132 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8134 if (d < HOST_BITS_PER_WIDE_INT)
8135 r_lo |= byte << d;
8136 else
8137 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8141 break;
8143 default:
8144 gcc_unreachable ();
8147 if (width < HOST_BITS_PER_WIDE_INT)
8148 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8149 else
8150 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8153 return NULL_TREE;
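/* For illustration (an expository sketch, not part of the original
   source): with a constant operand the byte loop above yields the
   swapped constant directly, for example

     __builtin_bswap32 (0x12345678)  -- folds to 0x78563412
     __builtin_bswap64 (0xff)        -- folds to 0xff00000000000000  */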
8156 /* Return true if EXPR is the real constant contained in VALUE. */
8158 static bool
8159 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8161 STRIP_NOPS (expr);
8163 return ((TREE_CODE (expr) == REAL_CST
8164 && !TREE_OVERFLOW (expr)
8165 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8166 || (TREE_CODE (expr) == COMPLEX_CST
8167 && real_dconstp (TREE_REALPART (expr), value)
8168 && real_zerop (TREE_IMAGPART (expr))));
8171 /* A subroutine of fold_builtin to fold the various logarithmic
8172 functions. Return NULL_TREE if no simplification can be made.
8173 FUNC is the corresponding MPFR logarithm function. */
8175 static tree
8176 fold_builtin_logarithm (tree fndecl, tree arg,
8177 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8179 if (validate_arg (arg, REAL_TYPE))
8181 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8182 tree res;
8183 const enum built_in_function fcode = builtin_mathfn_code (arg);
8185 /* Optimize log(e) = 1.0. We're never passed an exact 'e';
8186 instead we'll look for 'e' truncated to MODE. So only do
8187 this if flag_unsafe_math_optimizations is set. */
8188 if (flag_unsafe_math_optimizations && func == mpfr_log)
8190 const REAL_VALUE_TYPE e_truncated =
8191 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_e));
8192 if (real_dconstp (arg, &e_truncated))
8193 return build_real (type, dconst1);
8196 /* Calculate the result when the argument is a constant. */
8197 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8198 return res;
8200 /* Special case, optimize logN(expN(x)) = x. */
8201 if (flag_unsafe_math_optimizations
8202 && ((func == mpfr_log
8203 && (fcode == BUILT_IN_EXP
8204 || fcode == BUILT_IN_EXPF
8205 || fcode == BUILT_IN_EXPL))
8206 || (func == mpfr_log2
8207 && (fcode == BUILT_IN_EXP2
8208 || fcode == BUILT_IN_EXP2F
8209 || fcode == BUILT_IN_EXP2L))
8210 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8211 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8213 /* Optimize logN(func()) for various exponential functions. We
8214 want to determine the value "x" and the power "exponent" in
8215 order to transform logN(x**exponent) into exponent*logN(x). */
8216 if (flag_unsafe_math_optimizations)
8218 tree exponent = 0, x = 0;
8220 switch (fcode)
8222 CASE_FLT_FN (BUILT_IN_EXP):
8223 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8224 x = build_real (type,
8225 real_value_truncate (TYPE_MODE (type),
8226 *get_real_const (rv_e)));
8227 exponent = CALL_EXPR_ARG (arg, 0);
8228 break;
8229 CASE_FLT_FN (BUILT_IN_EXP2):
8230 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8231 x = build_real (type, dconst2);
8232 exponent = CALL_EXPR_ARG (arg, 0);
8233 break;
8234 CASE_FLT_FN (BUILT_IN_EXP10):
8235 CASE_FLT_FN (BUILT_IN_POW10):
8236 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8238 REAL_VALUE_TYPE dconst10;
8239 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8240 x = build_real (type, dconst10);
8242 exponent = CALL_EXPR_ARG (arg, 0);
8243 break;
8244 CASE_FLT_FN (BUILT_IN_SQRT):
8245 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8246 x = CALL_EXPR_ARG (arg, 0);
8247 exponent = build_real (type, dconsthalf);
8248 break;
8249 CASE_FLT_FN (BUILT_IN_CBRT):
8250 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8251 x = CALL_EXPR_ARG (arg, 0);
8252 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8253 *get_real_const (rv_third)));
8254 break;
8255 CASE_FLT_FN (BUILT_IN_POW):
8256 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8257 x = CALL_EXPR_ARG (arg, 0);
8258 exponent = CALL_EXPR_ARG (arg, 1);
8259 break;
8260 default:
8261 break;
8264 /* Now perform the optimization. */
8265 if (x && exponent)
8267 tree logfn = build_call_expr (fndecl, 1, x);
8268 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8273 return NULL_TREE;
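/* For illustration (an expository sketch, not part of the original
   source): under -funsafe-math-optimizations the rules above rewrite,
   for example,

     log (exp (x))      into  x
     log2 (exp2 (x))    into  x
     log (sqrt (x))     into  0.5 * log (x)
     log (pow (x, y))   into  y * log (x)

   so the logN(x**exponent) forms reduce to one multiply and at most
   one logarithm call.  */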
8276 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8277 NULL_TREE if no simplification can be made. */
8279 static tree
8280 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8282 tree res, narg0, narg1;
8284 if (!validate_arg (arg0, REAL_TYPE)
8285 || !validate_arg (arg1, REAL_TYPE))
8286 return NULL_TREE;
8288 /* Calculate the result when the argument is a constant. */
8289 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8290 return res;
8292 /* If either argument to hypot has a negate or abs, strip that off.
8293 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8294 narg0 = fold_strip_sign_ops (arg0);
8295 narg1 = fold_strip_sign_ops (arg1);
8296 if (narg0 || narg1)
8298 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8299 narg1 ? narg1 : arg1);
8302 /* If either argument is zero, hypot is fabs of the other. */
8303 if (real_zerop (arg0))
8304 return fold_build1 (ABS_EXPR, type, arg1);
8305 else if (real_zerop (arg1))
8306 return fold_build1 (ABS_EXPR, type, arg0);
8308 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8309 if (flag_unsafe_math_optimizations
8310 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8312 const REAL_VALUE_TYPE sqrt2_trunc
8313 = real_value_truncate (TYPE_MODE (type), *get_real_const (rv_sqrt2));
8314 return fold_build2 (MULT_EXPR, type,
8315 fold_build1 (ABS_EXPR, type, arg0),
8316 build_real (type, sqrt2_trunc));
8319 return NULL_TREE;
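/* Illustrative sketch, added for exposition (not in the original file):
   the rules above rewrite, for example,

     hypot (-x, fabs (y))   into  hypot (x, y)
     hypot (x, 0.0)         into  fabs (x)

   and, under -funsafe-math-optimizations,

     hypot (x, x)           into  fabs (x) * sqrt (2.0)  */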
8323 /* Fold a builtin function call to pow, powf, or powl. Return
8324 NULL_TREE if no simplification can be made. */
8325 static tree
8326 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8328 tree res;
8330 if (!validate_arg (arg0, REAL_TYPE)
8331 || !validate_arg (arg1, REAL_TYPE))
8332 return NULL_TREE;
8334 /* Calculate the result when the argument is a constant. */
8335 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8336 return res;
8338 /* Optimize pow(1.0,y) = 1.0. */
8339 if (real_onep (arg0))
8340 return omit_one_operand (type, build_real (type, dconst1), arg1);
8342 if (TREE_CODE (arg1) == REAL_CST
8343 && !TREE_OVERFLOW (arg1))
8345 REAL_VALUE_TYPE cint;
8346 REAL_VALUE_TYPE c;
8347 HOST_WIDE_INT n;
8349 c = TREE_REAL_CST (arg1);
8351 /* Optimize pow(x,0.0) = 1.0. */
8352 if (REAL_VALUES_EQUAL (c, dconst0))
8353 return omit_one_operand (type, build_real (type, dconst1),
8354 arg0);
8356 /* Optimize pow(x,1.0) = x. */
8357 if (REAL_VALUES_EQUAL (c, dconst1))
8358 return arg0;
8360 /* Optimize pow(x,-1.0) = 1.0/x. */
8361 if (REAL_VALUES_EQUAL (c, dconstm1))
8362 return fold_build2 (RDIV_EXPR, type,
8363 build_real (type, dconst1), arg0);
8365 /* Optimize pow(x,0.5) = sqrt(x). */
8366 if (flag_unsafe_math_optimizations
8367 && REAL_VALUES_EQUAL (c, dconsthalf))
8369 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8371 if (sqrtfn != NULL_TREE)
8372 return build_call_expr (sqrtfn, 1, arg0);
8375 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8376 if (flag_unsafe_math_optimizations)
8378 const REAL_VALUE_TYPE dconstroot
8379 = real_value_truncate (TYPE_MODE (type),
8380 *get_real_const (rv_third));
8382 if (REAL_VALUES_EQUAL (c, dconstroot))
8384 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8385 if (cbrtfn != NULL_TREE)
8386 return build_call_expr (cbrtfn, 1, arg0);
8390 /* Check for an integer exponent. */
8391 n = real_to_integer (&c);
8392 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8393 if (real_identical (&c, &cint))
8395 /* Attempt to evaluate pow at compile-time. */
8396 if (TREE_CODE (arg0) == REAL_CST
8397 && !TREE_OVERFLOW (arg0))
8399 REAL_VALUE_TYPE x;
8400 bool inexact;
8402 x = TREE_REAL_CST (arg0);
8403 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8404 if (flag_unsafe_math_optimizations || !inexact)
8405 return build_real (type, x);
8408 /* Strip sign ops from even integer powers. */
8409 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8411 tree narg0 = fold_strip_sign_ops (arg0);
8412 if (narg0)
8413 return build_call_expr (fndecl, 2, narg0, arg1);
8418 if (flag_unsafe_math_optimizations)
8420 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8422 /* Optimize pow(expN(x),y) = expN(x*y). */
8423 if (BUILTIN_EXPONENT_P (fcode))
8425 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8426 tree arg = CALL_EXPR_ARG (arg0, 0);
8427 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8428 return build_call_expr (expfn, 1, arg);
8431 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8432 if (BUILTIN_SQRT_P (fcode))
8434 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8435 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8436 build_real (type, dconsthalf));
8437 return build_call_expr (fndecl, 2, narg0, narg1);
8440 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8441 if (BUILTIN_CBRT_P (fcode))
8443 tree arg = CALL_EXPR_ARG (arg0, 0);
8444 if (tree_expr_nonnegative_p (arg))
8446 const REAL_VALUE_TYPE dconstroot
8447 = real_value_truncate (TYPE_MODE (type),
8448 *get_real_const (rv_third));
8449 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8450 build_real (type, dconstroot));
8451 return build_call_expr (fndecl, 2, arg, narg1);
8455 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8456 if (fcode == BUILT_IN_POW
8457 || fcode == BUILT_IN_POWF
8458 || fcode == BUILT_IN_POWL)
8460 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8461 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8462 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8463 return build_call_expr (fndecl, 2, arg00, narg1);
8467 return NULL_TREE;
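/* For illustration (an expository sketch, not part of the original
   source): the rules above rewrite, for example,

     pow (x, 1.0)     into  x
     pow (x, -1.0)    into  1.0 / x
     pow (2.0, 10.0)  into  the constant 1024.0 (exact integer power)

   and, under -funsafe-math-optimizations,

     pow (x, 0.5)         into  sqrt (x)
     pow (exp (x), y)     into  exp (x * y)
     pow (pow (x, y), z)  into  pow (x, y * z)  */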
8470 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8471 Return NULL_TREE if no simplification can be made. */
8472 static tree
8473 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8474 tree arg0, tree arg1, tree type)
8476 if (!validate_arg (arg0, REAL_TYPE)
8477 || !validate_arg (arg1, INTEGER_TYPE))
8478 return NULL_TREE;
8480 /* Optimize pow(1.0,y) = 1.0. */
8481 if (real_onep (arg0))
8482 return omit_one_operand (type, build_real (type, dconst1), arg1);
8484 if (host_integerp (arg1, 0))
8486 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8488 /* Evaluate powi at compile-time. */
8489 if (TREE_CODE (arg0) == REAL_CST
8490 && !TREE_OVERFLOW (arg0))
8492 REAL_VALUE_TYPE x;
8493 x = TREE_REAL_CST (arg0);
8494 real_powi (&x, TYPE_MODE (type), &x, c);
8495 return build_real (type, x);
8498 /* Optimize pow(x,0) = 1.0. */
8499 if (c == 0)
8500 return omit_one_operand (type, build_real (type, dconst1),
8501 arg0);
8503 /* Optimize pow(x,1) = x. */
8504 if (c == 1)
8505 return arg0;
8507 /* Optimize pow(x,-1) = 1.0/x. */
8508 if (c == -1)
8509 return fold_build2 (RDIV_EXPR, type,
8510 build_real (type, dconst1), arg0);
8513 return NULL_TREE;
8516 /* A subroutine of fold_builtin to fold the various exponent
8517 functions. Return NULL_TREE if no simplification can be made.
8518 FUNC is the corresponding MPFR exponent function. */
8520 static tree
8521 fold_builtin_exponent (tree fndecl, tree arg,
8522 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8524 if (validate_arg (arg, REAL_TYPE))
8526 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8527 tree res;
8529 /* Calculate the result when the argument is a constant. */
8530 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8531 return res;
8533 /* Optimize expN(logN(x)) = x. */
8534 if (flag_unsafe_math_optimizations)
8536 const enum built_in_function fcode = builtin_mathfn_code (arg);
8538 if ((func == mpfr_exp
8539 && (fcode == BUILT_IN_LOG
8540 || fcode == BUILT_IN_LOGF
8541 || fcode == BUILT_IN_LOGL))
8542 || (func == mpfr_exp2
8543 && (fcode == BUILT_IN_LOG2
8544 || fcode == BUILT_IN_LOG2F
8545 || fcode == BUILT_IN_LOG2L))
8546 || (func == mpfr_exp10
8547 && (fcode == BUILT_IN_LOG10
8548 || fcode == BUILT_IN_LOG10F
8549 || fcode == BUILT_IN_LOG10L)))
8550 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8554 return NULL_TREE;
8557 /* Return true if VAR is a VAR_DECL or a component thereof. */
8559 static bool
8560 var_decl_component_p (tree var)
8562 tree inner = var;
8563 while (handled_component_p (inner))
8564 inner = TREE_OPERAND (inner, 0);
8565 return SSA_VAR_P (inner);
8568 /* Fold function call to builtin memset. Return
8569 NULL_TREE if no simplification can be made. */
8571 static tree
8572 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8574 tree var, ret;
8575 unsigned HOST_WIDE_INT length, cval;
8577 if (! validate_arg (dest, POINTER_TYPE)
8578 || ! validate_arg (c, INTEGER_TYPE)
8579 || ! validate_arg (len, INTEGER_TYPE))
8580 return NULL_TREE;
8582 if (! host_integerp (len, 1))
8583 return NULL_TREE;
8585 /* If the LEN parameter is zero, return DEST. */
8586 if (integer_zerop (len))
8587 return omit_one_operand (type, dest, c);
8589 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8590 return NULL_TREE;
8592 var = dest;
8593 STRIP_NOPS (var);
8594 if (TREE_CODE (var) != ADDR_EXPR)
8595 return NULL_TREE;
8597 var = TREE_OPERAND (var, 0);
8598 if (TREE_THIS_VOLATILE (var))
8599 return NULL_TREE;
8601 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8602 && !POINTER_TYPE_P (TREE_TYPE (var)))
8603 return NULL_TREE;
8605 if (! var_decl_component_p (var))
8606 return NULL_TREE;
8608 length = tree_low_cst (len, 1);
8609 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8610 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8611 < (int) length)
8612 return NULL_TREE;
8614 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8615 return NULL_TREE;
8617 if (integer_zerop (c))
8618 cval = 0;
8619 else
8621 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8622 return NULL_TREE;
8624 cval = tree_low_cst (c, 1);
8625 cval &= 0xff;
8626 cval |= cval << 8;
8627 cval |= cval << 16;
8628 cval |= (cval << 31) << 1;
8631 ret = build_int_cst_type (TREE_TYPE (var), cval);
8632 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8633 if (ignore)
8634 return ret;
8636 return omit_one_operand (type, dest, ret);
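/* Illustrative sketch, added for exposition (not in the original file):
   on a typical target with 8-bit bytes and 32-bit int, and a suitably
   aligned destination,

     int i;
     memset (&i, 0xab, sizeof (i));

   passes the checks above and is folded into a store of the replicated
   byte value, roughly

     i = 0xabababab;

   with DEST still returned when the result of memset is used.  */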
8639 /* Fold function call to builtin bzero. Return
8640 NULL_TREE if no simplification can be made. */
8642 static tree
8643 fold_builtin_bzero (tree dest, tree size, bool ignore)
8645 if (! validate_arg (dest, POINTER_TYPE)
8646 || ! validate_arg (size, INTEGER_TYPE))
8647 return NULL_TREE;
8649 if (!ignore)
8650 return NULL_TREE;
8652 /* New argument list transforming bzero(ptr x, int y) to
8653 memset(ptr x, int 0, size_t y). This is done this way
8654 so that if it isn't expanded inline, we fall back to
8655 calling bzero instead of memset. */
8657 return fold_builtin_memset (dest, integer_zero_node,
8658 fold_convert (sizetype, size),
8659 void_type_node, ignore);
8662 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8663 NULL_TREE if no simplification can be made.
8664 If ENDP is 0, return DEST (like memcpy).
8665 If ENDP is 1, return DEST+LEN (like mempcpy).
8666 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8667 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8668 (memmove). */
8670 static tree
8671 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8673 tree destvar, srcvar, expr;
8675 if (! validate_arg (dest, POINTER_TYPE)
8676 || ! validate_arg (src, POINTER_TYPE)
8677 || ! validate_arg (len, INTEGER_TYPE))
8678 return NULL_TREE;
8680 /* If the LEN parameter is zero, return DEST. */
8681 if (integer_zerop (len))
8682 return omit_one_operand (type, dest, src);
8684 /* If SRC and DEST are the same (and not volatile), return
8685 DEST{,+LEN,+LEN-1}. */
8686 if (operand_equal_p (src, dest, 0))
8687 expr = len;
8688 else
8690 tree srctype, desttype;
8691 if (endp == 3)
8693 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8694 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8696 /* Both DEST and SRC must be pointer types.
8697 ??? This is what old code did. Is the testing for pointer types
8698 really mandatory?
8700 If either SRC is readonly or length is 1, we can use memcpy. */
8701 if (dest_align && src_align
8702 && (readonly_data_expr (src)
8703 || (host_integerp (len, 1)
8704 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8705 tree_low_cst (len, 1)))))
8707 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8708 if (!fn)
8709 return NULL_TREE;
8710 return build_call_expr (fn, 3, dest, src, len);
8712 return NULL_TREE;
8715 if (!host_integerp (len, 0))
8716 return NULL_TREE;
8717 /* FIXME:
8718 This logic loses for arguments like (type *)malloc (sizeof (type)),
8719 since we strip the casts off the VOID return value from malloc.
8720 Perhaps we ought to inherit type from non-VOID argument here? */
8721 STRIP_NOPS (src);
8722 STRIP_NOPS (dest);
8723 srctype = TREE_TYPE (TREE_TYPE (src));
8724 desttype = TREE_TYPE (TREE_TYPE (dest));
8725 if (!srctype || !desttype
8726 || !TYPE_SIZE_UNIT (srctype)
8727 || !TYPE_SIZE_UNIT (desttype)
8728 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8729 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8730 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8731 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8732 return NULL_TREE;
8734 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8735 < (int) TYPE_ALIGN (desttype)
8736 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8737 < (int) TYPE_ALIGN (srctype)))
8738 return NULL_TREE;
8740 if (!ignore)
8741 dest = builtin_save_expr (dest);
8743 srcvar = build_fold_indirect_ref (src);
8744 if (TREE_THIS_VOLATILE (srcvar))
8745 return NULL_TREE;
8746 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8747 return NULL_TREE;
8748 /* With memcpy, it is possible to bypass aliasing rules, so without
8749 this check, e.g. execute/20060930-2.c would be misoptimized, because
8750 it uses a conflicting alias set to hold the argument for the memcpy call.
8751 This check is probably unnecessary with -fno-strict-aliasing.
8752 Similarly for destvar. See also PR29286. */
8753 if (!var_decl_component_p (srcvar)
8754 /* Accept: memcpy (*char_var, "test", 1); that simplifies
8755 to char_var='t'; */
8756 || is_gimple_min_invariant (srcvar)
8757 || readonly_data_expr (src))
8758 return NULL_TREE;
8760 destvar = build_fold_indirect_ref (dest);
8761 if (TREE_THIS_VOLATILE (destvar))
8762 return NULL_TREE;
8763 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8764 return NULL_TREE;
8765 if (!var_decl_component_p (destvar))
8766 return NULL_TREE;
8768 if (srctype == desttype
8769 || (gimple_in_ssa_p (cfun)
8770 && useless_type_conversion_p (desttype, srctype)))
8771 expr = srcvar;
8772 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8773 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8774 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8775 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8776 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8777 else
8778 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8779 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8782 if (ignore)
8783 return expr;
8785 if (endp == 0 || endp == 3)
8786 return omit_one_operand (type, dest, expr);
8788 if (expr == len)
8789 expr = NULL_TREE;
8791 if (endp == 2)
8792 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8793 ssize_int (1));
8795 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8796 dest = fold_convert (type, dest);
8797 if (expr)
8798 dest = omit_one_operand (type, dest, expr);
8799 return dest;
8802 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8803 If LEN is not NULL, it represents the length of the string to be
8804 copied. Return NULL_TREE if no simplification can be made. */
8806 tree
8807 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8809 tree fn;
8811 if (!validate_arg (dest, POINTER_TYPE)
8812 || !validate_arg (src, POINTER_TYPE))
8813 return NULL_TREE;
8815 /* If SRC and DEST are the same (and not volatile), return DEST. */
8816 if (operand_equal_p (src, dest, 0))
8817 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8819 if (optimize_size)
8820 return NULL_TREE;
8822 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8823 if (!fn)
8824 return NULL_TREE;
8826 if (!len)
8828 len = c_strlen (src, 1);
8829 if (! len || TREE_SIDE_EFFECTS (len))
8830 return NULL_TREE;
8833 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8834 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8835 build_call_expr (fn, 3, dest, src, len));
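/* For illustration (an expository sketch, not part of the original
   source; buf is just a placeholder name): when not optimizing for
   size and the source length is known, a call such as

     strcpy (buf, "abc");

   is emitted by the code above as the equivalent of

     memcpy (buf, "abc", 4);

   copying the terminating NUL as part of the fixed-size block.  */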
8838 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8839 If SLEN is not NULL, it represents the length of the source string.
8840 Return NULL_TREE if no simplification can be made. */
8842 tree
8843 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8845 tree fn;
8847 if (!validate_arg (dest, POINTER_TYPE)
8848 || !validate_arg (src, POINTER_TYPE)
8849 || !validate_arg (len, INTEGER_TYPE))
8850 return NULL_TREE;
8852 /* If the LEN parameter is zero, return DEST. */
8853 if (integer_zerop (len))
8854 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8856 /* We can't compare slen with len as constants below if len is not a
8857 constant. */
8858 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8859 return NULL_TREE;
8861 if (!slen)
8862 slen = c_strlen (src, 1);
8864 /* Now, we must be passed a constant src ptr parameter. */
8865 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8866 return NULL_TREE;
8868 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8870 /* We do not support simplification of this case, though we do
8871 support it when expanding trees into RTL. */
8872 /* FIXME: generate a call to __builtin_memset. */
8873 if (tree_int_cst_lt (slen, len))
8874 return NULL_TREE;
8876 /* OK transform into builtin memcpy. */
8877 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8878 if (!fn)
8879 return NULL_TREE;
8880 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8881 build_call_expr (fn, 3, dest, src, len));
8884 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8885 arguments to the call, and TYPE is its return type.
8886 Return NULL_TREE if no simplification can be made. */
8888 static tree
8889 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8891 if (!validate_arg (arg1, POINTER_TYPE)
8892 || !validate_arg (arg2, INTEGER_TYPE)
8893 || !validate_arg (len, INTEGER_TYPE))
8894 return NULL_TREE;
8895 else
8897 const char *p1;
8899 if (TREE_CODE (arg2) != INTEGER_CST
8900 || !host_integerp (len, 1))
8901 return NULL_TREE;
8903 p1 = c_getstr (arg1);
8904 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8906 char c;
8907 const char *r;
8908 tree tem;
8910 if (target_char_cast (arg2, &c))
8911 return NULL_TREE;
8913 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8915 if (r == NULL)
8916 return build_int_cst (TREE_TYPE (arg1), 0);
8918 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8919 size_int (r - p1));
8920 return fold_convert (type, tem);
8922 return NULL_TREE;
8926 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8927 Return NULL_TREE if no simplification can be made. */
8929 static tree
8930 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8932 const char *p1, *p2;
8934 if (!validate_arg (arg1, POINTER_TYPE)
8935 || !validate_arg (arg2, POINTER_TYPE)
8936 || !validate_arg (len, INTEGER_TYPE))
8937 return NULL_TREE;
8939 /* If the LEN parameter is zero, return zero. */
8940 if (integer_zerop (len))
8941 return omit_two_operands (integer_type_node, integer_zero_node,
8942 arg1, arg2);
8944 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8945 if (operand_equal_p (arg1, arg2, 0))
8946 return omit_one_operand (integer_type_node, integer_zero_node, len);
8948 p1 = c_getstr (arg1);
8949 p2 = c_getstr (arg2);
8951 /* If all arguments are constant, and the value of len is not greater
8952 than the lengths of arg1 and arg2, evaluate at compile-time. */
8953 if (host_integerp (len, 1) && p1 && p2
8954 && compare_tree_int (len, strlen (p1) + 1) <= 0
8955 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8957 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8959 if (r > 0)
8960 return integer_one_node;
8961 else if (r < 0)
8962 return integer_minus_one_node;
8963 else
8964 return integer_zero_node;
8967 /* If the len parameter is one, return an expression corresponding to
8968 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8969 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8971 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8972 tree cst_uchar_ptr_node
8973 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8975 tree ind1 = fold_convert (integer_type_node,
8976 build1 (INDIRECT_REF, cst_uchar_node,
8977 fold_convert (cst_uchar_ptr_node,
8978 arg1)));
8979 tree ind2 = fold_convert (integer_type_node,
8980 build1 (INDIRECT_REF, cst_uchar_node,
8981 fold_convert (cst_uchar_ptr_node,
8982 arg2)));
8983 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
8986 return NULL_TREE;
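/* Illustrative sketch, added for exposition (not in the original file):
   the rules above evaluate constant cases at compile time, for example

     memcmp ("abc", "abd", 3)   -- folds to the constant -1

   and reduce single-byte comparisons, so memcmp (p, q, 1) becomes
   roughly

     (int) *(const unsigned char *) p - (int) *(const unsigned char *) q  */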
8989 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8990 Return NULL_TREE if no simplification can be made. */
8992 static tree
8993 fold_builtin_strcmp (tree arg1, tree arg2)
8995 const char *p1, *p2;
8997 if (!validate_arg (arg1, POINTER_TYPE)
8998 || !validate_arg (arg2, POINTER_TYPE))
8999 return NULL_TREE;
9001 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9002 if (operand_equal_p (arg1, arg2, 0))
9003 return integer_zero_node;
9005 p1 = c_getstr (arg1);
9006 p2 = c_getstr (arg2);
9008 if (p1 && p2)
9010 const int i = strcmp (p1, p2);
9011 if (i < 0)
9012 return integer_minus_one_node;
9013 else if (i > 0)
9014 return integer_one_node;
9015 else
9016 return integer_zero_node;
9019 /* If the second arg is "", return *(const unsigned char*)arg1. */
9020 if (p2 && *p2 == '\0')
9022 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9023 tree cst_uchar_ptr_node
9024 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9026 return fold_convert (integer_type_node,
9027 build1 (INDIRECT_REF, cst_uchar_node,
9028 fold_convert (cst_uchar_ptr_node,
9029 arg1)));
9032 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9033 if (p1 && *p1 == '\0')
9035 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9036 tree cst_uchar_ptr_node
9037 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9039 tree temp = fold_convert (integer_type_node,
9040 build1 (INDIRECT_REF, cst_uchar_node,
9041 fold_convert (cst_uchar_ptr_node,
9042 arg2)));
9043 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9046 return NULL_TREE;
9049 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9050 Return NULL_TREE if no simplification can be made. */
9052 static tree
9053 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9055 const char *p1, *p2;
9057 if (!validate_arg (arg1, POINTER_TYPE)
9058 || !validate_arg (arg2, POINTER_TYPE)
9059 || !validate_arg (len, INTEGER_TYPE))
9060 return NULL_TREE;
9062 /* If the LEN parameter is zero, return zero. */
9063 if (integer_zerop (len))
9064 return omit_two_operands (integer_type_node, integer_zero_node,
9065 arg1, arg2);
9067 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9068 if (operand_equal_p (arg1, arg2, 0))
9069 return omit_one_operand (integer_type_node, integer_zero_node, len);
9071 p1 = c_getstr (arg1);
9072 p2 = c_getstr (arg2);
9074 if (host_integerp (len, 1) && p1 && p2)
9076 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9077 if (i > 0)
9078 return integer_one_node;
9079 else if (i < 0)
9080 return integer_minus_one_node;
9081 else
9082 return integer_zero_node;
9085 /* If the second arg is "", and the length is greater than zero,
9086 return *(const unsigned char*)arg1. */
9087 if (p2 && *p2 == '\0'
9088 && TREE_CODE (len) == INTEGER_CST
9089 && tree_int_cst_sgn (len) == 1)
9091 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9092 tree cst_uchar_ptr_node
9093 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9095 return fold_convert (integer_type_node,
9096 build1 (INDIRECT_REF, cst_uchar_node,
9097 fold_convert (cst_uchar_ptr_node,
9098 arg1)));
9101 /* If the first arg is "", and the length is greater than zero,
9102 return -*(const unsigned char*)arg2. */
9103 if (p1 && *p1 == '\0'
9104 && TREE_CODE (len) == INTEGER_CST
9105 && tree_int_cst_sgn (len) == 1)
9107 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9108 tree cst_uchar_ptr_node
9109 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9111 tree temp = fold_convert (integer_type_node,
9112 build1 (INDIRECT_REF, cst_uchar_node,
9113 fold_convert (cst_uchar_ptr_node,
9114 arg2)));
9115 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9118 /* If the len parameter is one, return an expression corresponding to
9119 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9120 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9122 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9123 tree cst_uchar_ptr_node
9124 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9126 tree ind1 = fold_convert (integer_type_node,
9127 build1 (INDIRECT_REF, cst_uchar_node,
9128 fold_convert (cst_uchar_ptr_node,
9129 arg1)));
9130 tree ind2 = fold_convert (integer_type_node,
9131 build1 (INDIRECT_REF, cst_uchar_node,
9132 fold_convert (cst_uchar_ptr_node,
9133 arg2)));
9134 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9137 return NULL_TREE;
9140 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9141 ARG. Return NULL_TREE if no simplification can be made. */
9143 static tree
9144 fold_builtin_signbit (tree arg, tree type)
9146 tree temp;
9148 if (!validate_arg (arg, REAL_TYPE))
9149 return NULL_TREE;
9151 /* If ARG is a compile-time constant, determine the result. */
9152 if (TREE_CODE (arg) == REAL_CST
9153 && !TREE_OVERFLOW (arg))
9155 REAL_VALUE_TYPE c;
9157 c = TREE_REAL_CST (arg);
9158 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9159 return fold_convert (type, temp);
9162 /* If ARG is non-negative, the result is always zero. */
9163 if (tree_expr_nonnegative_p (arg))
9164 return omit_one_operand (type, integer_zero_node, arg);
9166 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9167 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9168 return fold_build2 (LT_EXPR, type, arg,
9169 build_real (TREE_TYPE (arg), dconst0));
9171 return NULL_TREE;
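/* Editor's illustration -- not part of the original source.  Examples of
   the signbit foldings above:

     signbit (-4.0)     folds to 1 and signbit (3.0) folds to 0;
     signbit (fabs (x)) folds to 0 because the argument is non-negative;
     when signed zeros are not honored (e.g. under -ffast-math),
     signbit (x) may instead fold to the comparison x < 0.0.  */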
9174 /* Fold function call to builtin copysign, copysignf or copysignl with
9175 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9176 be made. */
9178 static tree
9179 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9181 tree tem;
9183 if (!validate_arg (arg1, REAL_TYPE)
9184 || !validate_arg (arg2, REAL_TYPE))
9185 return NULL_TREE;
9187 /* copysign(X,X) is X. */
9188 if (operand_equal_p (arg1, arg2, 0))
9189 return fold_convert (type, arg1);
9191 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9192 if (TREE_CODE (arg1) == REAL_CST
9193 && TREE_CODE (arg2) == REAL_CST
9194 && !TREE_OVERFLOW (arg1)
9195 && !TREE_OVERFLOW (arg2))
9197 REAL_VALUE_TYPE c1, c2;
9199 c1 = TREE_REAL_CST (arg1);
9200 c2 = TREE_REAL_CST (arg2);
9201 /* c1.sign := c2.sign. */
9202 real_copysign (&c1, &c2);
9203 return build_real (type, c1);
9206 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9207 Remember to evaluate Y for side-effects. */
9208 if (tree_expr_nonnegative_p (arg2))
9209 return omit_one_operand (type,
9210 fold_build1 (ABS_EXPR, type, arg1),
9211 arg2);
9213 /* Strip sign changing operations for the first argument. */
9214 tem = fold_strip_sign_ops (arg1);
9215 if (tem)
9216 return build_call_expr (fndecl, 2, tem, arg2);
9218 return NULL_TREE;
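/* Editor's illustration -- not part of the original source.  Examples of
   the copysign foldings above:

     copysign (x, x)      folds to x,
     copysign (-3.0, 2.0) folds to the constant 3.0,
     copysign (x, 2.0)    folds to fabs (x), since the sign source is
                          known to be non-negative,
     copysign (-x, y)     folds to a new call copysign (x, y).  */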
9221 /* Fold a call to builtin isascii with argument ARG. */
9223 static tree
9224 fold_builtin_isascii (tree arg)
9226 if (!validate_arg (arg, INTEGER_TYPE))
9227 return NULL_TREE;
9228 else
9230 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9231 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9232 build_int_cst (NULL_TREE,
9233 ~ (unsigned HOST_WIDE_INT) 0x7f));
9234 return fold_build2 (EQ_EXPR, integer_type_node,
9235 arg, integer_zero_node);
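/* Editor's illustration -- not part of the original source.  With the
   transformation above, isascii (c) becomes ((c & ~0x7f) == 0), so for
   instance isascii (65) folds to 1 and isascii (200) folds to 0, with no
   library call emitted.  */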
9239 /* Fold a call to builtin toascii with argument ARG. */
9241 static tree
9242 fold_builtin_toascii (tree arg)
9244 if (!validate_arg (arg, INTEGER_TYPE))
9245 return NULL_TREE;
9247 /* Transform toascii(c) -> (c & 0x7f). */
9248 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9249 build_int_cst (NULL_TREE, 0x7f));
9252 /* Fold a call to builtin isdigit with argument ARG. */
9254 static tree
9255 fold_builtin_isdigit (tree arg)
9257 if (!validate_arg (arg, INTEGER_TYPE))
9258 return NULL_TREE;
9259 else
9261 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9262 /* According to the C standard, isdigit is unaffected by locale.
9263 However, it definitely is affected by the target character set. */
9264 unsigned HOST_WIDE_INT target_digit0
9265 = lang_hooks.to_target_charset ('0');
9267 if (target_digit0 == 0)
9268 return NULL_TREE;
9270 arg = fold_convert (unsigned_type_node, arg);
9271 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9272 build_int_cst (unsigned_type_node, target_digit0));
9273 return fold_build2 (LE_EXPR, integer_type_node, arg,
9274 build_int_cst (unsigned_type_node, 9));
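/* Editor's illustration -- not part of the original source, and it
   assumes an ASCII execution character set where '0' is 48.  A call in a
   hypothetical helper such as

     int digit_p (int c) { return __builtin_isdigit (c); }

   is folded into the equivalent of

     return (unsigned) c - 48 <= 9;

   so isdigit ('5') reduces to the constant 1 at compile time.  */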
9278 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9280 static tree
9281 fold_builtin_fabs (tree arg, tree type)
9283 if (!validate_arg (arg, REAL_TYPE))
9284 return NULL_TREE;
9286 arg = fold_convert (type, arg);
9287 if (TREE_CODE (arg) == REAL_CST)
9288 return fold_abs_const (arg, type);
9289 return fold_build1 (ABS_EXPR, type, arg);
9292 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9294 static tree
9295 fold_builtin_abs (tree arg, tree type)
9297 if (!validate_arg (arg, INTEGER_TYPE))
9298 return NULL_TREE;
9300 arg = fold_convert (type, arg);
9301 if (TREE_CODE (arg) == INTEGER_CST)
9302 return fold_abs_const (arg, type);
9303 return fold_build1 (ABS_EXPR, type, arg);
9306 /* Fold a call to builtin fmin or fmax. */
9308 static tree
9309 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9311 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9313 /* Calculate the result when the argument is a constant. */
9314 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9316 if (res)
9317 return res;
9319 /* If either argument is NaN, return the other one. Avoid the
9320 transformation if we get (and honor) a signalling NaN. Using
9321 omit_one_operand() ensures we create a non-lvalue. */
9322 if (TREE_CODE (arg0) == REAL_CST
9323 && real_isnan (&TREE_REAL_CST (arg0))
9324 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9325 || ! TREE_REAL_CST (arg0).signalling))
9326 return omit_one_operand (type, arg1, arg0);
9327 if (TREE_CODE (arg1) == REAL_CST
9328 && real_isnan (&TREE_REAL_CST (arg1))
9329 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9330 || ! TREE_REAL_CST (arg1).signalling))
9331 return omit_one_operand (type, arg0, arg1);
9333 /* Transform fmin/fmax(x,x) -> x. */
9334 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9335 return omit_one_operand (type, arg0, arg1);
9337 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9338 functions to return the numeric arg if the other one is NaN.
9339 These tree codes don't honor that, so only transform if
9340 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9341 handled, so we don't have to worry about it either. */
9342 if (flag_finite_math_only)
9343 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9344 fold_convert (type, arg0),
9345 fold_convert (type, arg1));
9347 return NULL_TREE;
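/* Editor's illustration -- not part of the original source.  Examples of
   the fmin/fmax foldings above:

     fmin (2.0, 3.0)              folds to the constant 2.0 via MPFR,
     fmax (x, __builtin_nan ("")) folds to x (the quiet NaN operand is
                                  dropped, x is still evaluated),
     fmax (x, x)                  folds to x, and
     with -ffinite-math-only, fmax (a, b) becomes MAX_EXPR <a, b>.  */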
9350 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9352 static tree
9353 fold_builtin_carg (tree arg, tree type)
9355 if (validate_arg (arg, COMPLEX_TYPE))
9357 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9359 if (atan2_fn)
9361 tree new_arg = builtin_save_expr (arg);
9362 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9363 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9364 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9368 return NULL_TREE;
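/* Editor's illustration -- not part of the original source.  The carg
   folding above rewrites

     __builtin_carg (z)

   as a call to the corresponding atan2 builtin,

     __builtin_atan2 (__imag__ z, __real__ z)

   with z saved so that it is evaluated only once.  */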
9371 /* Fold a call to builtin logb/ilogb. */
9373 static tree
9374 fold_builtin_logb (tree arg, tree rettype)
9376 if (! validate_arg (arg, REAL_TYPE))
9377 return NULL_TREE;
9379 STRIP_NOPS (arg);
9381 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9383 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9385 switch (value->cl)
9387 case rvc_nan:
9388 case rvc_inf:
9389 /* If arg is Inf or NaN and we're logb, return it. */
9390 if (TREE_CODE (rettype) == REAL_TYPE)
9391 return fold_convert (rettype, arg);
9392 /* Fall through... */
9393 case rvc_zero:
9394 /* For logb, zero may set errno and/or raise an exception; for
9395 ilogb we don't know the target's FP_ILOGB0, so don't fold. */
9396 return NULL_TREE;
9397 case rvc_normal:
9398 /* For normal numbers, proceed iff radix == 2. In GCC,
9399 normalized significands are in the range [0.5, 1.0). We
9400 want the exponent as if they were [1.0, 2.0) so get the
9401 exponent and subtract 1. */
9402 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9403 return fold_convert (rettype, build_int_cst (NULL_TREE,
9404 REAL_EXP (value)-1));
9405 break;
9409 return NULL_TREE;
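/* Editor's illustration -- not part of the original source.  For a
   binary (radix-2) format the folding above gives, for example,

     logb (8.0) -> 3.0   and   ilogb (8.0) -> 3,

   while logb (+Inf) folds to +Inf and a zero argument is not folded
   because it may set errno or raise an exception.  */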
9412 /* Fold a call to builtin significand, if radix == 2. */
9414 static tree
9415 fold_builtin_significand (tree arg, tree rettype)
9417 if (! validate_arg (arg, REAL_TYPE))
9418 return NULL_TREE;
9420 STRIP_NOPS (arg);
9422 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9424 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9426 switch (value->cl)
9428 case rvc_zero:
9429 case rvc_nan:
9430 case rvc_inf:
9431 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9432 return fold_convert (rettype, arg);
9433 case rvc_normal:
9434 /* For normal numbers, proceed iff radix == 2. */
9435 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9437 REAL_VALUE_TYPE result = *value;
9438 /* In GCC, normalized significands are in the range [0.5,
9439 1.0). We want them to be [1.0, 2.0) so set the
9440 exponent to 1. */
9441 SET_REAL_EXP (&result, 1);
9442 return build_real (rettype, result);
9444 break;
9448 return NULL_TREE;
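/* Editor's illustration -- not part of the original source.  For a
   radix-2 format the folding above maps a constant onto [1.0, 2.0), so
   significand (12.0) folds to 1.5 (12.0 == 1.5 * 2**3), while +-0,
   +-Inf and NaN arguments fold to themselves.  */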
9451 /* Fold a call to builtin frexp, we can assume the base is 2. */
9453 static tree
9454 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9456 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9457 return NULL_TREE;
9459 STRIP_NOPS (arg0);
9461 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9462 return NULL_TREE;
9464 arg1 = build_fold_indirect_ref (arg1);
9466 /* Proceed if a valid pointer type was passed in. */
9467 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9469 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9470 tree frac, exp;
9472 switch (value->cl)
9474 case rvc_zero:
9475 /* For +-0, return (*exp = 0, +-0). */
9476 exp = integer_zero_node;
9477 frac = arg0;
9478 break;
9479 case rvc_nan:
9480 case rvc_inf:
9481 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9482 return omit_one_operand (rettype, arg0, arg1);
9483 case rvc_normal:
9485 /* Since the frexp function always expects base 2, and in
9486 GCC normalized significands are already in the range
9487 [0.5, 1.0), we have exactly what frexp wants. */
9488 REAL_VALUE_TYPE frac_rvt = *value;
9489 SET_REAL_EXP (&frac_rvt, 0);
9490 frac = build_real (rettype, frac_rvt);
9491 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9493 break;
9494 default:
9495 gcc_unreachable ();
9498 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9499 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9500 TREE_SIDE_EFFECTS (arg1) = 1;
9501 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9504 return NULL_TREE;
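/* Editor's illustration -- not part of the original source.  With a
   constant argument and an int exponent pointer, the folding above
   turns, say,

     d = __builtin_frexp (6.0, &e);

   into the equivalent of the compound expression (e = 3, 0.75), since
   6.0 == 0.75 * 2**3; for +-Inf or NaN the value is returned unchanged
   and *e is left unspecified.  */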
9507 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9508 then we can assume the base is two. If it's false, then we have to
9509 check the mode of the TYPE parameter in certain cases. */
9511 static tree
9512 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9514 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9516 STRIP_NOPS (arg0);
9517 STRIP_NOPS (arg1);
9519 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9520 if (real_zerop (arg0) || integer_zerop (arg1)
9521 || (TREE_CODE (arg0) == REAL_CST
9522 && !real_isfinite (&TREE_REAL_CST (arg0))))
9523 return omit_one_operand (type, arg0, arg1);
9525 /* If both arguments are constant, then try to evaluate it. */
9526 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9527 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9528 && host_integerp (arg1, 0))
9530 /* Bound the maximum adjustment to twice the range of the
9531 mode's valid exponents. Use abs to ensure the range is
9532 positive as a sanity check. */
9533 const long max_exp_adj = 2 *
9534 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9535 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9537 /* Get the user-requested adjustment. */
9538 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9540 /* The requested adjustment must be inside this range. This
9541 is a preliminary cap to avoid things like overflow; we
9542 may still fail to compute the result for other reasons. */
9543 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9545 REAL_VALUE_TYPE initial_result;
9547 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9549 /* Ensure we didn't overflow. */
9550 if (! real_isinf (&initial_result))
9552 const REAL_VALUE_TYPE trunc_result
9553 = real_value_truncate (TYPE_MODE (type), initial_result);
9555 /* Only proceed if the target mode can hold the
9556 resulting value. */
9557 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9558 return build_real (type, trunc_result);
9564 return NULL_TREE;
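/* Editor's illustration -- not part of the original source.  Examples of
   the ldexp/scalbn foldings above:

     ldexp (x, 0)   folds to x,
     ldexp (0.0, n) folds to 0.0 (n is still evaluated),
     ldexp (1.5, 4) folds to the constant 24.0,

   and scalbn/scalbln are folded the same way provided the type's radix
   is 2.  */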
9567 /* Fold a call to builtin modf. */
9569 static tree
9570 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9572 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9573 return NULL_TREE;
9575 STRIP_NOPS (arg0);
9577 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9578 return NULL_TREE;
9580 arg1 = build_fold_indirect_ref (arg1);
9582 /* Proceed if a valid pointer type was passed in. */
9583 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9585 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9586 REAL_VALUE_TYPE trunc, frac;
9588 switch (value->cl)
9590 case rvc_nan:
9591 case rvc_zero:
9592 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9593 trunc = frac = *value;
9594 break;
9595 case rvc_inf:
9596 /* For +-Inf, return (*arg1 = arg0, +-0). */
9597 frac = dconst0;
9598 frac.sign = value->sign;
9599 trunc = *value;
9600 break;
9601 case rvc_normal:
9602 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9603 real_trunc (&trunc, VOIDmode, value);
9604 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9605 /* If the original number was negative and already
9606 integral, then the fractional part is -0.0. */
9607 if (value->sign && frac.cl == rvc_zero)
9608 frac.sign = value->sign;
9609 break;
9612 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9613 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9614 build_real (rettype, trunc));
9615 TREE_SIDE_EFFECTS (arg1) = 1;
9616 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9617 build_real (rettype, frac));
9620 return NULL_TREE;
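/* Editor's illustration -- not part of the original source.  With a
   constant argument the folding above turns, say,

     f = __builtin_modf (2.5, &ip);

   into the equivalent of (ip = 2.0, 0.5), and __builtin_modf (-2.0, &ip)
   into (ip = -2.0, -0.0), preserving the sign on the zero fraction.  */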
9623 /* Fold a call to __builtin_isnan, __builtin_isinf, __builtin_isinf_sign,
9624 __builtin_isfinite or __builtin_finite. ARG is the argument for the call. */
9626 static tree
9627 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9629 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9630 REAL_VALUE_TYPE r;
9632 if (!validate_arg (arg, REAL_TYPE))
9633 return NULL_TREE;
9635 switch (builtin_index)
9637 case BUILT_IN_ISINF:
9638 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9639 return omit_one_operand (type, integer_zero_node, arg);
9641 if (TREE_CODE (arg) == REAL_CST)
9643 r = TREE_REAL_CST (arg);
9644 if (real_isinf (&r))
9645 return real_compare (GT_EXPR, &r, &dconst0)
9646 ? integer_one_node : integer_minus_one_node;
9647 else
9648 return integer_zero_node;
9651 return NULL_TREE;
9653 case BUILT_IN_ISINF_SIGN:
9655 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9656 /* In a boolean context, GCC will fold the inner COND_EXPR to
9657 1. So e.g. "if (isinf_sign(x))" would be folded to just
9658 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9659 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9660 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9661 tree tmp = NULL_TREE;
9663 arg = builtin_save_expr (arg);
9665 if (signbit_fn && isinf_fn)
9667 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
9668 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
9670 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
9671 signbit_call, integer_zero_node);
9672 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
9673 isinf_call, integer_zero_node);
9675 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
9676 integer_minus_one_node, integer_one_node);
9677 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
9678 integer_zero_node);
9681 return tmp;
9684 case BUILT_IN_ISFINITE:
9685 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9686 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9687 return omit_one_operand (type, integer_one_node, arg);
9689 if (TREE_CODE (arg) == REAL_CST)
9691 r = TREE_REAL_CST (arg);
9692 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9695 return NULL_TREE;
9697 case BUILT_IN_ISNAN:
9698 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9699 return omit_one_operand (type, integer_zero_node, arg);
9701 if (TREE_CODE (arg) == REAL_CST)
9703 r = TREE_REAL_CST (arg);
9704 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9707 arg = builtin_save_expr (arg);
9708 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9710 default:
9711 gcc_unreachable ();
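/* Editor's illustration -- not part of the original source.  Examples of
   the classification foldings above:

     isnan (x)    folds to an unordered self-comparison, roughly
                  __builtin_isunordered (x, x), which is true exactly
                  when x is a NaN, or to 0 when NaNs are not honored;
     isinf (c)    for a real constant c folds to 1, -1 or 0;
     isfinite (x) folds to 1 under -ffinite-math-only (x still evaluated).  */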
9715 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9716 This builtin will generate code to return the appropriate floating
9717 point classification depending on the value of the floating point
9718 number passed in. The possible return values must be supplied as
9719 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9720 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9721 one floating-point argument, which is "type generic". */
9723 static tree
9724 fold_builtin_fpclassify (tree exp)
9726 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9727 arg, type, res, tmp;
9728 enum machine_mode mode;
9729 REAL_VALUE_TYPE r;
9730 char buf[128];
9732 /* Verify the required arguments in the original call. */
9733 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9734 INTEGER_TYPE, INTEGER_TYPE,
9735 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9736 return NULL_TREE;
9738 fp_nan = CALL_EXPR_ARG (exp, 0);
9739 fp_infinite = CALL_EXPR_ARG (exp, 1);
9740 fp_normal = CALL_EXPR_ARG (exp, 2);
9741 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9742 fp_zero = CALL_EXPR_ARG (exp, 4);
9743 arg = CALL_EXPR_ARG (exp, 5);
9744 type = TREE_TYPE (arg);
9745 mode = TYPE_MODE (type);
9746 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
9748 /* fpclassify(x) ->
9749 isnan(x) ? FP_NAN :
9750 (fabs(x) == Inf ? FP_INFINITE :
9751 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9752 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9754 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9755 build_real (type, dconst0));
9756 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
9758 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9759 real_from_string (&r, buf);
9760 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
9761 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
9763 if (HONOR_INFINITIES (mode))
9765 real_inf (&r);
9766 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9767 build_real (type, r));
9768 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
9771 if (HONOR_NANS (mode))
9773 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
9774 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
9777 return res;
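/* Editor's illustration -- not part of the original source.  For an
   IEEE double argument the expansion above is roughly

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                           FP_SUBNORMAL, FP_ZERO, x)
       -> !__builtin_isunordered (ax, ax)
            ? (ax == __builtin_inf ()
                 ? FP_INFINITE
                 : (ax >= 0x1p-1022
                      ? FP_NORMAL
                      : (ax == 0.0 ? FP_ZERO : FP_SUBNORMAL)))
            : FP_NAN

   where ax stands for a saved fabs (x) and 0x1p-1022 is DBL_MIN.  */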
9780 /* Fold a call to an unordered comparison function such as
9781 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9782 being called and ARG0 and ARG1 are the arguments for the call.
9783 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9784 the opposite of the desired result. UNORDERED_CODE is used
9785 for modes that can hold NaNs and ORDERED_CODE is used for
9786 the rest. */
9788 static tree
9789 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9790 enum tree_code unordered_code,
9791 enum tree_code ordered_code)
9793 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9794 enum tree_code code;
9795 tree type0, type1;
9796 enum tree_code code0, code1;
9797 tree cmp_type = NULL_TREE;
9799 type0 = TREE_TYPE (arg0);
9800 type1 = TREE_TYPE (arg1);
9802 code0 = TREE_CODE (type0);
9803 code1 = TREE_CODE (type1);
9805 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9806 /* Choose the wider of two real types. */
9807 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9808 ? type0 : type1;
9809 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9810 cmp_type = type0;
9811 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9812 cmp_type = type1;
9814 arg0 = fold_convert (cmp_type, arg0);
9815 arg1 = fold_convert (cmp_type, arg1);
9817 if (unordered_code == UNORDERED_EXPR)
9819 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9820 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9821 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9824 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9825 : ordered_code;
9826 return fold_build1 (TRUTH_NOT_EXPR, type,
9827 fold_build2 (code, type, arg0, arg1));
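/* Editor's illustration -- not part of the original source.  For
   example, isgreater (x, y) is folded to the negation of an UNLE_EXPR,
   i.e. roughly !(x <= y || __builtin_isunordered (x, y)), which is true
   exactly when the operands are ordered and x > y; when the operands'
   mode cannot hold NaNs this collapses to !(x <= y).  Mixed operands
   such as isless (1, 2.5) are first converted to the wider real type.  */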
9830 /* Fold a call to built-in function FNDECL with 0 arguments.
9831 IGNORE is true if the result of the function call is ignored. This
9832 function returns NULL_TREE if no simplification was possible. */
9834 static tree
9835 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9837 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9838 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9839 switch (fcode)
9841 CASE_FLT_FN (BUILT_IN_INF):
9842 case BUILT_IN_INFD32:
9843 case BUILT_IN_INFD64:
9844 case BUILT_IN_INFD128:
9845 return fold_builtin_inf (type, true);
9847 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9848 return fold_builtin_inf (type, false);
9850 case BUILT_IN_CLASSIFY_TYPE:
9851 return fold_builtin_classify_type (NULL_TREE);
9853 default:
9854 break;
9856 return NULL_TREE;
9859 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9860 IGNORE is true if the result of the function call is ignored. This
9861 function returns NULL_TREE if no simplification was possible. */
9863 static tree
9864 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9866 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9867 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9868 switch (fcode)
9871 case BUILT_IN_CONSTANT_P:
9873 tree val = fold_builtin_constant_p (arg0);
9875 /* Gimplification will pull the CALL_EXPR for the builtin out of
9876 an if condition. When not optimizing, we'll not CSE it back.
9877 To avoid link-error regressions, return false now. */
9878 if (!val && !optimize)
9879 val = integer_zero_node;
9881 return val;
9884 case BUILT_IN_CLASSIFY_TYPE:
9885 return fold_builtin_classify_type (arg0);
9887 case BUILT_IN_STRLEN:
9888 return fold_builtin_strlen (arg0);
9890 CASE_FLT_FN (BUILT_IN_FABS):
9891 return fold_builtin_fabs (arg0, type);
9893 case BUILT_IN_ABS:
9894 case BUILT_IN_LABS:
9895 case BUILT_IN_LLABS:
9896 case BUILT_IN_IMAXABS:
9897 return fold_builtin_abs (arg0, type);
9899 CASE_FLT_FN (BUILT_IN_CONJ):
9900 if (validate_arg (arg0, COMPLEX_TYPE))
9901 return fold_build1 (CONJ_EXPR, type, arg0);
9902 break;
9904 CASE_FLT_FN (BUILT_IN_CREAL):
9905 if (validate_arg (arg0, COMPLEX_TYPE))
9906 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));
9907 break;
9909 CASE_FLT_FN (BUILT_IN_CIMAG):
9910 if (validate_arg (arg0, COMPLEX_TYPE))
9911 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9912 break;
9914 CASE_FLT_FN (BUILT_IN_CCOS):
9915 CASE_FLT_FN (BUILT_IN_CCOSH):
9916 /* These functions are "even", i.e. f(x) == f(-x). */
9917 if (validate_arg (arg0, COMPLEX_TYPE))
9919 tree narg = fold_strip_sign_ops (arg0);
9920 if (narg)
9921 return build_call_expr (fndecl, 1, narg);
9923 break;
9925 CASE_FLT_FN (BUILT_IN_CABS):
9926 return fold_builtin_cabs (arg0, type, fndecl);
9928 CASE_FLT_FN (BUILT_IN_CARG):
9929 return fold_builtin_carg (arg0, type);
9931 CASE_FLT_FN (BUILT_IN_SQRT):
9932 return fold_builtin_sqrt (arg0, type);
9934 CASE_FLT_FN (BUILT_IN_CBRT):
9935 return fold_builtin_cbrt (arg0, type);
9937 CASE_FLT_FN (BUILT_IN_ASIN):
9938 if (validate_arg (arg0, REAL_TYPE))
9939 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9940 &dconstm1, &dconst1, true);
9941 break;
9943 CASE_FLT_FN (BUILT_IN_ACOS):
9944 if (validate_arg (arg0, REAL_TYPE))
9945 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9946 &dconstm1, &dconst1, true);
9947 break;
9949 CASE_FLT_FN (BUILT_IN_ATAN):
9950 if (validate_arg (arg0, REAL_TYPE))
9951 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9952 break;
9954 CASE_FLT_FN (BUILT_IN_ASINH):
9955 if (validate_arg (arg0, REAL_TYPE))
9956 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9957 break;
9959 CASE_FLT_FN (BUILT_IN_ACOSH):
9960 if (validate_arg (arg0, REAL_TYPE))
9961 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9962 &dconst1, NULL, true);
9963 break;
9965 CASE_FLT_FN (BUILT_IN_ATANH):
9966 if (validate_arg (arg0, REAL_TYPE))
9967 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9968 &dconstm1, &dconst1, false);
9969 break;
9971 CASE_FLT_FN (BUILT_IN_SIN):
9972 if (validate_arg (arg0, REAL_TYPE))
9973 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9974 break;
9976 CASE_FLT_FN (BUILT_IN_COS):
9977 return fold_builtin_cos (arg0, type, fndecl);
9978 break;
9980 CASE_FLT_FN (BUILT_IN_TAN):
9981 return fold_builtin_tan (arg0, type);
9983 CASE_FLT_FN (BUILT_IN_CEXP):
9984 return fold_builtin_cexp (arg0, type);
9986 CASE_FLT_FN (BUILT_IN_CEXPI):
9987 if (validate_arg (arg0, REAL_TYPE))
9988 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9989 break;
9991 CASE_FLT_FN (BUILT_IN_SINH):
9992 if (validate_arg (arg0, REAL_TYPE))
9993 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9994 break;
9996 CASE_FLT_FN (BUILT_IN_COSH):
9997 return fold_builtin_cosh (arg0, type, fndecl);
9999 CASE_FLT_FN (BUILT_IN_TANH):
10000 if (validate_arg (arg0, REAL_TYPE))
10001 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10002 break;
10004 CASE_FLT_FN (BUILT_IN_ERF):
10005 if (validate_arg (arg0, REAL_TYPE))
10006 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10007 break;
10009 CASE_FLT_FN (BUILT_IN_ERFC):
10010 if (validate_arg (arg0, REAL_TYPE))
10011 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10012 break;
10014 CASE_FLT_FN (BUILT_IN_TGAMMA):
10015 if (validate_arg (arg0, REAL_TYPE))
10016 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10017 break;
10019 CASE_FLT_FN (BUILT_IN_EXP):
10020 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10022 CASE_FLT_FN (BUILT_IN_EXP2):
10023 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10025 CASE_FLT_FN (BUILT_IN_EXP10):
10026 CASE_FLT_FN (BUILT_IN_POW10):
10027 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10029 CASE_FLT_FN (BUILT_IN_EXPM1):
10030 if (validate_arg (arg0, REAL_TYPE))
10031 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10032 break;
10034 CASE_FLT_FN (BUILT_IN_LOG):
10035 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10037 CASE_FLT_FN (BUILT_IN_LOG2):
10038 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10040 CASE_FLT_FN (BUILT_IN_LOG10):
10041 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10043 CASE_FLT_FN (BUILT_IN_LOG1P):
10044 if (validate_arg (arg0, REAL_TYPE))
10045 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10046 &dconstm1, NULL, false);
10047 break;
10049 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10050 CASE_FLT_FN (BUILT_IN_J0):
10051 if (validate_arg (arg0, REAL_TYPE))
10052 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10053 NULL, NULL, 0);
10054 break;
10056 CASE_FLT_FN (BUILT_IN_J1):
10057 if (validate_arg (arg0, REAL_TYPE))
10058 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10059 NULL, NULL, 0);
10060 break;
10062 CASE_FLT_FN (BUILT_IN_Y0):
10063 if (validate_arg (arg0, REAL_TYPE))
10064 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10065 &dconst0, NULL, false);
10066 break;
10068 CASE_FLT_FN (BUILT_IN_Y1):
10069 if (validate_arg (arg0, REAL_TYPE))
10070 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10071 &dconst0, NULL, false);
10072 break;
10073 #endif
10075 CASE_FLT_FN (BUILT_IN_NAN):
10076 case BUILT_IN_NAND32:
10077 case BUILT_IN_NAND64:
10078 case BUILT_IN_NAND128:
10079 return fold_builtin_nan (arg0, type, true);
10081 CASE_FLT_FN (BUILT_IN_NANS):
10082 return fold_builtin_nan (arg0, type, false);
10084 CASE_FLT_FN (BUILT_IN_FLOOR):
10085 return fold_builtin_floor (fndecl, arg0);
10087 CASE_FLT_FN (BUILT_IN_CEIL):
10088 return fold_builtin_ceil (fndecl, arg0);
10090 CASE_FLT_FN (BUILT_IN_TRUNC):
10091 return fold_builtin_trunc (fndecl, arg0);
10093 CASE_FLT_FN (BUILT_IN_ROUND):
10094 return fold_builtin_round (fndecl, arg0);
10096 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10097 CASE_FLT_FN (BUILT_IN_RINT):
10098 return fold_trunc_transparent_mathfn (fndecl, arg0);
10100 CASE_FLT_FN (BUILT_IN_LCEIL):
10101 CASE_FLT_FN (BUILT_IN_LLCEIL):
10102 CASE_FLT_FN (BUILT_IN_LFLOOR):
10103 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10104 CASE_FLT_FN (BUILT_IN_LROUND):
10105 CASE_FLT_FN (BUILT_IN_LLROUND):
10106 return fold_builtin_int_roundingfn (fndecl, arg0);
10108 CASE_FLT_FN (BUILT_IN_LRINT):
10109 CASE_FLT_FN (BUILT_IN_LLRINT):
10110 return fold_fixed_mathfn (fndecl, arg0);
10112 case BUILT_IN_BSWAP32:
10113 case BUILT_IN_BSWAP64:
10114 return fold_builtin_bswap (fndecl, arg0);
10116 CASE_INT_FN (BUILT_IN_FFS):
10117 CASE_INT_FN (BUILT_IN_CLZ):
10118 CASE_INT_FN (BUILT_IN_CTZ):
10119 CASE_INT_FN (BUILT_IN_POPCOUNT):
10120 CASE_INT_FN (BUILT_IN_PARITY):
10121 return fold_builtin_bitop (fndecl, arg0);
10123 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10124 return fold_builtin_signbit (arg0, type);
10126 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10127 return fold_builtin_significand (arg0, type);
10129 CASE_FLT_FN (BUILT_IN_ILOGB):
10130 CASE_FLT_FN (BUILT_IN_LOGB):
10131 return fold_builtin_logb (arg0, type);
10133 case BUILT_IN_ISASCII:
10134 return fold_builtin_isascii (arg0);
10136 case BUILT_IN_TOASCII:
10137 return fold_builtin_toascii (arg0);
10139 case BUILT_IN_ISDIGIT:
10140 return fold_builtin_isdigit (arg0);
10142 CASE_FLT_FN (BUILT_IN_FINITE):
10143 case BUILT_IN_FINITED32:
10144 case BUILT_IN_FINITED64:
10145 case BUILT_IN_FINITED128:
10146 case BUILT_IN_ISFINITE:
10147 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10149 CASE_FLT_FN (BUILT_IN_ISINF):
10150 case BUILT_IN_ISINFD32:
10151 case BUILT_IN_ISINFD64:
10152 case BUILT_IN_ISINFD128:
10153 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10155 case BUILT_IN_ISINF_SIGN:
10156 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10158 CASE_FLT_FN (BUILT_IN_ISNAN):
10159 case BUILT_IN_ISNAND32:
10160 case BUILT_IN_ISNAND64:
10161 case BUILT_IN_ISNAND128:
10162 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10164 case BUILT_IN_PRINTF:
10165 case BUILT_IN_PRINTF_UNLOCKED:
10166 case BUILT_IN_VPRINTF:
10167 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10169 default:
10170 break;
10173 return NULL_TREE;
10177 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10178 IGNORE is true if the result of the function call is ignored. This
10179 function returns NULL_TREE if no simplification was possible. */
10181 static tree
10182 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10184 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10185 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10187 switch (fcode)
10189 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10190 CASE_FLT_FN (BUILT_IN_JN):
10191 if (validate_arg (arg0, INTEGER_TYPE)
10192 && validate_arg (arg1, REAL_TYPE))
10193 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10194 break;
10196 CASE_FLT_FN (BUILT_IN_YN):
10197 if (validate_arg (arg0, INTEGER_TYPE)
10198 && validate_arg (arg1, REAL_TYPE))
10199 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10200 &dconst0, false);
10201 break;
10203 CASE_FLT_FN (BUILT_IN_DREM):
10204 CASE_FLT_FN (BUILT_IN_REMAINDER):
10205 if (validate_arg (arg0, REAL_TYPE)
10206 && validate_arg (arg1, REAL_TYPE))
10207 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10208 break;
10210 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10211 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10212 if (validate_arg (arg0, REAL_TYPE)
10213 && validate_arg (arg1, POINTER_TYPE))
10214 return do_mpfr_lgamma_r (arg0, arg1, type);
10215 break;
10216 #endif
10218 CASE_FLT_FN (BUILT_IN_ATAN2):
10219 if (validate_arg (arg0, REAL_TYPE)
10220 && validate_arg (arg1, REAL_TYPE))
10221 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10222 break;
10224 CASE_FLT_FN (BUILT_IN_FDIM):
10225 if (validate_arg (arg0, REAL_TYPE)
10226 && validate_arg (arg1, REAL_TYPE))
10227 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10228 break;
10230 CASE_FLT_FN (BUILT_IN_HYPOT):
10231 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10233 CASE_FLT_FN (BUILT_IN_LDEXP):
10234 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10235 CASE_FLT_FN (BUILT_IN_SCALBN):
10236 CASE_FLT_FN (BUILT_IN_SCALBLN):
10237 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10239 CASE_FLT_FN (BUILT_IN_FREXP):
10240 return fold_builtin_frexp (arg0, arg1, type);
10242 CASE_FLT_FN (BUILT_IN_MODF):
10243 return fold_builtin_modf (arg0, arg1, type);
10245 case BUILT_IN_BZERO:
10246 return fold_builtin_bzero (arg0, arg1, ignore);
10248 case BUILT_IN_FPUTS:
10249 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10251 case BUILT_IN_FPUTS_UNLOCKED:
10252 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10254 case BUILT_IN_STRSTR:
10255 return fold_builtin_strstr (arg0, arg1, type);
10257 case BUILT_IN_STRCAT:
10258 return fold_builtin_strcat (arg0, arg1);
10260 case BUILT_IN_STRSPN:
10261 return fold_builtin_strspn (arg0, arg1);
10263 case BUILT_IN_STRCSPN:
10264 return fold_builtin_strcspn (arg0, arg1);
10266 case BUILT_IN_STRCHR:
10267 case BUILT_IN_INDEX:
10268 return fold_builtin_strchr (arg0, arg1, type);
10270 case BUILT_IN_STRRCHR:
10271 case BUILT_IN_RINDEX:
10272 return fold_builtin_strrchr (arg0, arg1, type);
10274 case BUILT_IN_STRCPY:
10275 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10277 case BUILT_IN_STRCMP:
10278 return fold_builtin_strcmp (arg0, arg1);
10280 case BUILT_IN_STRPBRK:
10281 return fold_builtin_strpbrk (arg0, arg1, type);
10283 case BUILT_IN_EXPECT:
10284 return fold_builtin_expect (arg0, arg1);
10286 CASE_FLT_FN (BUILT_IN_POW):
10287 return fold_builtin_pow (fndecl, arg0, arg1, type);
10289 CASE_FLT_FN (BUILT_IN_POWI):
10290 return fold_builtin_powi (fndecl, arg0, arg1, type);
10292 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10293 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10295 CASE_FLT_FN (BUILT_IN_FMIN):
10296 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10298 CASE_FLT_FN (BUILT_IN_FMAX):
10299 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10301 case BUILT_IN_ISGREATER:
10302 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10303 case BUILT_IN_ISGREATEREQUAL:
10304 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10305 case BUILT_IN_ISLESS:
10306 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10307 case BUILT_IN_ISLESSEQUAL:
10308 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10309 case BUILT_IN_ISLESSGREATER:
10310 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10311 case BUILT_IN_ISUNORDERED:
10312 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10313 NOP_EXPR);
10315 /* We do the folding for va_start in the expander. */
10316 case BUILT_IN_VA_START:
10317 break;
10319 case BUILT_IN_SPRINTF:
10320 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10322 case BUILT_IN_OBJECT_SIZE:
10323 return fold_builtin_object_size (arg0, arg1);
10325 case BUILT_IN_PRINTF:
10326 case BUILT_IN_PRINTF_UNLOCKED:
10327 case BUILT_IN_VPRINTF:
10328 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10330 case BUILT_IN_PRINTF_CHK:
10331 case BUILT_IN_VPRINTF_CHK:
10332 if (!validate_arg (arg0, INTEGER_TYPE)
10333 || TREE_SIDE_EFFECTS (arg0))
10334 return NULL_TREE;
10335 else
10336 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10337 break;
10339 case BUILT_IN_FPRINTF:
10340 case BUILT_IN_FPRINTF_UNLOCKED:
10341 case BUILT_IN_VFPRINTF:
10342 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10343 ignore, fcode);
10345 default:
10346 break;
10348 return NULL_TREE;
10351 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10352 and ARG2. IGNORE is true if the result of the function call is ignored.
10353 This function returns NULL_TREE if no simplification was possible. */
10355 static tree
10356 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10358 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10359 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10360 switch (fcode)
10363 CASE_FLT_FN (BUILT_IN_SINCOS):
10364 return fold_builtin_sincos (arg0, arg1, arg2);
10366 CASE_FLT_FN (BUILT_IN_FMA):
10367 if (validate_arg (arg0, REAL_TYPE)
10368 && validate_arg (arg1, REAL_TYPE)
10369 && validate_arg (arg2, REAL_TYPE))
10370 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10371 break;
10373 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10374 CASE_FLT_FN (BUILT_IN_REMQUO):
10375 if (validate_arg (arg0, REAL_TYPE)
10376 && validate_arg (arg1, REAL_TYPE)
10377 && validate_arg (arg2, POINTER_TYPE))
10378 return do_mpfr_remquo (arg0, arg1, arg2);
10379 break;
10380 #endif
10382 case BUILT_IN_MEMSET:
10383 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10385 case BUILT_IN_BCOPY:
10386 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10388 case BUILT_IN_MEMCPY:
10389 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10391 case BUILT_IN_MEMPCPY:
10392 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10394 case BUILT_IN_MEMMOVE:
10395 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10397 case BUILT_IN_STRNCAT:
10398 return fold_builtin_strncat (arg0, arg1, arg2);
10400 case BUILT_IN_STRNCPY:
10401 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10403 case BUILT_IN_STRNCMP:
10404 return fold_builtin_strncmp (arg0, arg1, arg2);
10406 case BUILT_IN_MEMCHR:
10407 return fold_builtin_memchr (arg0, arg1, arg2, type);
10409 case BUILT_IN_BCMP:
10410 case BUILT_IN_MEMCMP:
10411 return fold_builtin_memcmp (arg0, arg1, arg2);
10413 case BUILT_IN_SPRINTF:
10414 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10416 case BUILT_IN_STRCPY_CHK:
10417 case BUILT_IN_STPCPY_CHK:
10418 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10419 ignore, fcode);
10421 case BUILT_IN_STRCAT_CHK:
10422 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10424 case BUILT_IN_PRINTF_CHK:
10425 case BUILT_IN_VPRINTF_CHK:
10426 if (!validate_arg (arg0, INTEGER_TYPE)
10427 || TREE_SIDE_EFFECTS (arg0))
10428 return NULL_TREE;
10429 else
10430 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10431 break;
10433 case BUILT_IN_FPRINTF:
10434 case BUILT_IN_FPRINTF_UNLOCKED:
10435 case BUILT_IN_VFPRINTF:
10436 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10438 case BUILT_IN_FPRINTF_CHK:
10439 case BUILT_IN_VFPRINTF_CHK:
10440 if (!validate_arg (arg1, INTEGER_TYPE)
10441 || TREE_SIDE_EFFECTS (arg1))
10442 return NULL_TREE;
10443 else
10444 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10445 ignore, fcode);
10447 default:
10448 break;
10450 return NULL_TREE;
10453 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10454 ARG2, and ARG3. IGNORE is true if the result of the function call is
10455 ignored. This function returns NULL_TREE if no simplification was
10456 possible. */
10458 static tree
10459 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10460 bool ignore)
10462 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10464 switch (fcode)
10466 case BUILT_IN_MEMCPY_CHK:
10467 case BUILT_IN_MEMPCPY_CHK:
10468 case BUILT_IN_MEMMOVE_CHK:
10469 case BUILT_IN_MEMSET_CHK:
10470 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10471 NULL_TREE, ignore,
10472 DECL_FUNCTION_CODE (fndecl));
10474 case BUILT_IN_STRNCPY_CHK:
10475 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10477 case BUILT_IN_STRNCAT_CHK:
10478 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10480 case BUILT_IN_FPRINTF_CHK:
10481 case BUILT_IN_VFPRINTF_CHK:
10482 if (!validate_arg (arg1, INTEGER_TYPE)
10483 || TREE_SIDE_EFFECTS (arg1))
10484 return NULL_TREE;
10485 else
10486 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10487 ignore, fcode);
10488 break;
10490 default:
10491 break;
10493 return NULL_TREE;
10496 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10497 arguments, where NARGS <= 4. IGNORE is true if the result of the
10498 function call is ignored. This function returns NULL_TREE if no
10499 simplification was possible. Note that this only folds builtins with
10500 fixed argument patterns. Foldings that do varargs-to-varargs
10501 transformations, or that match calls with more than 4 arguments,
10502 need to be handled with fold_builtin_varargs instead. */
10504 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10506 static tree
10507 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10509 tree ret = NULL_TREE;
10511 switch (nargs)
10513 case 0:
10514 ret = fold_builtin_0 (fndecl, ignore);
10515 break;
10516 case 1:
10517 ret = fold_builtin_1 (fndecl, args[0], ignore);
10518 break;
10519 case 2:
10520 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10521 break;
10522 case 3:
10523 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10524 break;
10525 case 4:
10526 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10527 ignore);
10528 break;
10529 default:
10530 break;
10532 if (ret)
10534 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10535 TREE_NO_WARNING (ret) = 1;
10536 return ret;
10538 return NULL_TREE;
10541 /* Builtins with folding operations that operate on "..." arguments
10542 need special handling; we need to store the arguments in a convenient
10543 data structure before attempting any folding. Fortunately there are
10544 only a few builtins that fall into this category. FNDECL is the
10545 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10546 result of the function call is ignored. */
10548 static tree
10549 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10551 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10552 tree ret = NULL_TREE;
10554 switch (fcode)
10556 case BUILT_IN_SPRINTF_CHK:
10557 case BUILT_IN_VSPRINTF_CHK:
10558 ret = fold_builtin_sprintf_chk (exp, fcode);
10559 break;
10561 case BUILT_IN_SNPRINTF_CHK:
10562 case BUILT_IN_VSNPRINTF_CHK:
10563 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10564 break;
10566 case BUILT_IN_FPCLASSIFY:
10567 ret = fold_builtin_fpclassify (exp);
10568 break;
10570 default:
10571 break;
10573 if (ret)
10575 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10576 TREE_NO_WARNING (ret) = 1;
10577 return ret;
10579 return NULL_TREE;
10582 /* A wrapper function for builtin folding that prevents warnings for
10583 "statement without effect" and the like, caused by removing the
10584 call node earlier than the warning is generated. */
10586 tree
10587 fold_call_expr (tree exp, bool ignore)
10589 tree ret = NULL_TREE;
10590 tree fndecl = get_callee_fndecl (exp);
10591 if (fndecl
10592 && TREE_CODE (fndecl) == FUNCTION_DECL
10593 && DECL_BUILT_IN (fndecl)
10594 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10595 yet. Defer folding until we see all the arguments
10596 (after inlining). */
10597 && !CALL_EXPR_VA_ARG_PACK (exp))
10599 int nargs = call_expr_nargs (exp);
10601 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set; instead
10602 the last argument is __builtin_va_arg_pack (). Defer folding
10603 even in that case, until the arguments are finalized. */
10604 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10606 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10607 if (fndecl2
10608 && TREE_CODE (fndecl2) == FUNCTION_DECL
10609 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10610 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10611 return NULL_TREE;
10614 /* FIXME: Don't use a list in this interface. */
10615 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10616 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10617 else
10619 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10621 tree *args = CALL_EXPR_ARGP (exp);
10622 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10624 if (!ret)
10625 ret = fold_builtin_varargs (fndecl, exp, ignore);
10626 if (ret)
10628 /* Propagate location information from original call to
10629 expansion of builtin. Otherwise things like
10630 maybe_emit_chk_warning, that operate on the expansion
10631 of a builtin, will use the wrong location information. */
10632 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10634 tree realret = ret;
10635 if (TREE_CODE (ret) == NOP_EXPR)
10636 realret = TREE_OPERAND (ret, 0);
10637 if (CAN_HAVE_LOCATION_P (realret)
10638 && !EXPR_HAS_LOCATION (realret))
10639 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10640 return realret;
10642 return ret;
10646 return NULL_TREE;
10649 /* Conveniently construct a function call expression. FNDECL names the
10650 function to be called and ARGLIST is a TREE_LIST of arguments. */
10652 tree
10653 build_function_call_expr (tree fndecl, tree arglist)
10655 tree fntype = TREE_TYPE (fndecl);
10656 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10657 int n = list_length (arglist);
10658 tree *argarray = (tree *) alloca (n * sizeof (tree));
10659 int i;
10661 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10662 argarray[i] = TREE_VALUE (arglist);
10663 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10666 /* Conveniently construct a function call expression. FNDECL names the
10667 function to be called, N is the number of arguments, and the "..."
10668 parameters are the argument expressions. */
10670 tree
10671 build_call_expr (tree fndecl, int n, ...)
10673 va_list ap;
10674 tree fntype = TREE_TYPE (fndecl);
10675 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10676 tree *argarray = (tree *) alloca (n * sizeof (tree));
10677 int i;
10679 va_start (ap, n);
10680 for (i = 0; i < n; i++)
10681 argarray[i] = va_arg (ap, tree);
10682 va_end (ap);
10683 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10686 /* Construct a CALL_EXPR of type TYPE with FN as the function expression
10687 and the N arguments passed in the array ARGARRAY, folding the call if possible. */
10689 tree
10690 fold_builtin_call_array (tree type,
10691 tree fn,
10692 int n,
10693 tree *argarray)
10695 tree ret = NULL_TREE;
10696 int i;
10697 tree exp;
10699 if (TREE_CODE (fn) == ADDR_EXPR)
10701 tree fndecl = TREE_OPERAND (fn, 0);
10702 if (TREE_CODE (fndecl) == FUNCTION_DECL
10703 && DECL_BUILT_IN (fndecl))
10705 /* If last argument is __builtin_va_arg_pack (), arguments to this
10706 function are not finalized yet. Defer folding until they are. */
10707 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10709 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10710 if (fndecl2
10711 && TREE_CODE (fndecl2) == FUNCTION_DECL
10712 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10713 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10714 return build_call_array (type, fn, n, argarray);
10716 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10718 tree arglist = NULL_TREE;
10719 for (i = n - 1; i >= 0; i--)
10720 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10721 ret = targetm.fold_builtin (fndecl, arglist, false);
10722 if (ret)
10723 return ret;
10725 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10727 /* First try the transformations that don't require consing up
10728 an exp. */
10729 ret = fold_builtin_n (fndecl, argarray, n, false);
10730 if (ret)
10731 return ret;
10734 /* If we got this far, we need to build an exp. */
10735 exp = build_call_array (type, fn, n, argarray);
10736 ret = fold_builtin_varargs (fndecl, exp, false);
10737 return ret ? ret : exp;
10741 return build_call_array (type, fn, n, argarray);
10744 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10745 along with N new arguments specified as the "..." parameters. SKIP
10746 is the number of arguments in EXP to be omitted. This function is used
10747 to do varargs-to-varargs transformations. */
10749 static tree
10750 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10752 int oldnargs = call_expr_nargs (exp);
10753 int nargs = oldnargs - skip + n;
10754 tree fntype = TREE_TYPE (fndecl);
10755 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10756 tree *buffer;
10758 if (n > 0)
10760 int i, j;
10761 va_list ap;
10763 buffer = XALLOCAVEC (tree, nargs);
10764 va_start (ap, n);
10765 for (i = 0; i < n; i++)
10766 buffer[i] = va_arg (ap, tree);
10767 va_end (ap);
10768 for (j = skip; j < oldnargs; j++, i++)
10769 buffer[i] = CALL_EXPR_ARG (exp, j);
10771 else
10772 buffer = CALL_EXPR_ARGP (exp) + skip;
10774 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10777 /* Validate a single argument ARG against a tree code CODE representing
10778 a type. */
10780 static bool
10781 validate_arg (const_tree arg, enum tree_code code)
10783 if (!arg)
10784 return false;
10785 else if (code == POINTER_TYPE)
10786 return POINTER_TYPE_P (TREE_TYPE (arg));
10787 else if (code == INTEGER_TYPE)
10788 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10789 return code == TREE_CODE (TREE_TYPE (arg));
10792 /* This function validates the types of a function call argument list
10793 against a specified list of tree_codes. If the last specifier is a 0,
10794 that represents an ellipsis; otherwise the last specifier must be a
10795 VOID_TYPE. */
10797 bool
10798 validate_arglist (const_tree callexpr, ...)
10800 enum tree_code code;
10801 bool res = 0;
10802 va_list ap;
10803 const_call_expr_arg_iterator iter;
10804 const_tree arg;
10806 va_start (ap, callexpr);
10807 init_const_call_expr_arg_iterator (callexpr, &iter);
10811 code = va_arg (ap, enum tree_code);
10812 switch (code)
10814 case 0:
10815 /* This signifies an ellipsis; any further arguments are all OK. */
10816 res = true;
10817 goto end;
10818 case VOID_TYPE:
10819 /* This signifies an endlink; if no arguments remain, return
10820 true, otherwise return false. */
10821 res = !more_const_call_expr_args_p (&iter);
10822 goto end;
10823 default:
10824 /* If no parameters remain or the parameter's code does not
10825 match the specified code, return false. Otherwise continue
10826 checking any remaining arguments. */
10827 arg = next_const_call_expr_arg (&iter);
10828 if (!validate_arg (arg, code))
10829 goto end;
10830 break;
10833 while (1);
10835 /* We need gotos here so that every exit path reaches the single
10836 va_end call below. */
10837 end: ;
10838 va_end (ap);
10840 return res;
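/* Editor's illustration -- not part of the original source.  Typical
   uses of validate_arglist: the fpclassify folder above checks its
   fixed arguments with

     validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
                       INTEGER_TYPE, INTEGER_TYPE, REAL_TYPE, VOID_TYPE)

   while a trailing 0 instead of VOID_TYPE, as in the hypothetical
   validate_arglist (exp, POINTER_TYPE, 0), would accept any further
   arguments after the first pointer.  */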
10843 /* Default target-specific builtin expander that does nothing. */
10845 rtx
10846 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10847 rtx target ATTRIBUTE_UNUSED,
10848 rtx subtarget ATTRIBUTE_UNUSED,
10849 enum machine_mode mode ATTRIBUTE_UNUSED,
10850 int ignore ATTRIBUTE_UNUSED)
10852 return NULL_RTX;
10855 /* Returns true if EXP represents data that would potentially reside
10856 in a readonly section. */
10858 static bool
10859 readonly_data_expr (tree exp)
10861 STRIP_NOPS (exp);
10863 if (TREE_CODE (exp) != ADDR_EXPR)
10864 return false;
10866 exp = get_base_address (TREE_OPERAND (exp, 0));
10867 if (!exp)
10868 return false;
10870 /* Make sure we call decl_readonly_section only for trees it
10871 can handle (since it returns true for everything it doesn't
10872 understand). */
10873 if (TREE_CODE (exp) == STRING_CST
10874 || TREE_CODE (exp) == CONSTRUCTOR
10875 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10876 return decl_readonly_section (exp, 0);
10877 else
10878 return false;
10881 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10882 to the call, and TYPE is its return type.
10884 Return NULL_TREE if no simplification was possible, otherwise return the
10885 simplified form of the call as a tree.
10887 The simplified form may be a constant or other expression which
10888 computes the same value, but in a more efficient manner (including
10889 calls to other builtin functions).
10891 The call may contain arguments which need to be evaluated, but
10892 which are not useful to determine the result of the call. In
10893 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10894 COMPOUND_EXPR will be an argument which must be evaluated.
10895 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10896 COMPOUND_EXPR in the chain will contain the tree for the simplified
10897 form of the builtin function call. */
10899 static tree
10900 fold_builtin_strstr (tree s1, tree s2, tree type)
10902 if (!validate_arg (s1, POINTER_TYPE)
10903 || !validate_arg (s2, POINTER_TYPE))
10904 return NULL_TREE;
10905 else
10907 tree fn;
10908 const char *p1, *p2;
10910 p2 = c_getstr (s2);
10911 if (p2 == NULL)
10912 return NULL_TREE;
10914 p1 = c_getstr (s1);
10915 if (p1 != NULL)
10917 const char *r = strstr (p1, p2);
10918 tree tem;
10920 if (r == NULL)
10921 return build_int_cst (TREE_TYPE (s1), 0);
10923 /* Return an offset into the constant string argument. */
10924 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10925 s1, size_int (r - p1));
10926 return fold_convert (type, tem);
10929 /* The argument is const char *, and the result is char *, so we need
10930 a type conversion here to avoid a warning. */
10931 if (p2[0] == '\0')
10932 return fold_convert (type, s1);
10934 if (p2[1] != '\0')
10935 return NULL_TREE;
10937 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10938 if (!fn)
10939 return NULL_TREE;
10941 /* New argument list transforming strstr(s1, s2) to
10942 strchr(s1, s2[0]). */
10943 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
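/* Editor's illustration -- not part of the original source.  Examples of
   the strstr foldings above:

     strstr ("hello world", "lo w") folds to "hello world" + 3,
     strstr (s, "")                 folds to (char *) s,
     strstr (s, "w")                becomes strchr (s, 'w').  */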
10947 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10948 the call, and TYPE is its return type.
10950 Return NULL_TREE if no simplification was possible, otherwise return the
10951 simplified form of the call as a tree.
10953 The simplified form may be a constant or other expression which
10954 computes the same value, but in a more efficient manner (including
10955 calls to other builtin functions).
10957 The call may contain arguments which need to be evaluated, but
10958 which are not useful to determine the result of the call. In
10959 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10960 COMPOUND_EXPR will be an argument which must be evaluated.
10961 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10962 COMPOUND_EXPR in the chain will contain the tree for the simplified
10963 form of the builtin function call. */
10965 static tree
10966 fold_builtin_strchr (tree s1, tree s2, tree type)
10968 if (!validate_arg (s1, POINTER_TYPE)
10969 || !validate_arg (s2, INTEGER_TYPE))
10970 return NULL_TREE;
10971 else
10973 const char *p1;
10975 if (TREE_CODE (s2) != INTEGER_CST)
10976 return NULL_TREE;
10978 p1 = c_getstr (s1);
10979 if (p1 != NULL)
10981 char c;
10982 const char *r;
10983 tree tem;
10985 if (target_char_cast (s2, &c))
10986 return NULL_TREE;
10988 r = strchr (p1, c);
10990 if (r == NULL)
10991 return build_int_cst (TREE_TYPE (s1), 0);
10993 /* Return an offset into the constant string argument. */
10994 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10995 s1, size_int (r - p1));
10996 return fold_convert (type, tem);
10998 return NULL_TREE;
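/* Editor's illustration -- not part of the original source.  With a
   constant string the strchr folding above yields an offset directly,
   e.g. strchr ("abcdef", 'd') folds to "abcdef" + 3 and
   strchr ("abc", 'z') folds to a null pointer.  */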
11002 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11003 the call, and TYPE is its return type.
11005 Return NULL_TREE if no simplification was possible, otherwise return the
11006 simplified form of the call as a tree.
11008 The simplified form may be a constant or other expression which
11009 computes the same value, but in a more efficient manner (including
11010 calls to other builtin functions).
11012 The call may contain arguments which need to be evaluated, but
11013 which are not useful to determine the result of the call. In
11014 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11015 COMPOUND_EXPR will be an argument which must be evaluated.
11016 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11017 COMPOUND_EXPR in the chain will contain the tree for the simplified
11018 form of the builtin function call. */
11020 static tree
11021 fold_builtin_strrchr (tree s1, tree s2, tree type)
11023 if (!validate_arg (s1, POINTER_TYPE)
11024 || !validate_arg (s2, INTEGER_TYPE))
11025 return NULL_TREE;
11026 else
11028 tree fn;
11029 const char *p1;
11031 if (TREE_CODE (s2) != INTEGER_CST)
11032 return NULL_TREE;
11034 p1 = c_getstr (s1);
11035 if (p1 != NULL)
11037 char c;
11038 const char *r;
11039 tree tem;
11041 if (target_char_cast (s2, &c))
11042 return NULL_TREE;
11044 r = strrchr (p1, c);
11046 if (r == NULL)
11047 return build_int_cst (TREE_TYPE (s1), 0);
11049 /* Return an offset into the constant string argument. */
11050 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11051 s1, size_int (r - p1));
11052 return fold_convert (type, tem);
11055 if (! integer_zerop (s2))
11056 return NULL_TREE;
11058 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11059 if (!fn)
11060 return NULL_TREE;
11062 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11063 return build_call_expr (fn, 2, s1, s2);
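/* For illustration of the folding above: strrchr ("abcba", 'b') folds to
   "abcba" + 3, and strrchr (s, '\0') is rewritten as strchr (s, '\0'). */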
11067 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11068 to the call, and TYPE is its return type.
11070 Return NULL_TREE if no simplification was possible, otherwise return the
11071 simplified form of the call as a tree.
11073 The simplified form may be a constant or other expression which
11074 computes the same value, but in a more efficient manner (including
11075 calls to other builtin functions).
11077 The call may contain arguments which need to be evaluated, but
11078 which are not useful to determine the result of the call. In
11079 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11080 COMPOUND_EXPR will be an argument which must be evaluated.
11081 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11082 COMPOUND_EXPR in the chain will contain the tree for the simplified
11083 form of the builtin function call. */
11085 static tree
11086 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11088 if (!validate_arg (s1, POINTER_TYPE)
11089 || !validate_arg (s2, POINTER_TYPE))
11090 return NULL_TREE;
11091 else
11093 tree fn;
11094 const char *p1, *p2;
11096 p2 = c_getstr (s2);
11097 if (p2 == NULL)
11098 return NULL_TREE;
11100 p1 = c_getstr (s1);
11101 if (p1 != NULL)
11103 const char *r = strpbrk (p1, p2);
11104 tree tem;
11106 if (r == NULL)
11107 return build_int_cst (TREE_TYPE (s1), 0);
11109 /* Return an offset into the constant string argument. */
11110 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11111 s1, size_int (r - p1));
11112 return fold_convert (type, tem);
11115 if (p2[0] == '\0')
11116 /* strpbrk(x, "") == NULL.
11117 Evaluate and ignore s1 in case it had side-effects. */
11118 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11120 if (p2[1] != '\0')
11121 return NULL_TREE; /* Really call strpbrk. */
11123 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11124 if (!fn)
11125 return NULL_TREE;
11127 /* New argument list transforming strpbrk(s1, s2) to
11128 strchr(s1, s2[0]). */
11129 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
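/* For illustration of the folding above: strpbrk ("abcde", "dx") folds to
   "abcde" + 3; strpbrk (s1, "") folds to a null pointer while still
   evaluating s1; and strpbrk (s1, "x") is rewritten as strchr (s1, 'x'). */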
11133 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11134 to the call.
11136 Return NULL_TREE if no simplification was possible, otherwise return the
11137 simplified form of the call as a tree.
11139 The simplified form may be a constant or other expression which
11140 computes the same value, but in a more efficient manner (including
11141 calls to other builtin functions).
11143 The call may contain arguments which need to be evaluated, but
11144 which are not useful to determine the result of the call. In
11145 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11146 COMPOUND_EXPR will be an argument which must be evaluated.
11147 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11148 COMPOUND_EXPR in the chain will contain the tree for the simplified
11149 form of the builtin function call. */
11151 static tree
11152 fold_builtin_strcat (tree dst, tree src)
11154 if (!validate_arg (dst, POINTER_TYPE)
11155 || !validate_arg (src, POINTER_TYPE))
11156 return NULL_TREE;
11157 else
11159 const char *p = c_getstr (src);
11161 /* If the string length is zero, return the dst parameter. */
11162 if (p && *p == '\0')
11163 return dst;
11165 return NULL_TREE;
11169 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11170 arguments to the call.
11172 Return NULL_TREE if no simplification was possible, otherwise return the
11173 simplified form of the call as a tree.
11175 The simplified form may be a constant or other expression which
11176 computes the same value, but in a more efficient manner (including
11177 calls to other builtin functions).
11179 The call may contain arguments which need to be evaluated, but
11180 which are not useful to determine the result of the call. In
11181 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11182 COMPOUND_EXPR will be an argument which must be evaluated.
11183 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11184 COMPOUND_EXPR in the chain will contain the tree for the simplified
11185 form of the builtin function call. */
11187 static tree
11188 fold_builtin_strncat (tree dst, tree src, tree len)
11190 if (!validate_arg (dst, POINTER_TYPE)
11191 || !validate_arg (src, POINTER_TYPE)
11192 || !validate_arg (len, INTEGER_TYPE))
11193 return NULL_TREE;
11194 else
11196 const char *p = c_getstr (src);
11198 /* If the requested length is zero, or the src parameter string
11199 length is zero, return the dst parameter. */
11200 if (integer_zerop (len) || (p && *p == '\0'))
11201 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11203 /* If the requested len is greater than or equal to the string
11204 length, call strcat. */
11205 if (TREE_CODE (len) == INTEGER_CST && p
11206 && compare_tree_int (len, strlen (p)) >= 0)
11208 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11210 /* If the replacement _DECL isn't initialized, don't do the
11211 transformation. */
11212 if (!fn)
11213 return NULL_TREE;
11215 return build_call_expr (fn, 2, dst, src);
11217 return NULL_TREE;
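/* For illustration of the folding above: strncat (dst, src, 0) and
   strncat (dst, "", n) fold to dst, and strncat (dst, "ab", 5) is
   rewritten as strcat (dst, "ab") because the bound is at least
   strlen ("ab"). */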
11221 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11222 to the call.
11224 Return NULL_TREE if no simplification was possible, otherwise return the
11225 simplified form of the call as a tree.
11227 The simplified form may be a constant or other expression which
11228 computes the same value, but in a more efficient manner (including
11229 calls to other builtin functions).
11231 The call may contain arguments which need to be evaluated, but
11232 which are not useful to determine the result of the call. In
11233 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11234 COMPOUND_EXPR will be an argument which must be evaluated.
11235 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11236 COMPOUND_EXPR in the chain will contain the tree for the simplified
11237 form of the builtin function call. */
11239 static tree
11240 fold_builtin_strspn (tree s1, tree s2)
11242 if (!validate_arg (s1, POINTER_TYPE)
11243 || !validate_arg (s2, POINTER_TYPE))
11244 return NULL_TREE;
11245 else
11247 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11249 /* If both arguments are constants, evaluate at compile-time. */
11250 if (p1 && p2)
11252 const size_t r = strspn (p1, p2);
11253 return size_int (r);
11256 /* If either argument is "", return NULL_TREE. */
11257 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11258 /* Evaluate and ignore both arguments in case either one has
11259 side-effects. */
11260 return omit_two_operands (integer_type_node, integer_zero_node,
11261 s1, s2);
11262 return NULL_TREE;
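/* For illustration of the folding above: strspn ("aabbcc", "ab") folds to
   the constant 4, and if either argument is known to be "" the result
   folds to 0 while both arguments are still evaluated. */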
11266 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11267 to the call.
11269 Return NULL_TREE if no simplification was possible, otherwise return the
11270 simplified form of the call as a tree.
11272 The simplified form may be a constant or other expression which
11273 computes the same value, but in a more efficient manner (including
11274 calls to other builtin functions).
11276 The call may contain arguments which need to be evaluated, but
11277 which are not useful to determine the result of the call. In
11278 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11279 COMPOUND_EXPR will be an argument which must be evaluated.
11280 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11281 COMPOUND_EXPR in the chain will contain the tree for the simplified
11282 form of the builtin function call. */
11284 static tree
11285 fold_builtin_strcspn (tree s1, tree s2)
11287 if (!validate_arg (s1, POINTER_TYPE)
11288 || !validate_arg (s2, POINTER_TYPE))
11289 return NULL_TREE;
11290 else
11292 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11294 /* If both arguments are constants, evaluate at compile-time. */
11295 if (p1 && p2)
11297 const size_t r = strcspn (p1, p2);
11298 return size_int (r);
11301 /* If the first argument is "", return NULL_TREE. */
11302 if (p1 && *p1 == '\0')
11304 /* Evaluate and ignore argument s2 in case it has
11305 side-effects. */
11306 return omit_one_operand (integer_type_node,
11307 integer_zero_node, s2);
11310 /* If the second argument is "", return __builtin_strlen(s1). */
11311 if (p2 && *p2 == '\0')
11313 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11315 /* If the replacement _DECL isn't initialized, don't do the
11316 transformation. */
11317 if (!fn)
11318 return NULL_TREE;
11320 return build_call_expr (fn, 1, s1);
11322 return NULL_TREE;
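/* For illustration of the folding above: strcspn ("aabbcc", "bc") folds
   to the constant 2, strcspn ("", s2) folds to 0 while still evaluating
   s2, and strcspn (s1, "") is rewritten as strlen (s1). */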
11326 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11327 to the call. IGNORE is true if the value returned
11328 by the builtin will be ignored. UNLOCKED is true if this is
11329 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11330 the known length of the string. Return NULL_TREE if no simplification
11331 was possible. */
11333 tree
11334 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11336 /* If we're using an unlocked function, assume the other unlocked
11337 functions exist explicitly. */
11338 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11339 : implicit_built_in_decls[BUILT_IN_FPUTC];
11340 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11341 : implicit_built_in_decls[BUILT_IN_FWRITE];
11343 /* If the return value is used, don't do the transformation. */
11344 if (!ignore)
11345 return NULL_TREE;
11347 /* Verify the arguments in the original call. */
11348 if (!validate_arg (arg0, POINTER_TYPE)
11349 || !validate_arg (arg1, POINTER_TYPE))
11350 return NULL_TREE;
11352 if (! len)
11353 len = c_strlen (arg0, 0);
11355 /* Get the length of the string passed to fputs. If the length
11356 can't be determined, punt. */
11357 if (!len
11358 || TREE_CODE (len) != INTEGER_CST)
11359 return NULL_TREE;
11361 switch (compare_tree_int (len, 1))
11363 case -1: /* length is 0, delete the call entirely. */
11364 return omit_one_operand (integer_type_node, integer_zero_node, arg1);
11366 case 0: /* length is 1, call fputc. */
11368 const char *p = c_getstr (arg0);
11370 if (p != NULL)
11372 if (fn_fputc)
11373 return build_call_expr (fn_fputc, 2,
11374 build_int_cst (NULL_TREE, p[0]), arg1);
11375 else
11376 return NULL_TREE;
11379 /* FALLTHROUGH */
11380 case 1: /* length is greater than 1, call fwrite. */
11382 /* If optimizing for size, keep fputs. */
11383 if (optimize_size)
11384 return NULL_TREE;
11385 /* New argument list transforming fputs(string, stream) to
11386 fwrite(string, 1, len, stream). */
11387 if (fn_fwrite)
11388 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11389 else
11390 return NULL_TREE;
11392 default:
11393 gcc_unreachable ();
11395 return NULL_TREE;
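/* For illustration of the folding above (only done when the result is
   unused): fputs ("", f) is removed entirely, fputs ("x", f) becomes
   fputc ('x', f), and fputs ("hello", f) becomes
   fwrite ("hello", 1, 5, f) unless we are optimizing for size. */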
11398 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11399 produced. False otherwise. This is done so that we don't output the error
11400 or warning twice or three times. */
11401 bool
11402 fold_builtin_next_arg (tree exp, bool va_start_p)
11404 tree fntype = TREE_TYPE (current_function_decl);
11405 int nargs = call_expr_nargs (exp);
11406 tree arg;
11408 if (TYPE_ARG_TYPES (fntype) == 0
11409 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11410 == void_type_node))
11412 error ("%<va_start%> used in function with fixed args");
11413 return true;
11416 if (va_start_p)
11418 if (va_start_p && (nargs != 2))
11420 error ("wrong number of arguments to function %<va_start%>");
11421 return true;
11423 arg = CALL_EXPR_ARG (exp, 1);
11425 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
11426 once we have checked the arguments and, if needed, issued a warning. */
11427 else
11429 if (nargs == 0)
11431 /* Evidently an out of date version of <stdarg.h>; can't validate
11432 va_start's second argument, but can still work as intended. */
11433 warning (0, "%<__builtin_next_arg%> called without an argument");
11434 return true;
11436 else if (nargs > 1)
11438 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11439 return true;
11441 arg = CALL_EXPR_ARG (exp, 0);
11444 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11445 or __builtin_next_arg (0) the first time we see it, after checking
11446 the arguments and if needed issuing a warning. */
11447 if (!integer_zerop (arg))
11449 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11451 /* Strip off all nops for the sake of the comparison. This
11452 is not quite the same as STRIP_NOPS. It does more.
11453 We must also strip off INDIRECT_EXPR for C++ reference
11454 parameters. */
11455 while (CONVERT_EXPR_P (arg)
11456 || TREE_CODE (arg) == INDIRECT_REF)
11457 arg = TREE_OPERAND (arg, 0);
11458 if (arg != last_parm)
11460 /* FIXME: Sometimes with the tree optimizers we can end up with
11461 something other than the last argument even though the user
11462 used the last argument. We just warn and set the arg to be
11463 the last argument so that we will get wrong-code because of
11464 it. */
11465 warning (0, "second parameter of %<va_start%> not last named argument");
11467 /* We want to verify the second parameter just once before the tree
11468 optimizers are run and then avoid keeping it in the tree,
11469 as otherwise we could warn even for correct code like:
11470 void foo (int i, ...)
11471 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11472 if (va_start_p)
11473 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11474 else
11475 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11477 return false;
11481 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11482 ORIG may be null if this is a 2-argument call. We don't attempt to
11483 simplify calls with more than 3 arguments.
11485 Return NULL_TREE if no simplification was possible, otherwise return the
11486 simplified form of the call as a tree. If IGNORED is true, it means that
11487 the caller does not use the returned value of the function. */
11489 static tree
11490 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11492 tree call, retval;
11493 const char *fmt_str = NULL;
11495 /* Verify the required arguments in the original call. We deal with two
11496 types of sprintf() calls: 'sprintf (str, fmt)' and
11497 'sprintf (dest, "%s", orig)'. */
11498 if (!validate_arg (dest, POINTER_TYPE)
11499 || !validate_arg (fmt, POINTER_TYPE))
11500 return NULL_TREE;
11501 if (orig && !validate_arg (orig, POINTER_TYPE))
11502 return NULL_TREE;
11504 /* Check whether the format is a literal string constant. */
11505 fmt_str = c_getstr (fmt);
11506 if (fmt_str == NULL)
11507 return NULL_TREE;
11509 call = NULL_TREE;
11510 retval = NULL_TREE;
11512 if (!init_target_chars ())
11513 return NULL_TREE;
11515 /* If the format doesn't contain % args or %%, use strcpy. */
11516 if (strchr (fmt_str, target_percent) == NULL)
11518 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11520 if (!fn)
11521 return NULL_TREE;
11523 /* Don't optimize sprintf (buf, "abc", ptr++). */
11524 if (orig)
11525 return NULL_TREE;
11527 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11528 'format' is known to contain no % formats. */
11529 call = build_call_expr (fn, 2, dest, fmt);
11530 if (!ignored)
11531 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11534 /* If the format is "%s", use strcpy if the result isn't used. */
11535 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11537 tree fn;
11538 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11540 if (!fn)
11541 return NULL_TREE;
11543 /* Don't crash on sprintf (str1, "%s"). */
11544 if (!orig)
11545 return NULL_TREE;
11547 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11548 if (!ignored)
11550 retval = c_strlen (orig, 1);
11551 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11552 return NULL_TREE;
11554 call = build_call_expr (fn, 2, dest, orig);
11557 if (call && retval)
11559 retval = fold_convert
11560 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11561 retval);
11562 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11564 else
11565 return call;
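/* For illustration of the folding above: sprintf (buf, "hello") becomes
   strcpy (buf, "hello") with the constant 5 substituted for a used
   result, and sprintf (buf, "%s", s) becomes strcpy (buf, s) when the
   result is unused or the length of s is a known constant. */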
11568 /* Expand a call EXP to __builtin_object_size. */
11570 static rtx
11571 expand_builtin_object_size (tree exp)
11573 tree ost;
11574 int object_size_type;
11575 tree fndecl = get_callee_fndecl (exp);
11577 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11579 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11580 exp, fndecl);
11581 expand_builtin_trap ();
11582 return const0_rtx;
11585 ost = CALL_EXPR_ARG (exp, 1);
11586 STRIP_NOPS (ost);
11588 if (TREE_CODE (ost) != INTEGER_CST
11589 || tree_int_cst_sgn (ost) < 0
11590 || compare_tree_int (ost, 3) > 0)
11592 error ("%Klast argument of %D is not integer constant between 0 and 3",
11593 exp, fndecl);
11594 expand_builtin_trap ();
11595 return const0_rtx;
11598 object_size_type = tree_low_cst (ost, 0);
11600 return object_size_type < 2 ? constm1_rtx : const0_rtx;
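/* Note on the fallback above: when the object size is still unknown at
   expansion time, the result is (size_t) -1 for types 0 and 1 and
   (size_t) 0 for types 2 and 3. */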
11603 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11604 FCODE is the BUILT_IN_* to use.
11605 Return NULL_RTX if we failed; the caller should emit a normal call,
11606 otherwise try to get the result in TARGET, if convenient (and in
11607 mode MODE if that's convenient). */
11609 static rtx
11610 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11611 enum built_in_function fcode)
11613 tree dest, src, len, size;
11615 if (!validate_arglist (exp,
11616 POINTER_TYPE,
11617 fcode == BUILT_IN_MEMSET_CHK
11618 ? INTEGER_TYPE : POINTER_TYPE,
11619 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11620 return NULL_RTX;
11622 dest = CALL_EXPR_ARG (exp, 0);
11623 src = CALL_EXPR_ARG (exp, 1);
11624 len = CALL_EXPR_ARG (exp, 2);
11625 size = CALL_EXPR_ARG (exp, 3);
11627 if (! host_integerp (size, 1))
11628 return NULL_RTX;
11630 if (host_integerp (len, 1) || integer_all_onesp (size))
11632 tree fn;
11634 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11636 warning (0, "%Kcall to %D will always overflow destination buffer",
11637 exp, get_callee_fndecl (exp));
11638 return NULL_RTX;
11641 fn = NULL_TREE;
11642 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11643 mem{cpy,pcpy,move,set} is available. */
11644 switch (fcode)
11646 case BUILT_IN_MEMCPY_CHK:
11647 fn = built_in_decls[BUILT_IN_MEMCPY];
11648 break;
11649 case BUILT_IN_MEMPCPY_CHK:
11650 fn = built_in_decls[BUILT_IN_MEMPCPY];
11651 break;
11652 case BUILT_IN_MEMMOVE_CHK:
11653 fn = built_in_decls[BUILT_IN_MEMMOVE];
11654 break;
11655 case BUILT_IN_MEMSET_CHK:
11656 fn = built_in_decls[BUILT_IN_MEMSET];
11657 break;
11658 default:
11659 break;
11662 if (! fn)
11663 return NULL_RTX;
11665 fn = build_call_expr (fn, 3, dest, src, len);
11666 STRIP_TYPE_NOPS (fn);
11667 while (TREE_CODE (fn) == COMPOUND_EXPR)
11669 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11670 EXPAND_NORMAL);
11671 fn = TREE_OPERAND (fn, 1);
11673 if (TREE_CODE (fn) == CALL_EXPR)
11674 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11675 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11677 else if (fcode == BUILT_IN_MEMSET_CHK)
11678 return NULL_RTX;
11679 else
11681 unsigned int dest_align
11682 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11684 /* If DEST is not a pointer type, call the normal function. */
11685 if (dest_align == 0)
11686 return NULL_RTX;
11688 /* If SRC and DEST are the same (and not volatile), do nothing. */
11689 if (operand_equal_p (src, dest, 0))
11691 tree expr;
11693 if (fcode != BUILT_IN_MEMPCPY_CHK)
11695 /* Evaluate and ignore LEN in case it has side-effects. */
11696 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11697 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11700 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11701 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11704 /* __memmove_chk special case. */
11705 if (fcode == BUILT_IN_MEMMOVE_CHK)
11707 unsigned int src_align
11708 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11710 if (src_align == 0)
11711 return NULL_RTX;
11713 /* If src is categorized for a readonly section we can use
11714 normal __memcpy_chk. */
11715 if (readonly_data_expr (src))
11717 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11718 if (!fn)
11719 return NULL_RTX;
11720 fn = build_call_expr (fn, 4, dest, src, len, size);
11721 STRIP_TYPE_NOPS (fn);
11722 while (TREE_CODE (fn) == COMPOUND_EXPR)
11724 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11725 EXPAND_NORMAL);
11726 fn = TREE_OPERAND (fn, 1);
11728 if (TREE_CODE (fn) == CALL_EXPR)
11729 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11730 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11733 return NULL_RTX;
11737 /* Emit warning if a buffer overflow is detected at compile time. */
11739 static void
11740 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11742 int is_strlen = 0;
11743 tree len, size;
11745 switch (fcode)
11747 case BUILT_IN_STRCPY_CHK:
11748 case BUILT_IN_STPCPY_CHK:
11749 /* For __strcat_chk the warning will be emitted only if overflowing
11750 by at least strlen (dest) + 1 bytes. */
11751 case BUILT_IN_STRCAT_CHK:
11752 len = CALL_EXPR_ARG (exp, 1);
11753 size = CALL_EXPR_ARG (exp, 2);
11754 is_strlen = 1;
11755 break;
11756 case BUILT_IN_STRNCAT_CHK:
11757 case BUILT_IN_STRNCPY_CHK:
11758 len = CALL_EXPR_ARG (exp, 2);
11759 size = CALL_EXPR_ARG (exp, 3);
11760 break;
11761 case BUILT_IN_SNPRINTF_CHK:
11762 case BUILT_IN_VSNPRINTF_CHK:
11763 len = CALL_EXPR_ARG (exp, 1);
11764 size = CALL_EXPR_ARG (exp, 3);
11765 break;
11766 default:
11767 gcc_unreachable ();
11770 if (!len || !size)
11771 return;
11773 if (! host_integerp (size, 1) || integer_all_onesp (size))
11774 return;
11776 if (is_strlen)
11778 len = c_strlen (len, 1);
11779 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11780 return;
11782 else if (fcode == BUILT_IN_STRNCAT_CHK)
11784 tree src = CALL_EXPR_ARG (exp, 1);
11785 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11786 return;
11787 src = c_strlen (src, 1);
11788 if (! src || ! host_integerp (src, 1))
11790 warning (0, "%Kcall to %D might overflow destination buffer",
11791 exp, get_callee_fndecl (exp));
11792 return;
11794 else if (tree_int_cst_lt (src, size))
11795 return;
11797 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11798 return;
11800 warning (0, "%Kcall to %D will always overflow destination buffer",
11801 exp, get_callee_fndecl (exp));
11804 /* Emit warning if a buffer overflow is detected at compile time
11805 in __sprintf_chk/__vsprintf_chk calls. */
11807 static void
11808 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11810 tree dest, size, len, fmt, flag;
11811 const char *fmt_str;
11812 int nargs = call_expr_nargs (exp);
11814 /* Verify the required arguments in the original call. */
11816 if (nargs < 4)
11817 return;
11818 dest = CALL_EXPR_ARG (exp, 0);
11819 flag = CALL_EXPR_ARG (exp, 1);
11820 size = CALL_EXPR_ARG (exp, 2);
11821 fmt = CALL_EXPR_ARG (exp, 3);
11823 if (! host_integerp (size, 1) || integer_all_onesp (size))
11824 return;
11826 /* Check whether the format is a literal string constant. */
11827 fmt_str = c_getstr (fmt);
11828 if (fmt_str == NULL)
11829 return;
11831 if (!init_target_chars ())
11832 return;
11834 /* If the format doesn't contain % args or %%, we know its size. */
11835 if (strchr (fmt_str, target_percent) == 0)
11836 len = build_int_cstu (size_type_node, strlen (fmt_str));
11837 /* If the format is "%s" and first ... argument is a string literal,
11838 we know it too. */
11839 else if (fcode == BUILT_IN_SPRINTF_CHK
11840 && strcmp (fmt_str, target_percent_s) == 0)
11842 tree arg;
11844 if (nargs < 5)
11845 return;
11846 arg = CALL_EXPR_ARG (exp, 4);
11847 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11848 return;
11850 len = c_strlen (arg, 1);
11851 if (!len || ! host_integerp (len, 1))
11852 return;
11854 else
11855 return;
11857 if (! tree_int_cst_lt (len, size))
11859 warning (0, "%Kcall to %D will always overflow destination buffer",
11860 exp, get_callee_fndecl (exp));
11864 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11865 if possible. */
11867 tree
11868 fold_builtin_object_size (tree ptr, tree ost)
11870 tree ret = NULL_TREE;
11871 int object_size_type;
11873 if (!validate_arg (ptr, POINTER_TYPE)
11874 || !validate_arg (ost, INTEGER_TYPE))
11875 return NULL_TREE;
11877 STRIP_NOPS (ost);
11879 if (TREE_CODE (ost) != INTEGER_CST
11880 || tree_int_cst_sgn (ost) < 0
11881 || compare_tree_int (ost, 3) > 0)
11882 return NULL_TREE;
11884 object_size_type = tree_low_cst (ost, 0);
11886 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11887 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11888 and (size_t) 0 for types 2 and 3. */
11889 if (TREE_SIDE_EFFECTS (ptr))
11890 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11892 if (TREE_CODE (ptr) == ADDR_EXPR)
11893 ret = build_int_cstu (size_type_node,
11894 compute_builtin_object_size (ptr, object_size_type));
11896 else if (TREE_CODE (ptr) == SSA_NAME)
11898 unsigned HOST_WIDE_INT bytes;
11900 /* If object size is not known yet, delay folding until
11901 later. Maybe subsequent passes will help determining
11902 it. */
11903 bytes = compute_builtin_object_size (ptr, object_size_type);
11904 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11905 ? -1 : 0))
11906 ret = build_int_cstu (size_type_node, bytes);
11909 if (ret)
11911 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11912 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11913 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11914 ret = NULL_TREE;
11917 return ret;
11920 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11921 DEST, SRC, LEN, and SIZE are the arguments to the call.
11922 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11923 code of the builtin. If MAXLEN is not NULL, it is maximum length
11924 passed as third argument. */
11926 tree
11927 fold_builtin_memory_chk (tree fndecl,
11928 tree dest, tree src, tree len, tree size,
11929 tree maxlen, bool ignore,
11930 enum built_in_function fcode)
11932 tree fn;
11934 if (!validate_arg (dest, POINTER_TYPE)
11935 || !validate_arg (src,
11936 (fcode == BUILT_IN_MEMSET_CHK
11937 ? INTEGER_TYPE : POINTER_TYPE))
11938 || !validate_arg (len, INTEGER_TYPE)
11939 || !validate_arg (size, INTEGER_TYPE))
11940 return NULL_TREE;
11942 /* If SRC and DEST are the same (and not volatile), return DEST
11943 (resp. DEST+LEN for __mempcpy_chk). */
11944 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11946 if (fcode != BUILT_IN_MEMPCPY_CHK)
11947 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11948 else
11950 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11951 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
11955 if (! host_integerp (size, 1))
11956 return NULL_TREE;
11958 if (! integer_all_onesp (size))
11960 if (! host_integerp (len, 1))
11962 /* If LEN is not constant, try MAXLEN too.
11963 For MAXLEN only allow optimizing into non-_ocs function
11964 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11965 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11967 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11969 /* (void) __mempcpy_chk () can be optimized into
11970 (void) __memcpy_chk (). */
11971 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11972 if (!fn)
11973 return NULL_TREE;
11975 return build_call_expr (fn, 4, dest, src, len, size);
11977 return NULL_TREE;
11980 else
11981 maxlen = len;
11983 if (tree_int_cst_lt (size, maxlen))
11984 return NULL_TREE;
11987 fn = NULL_TREE;
11988 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11989 mem{cpy,pcpy,move,set} is available. */
11990 switch (fcode)
11992 case BUILT_IN_MEMCPY_CHK:
11993 fn = built_in_decls[BUILT_IN_MEMCPY];
11994 break;
11995 case BUILT_IN_MEMPCPY_CHK:
11996 fn = built_in_decls[BUILT_IN_MEMPCPY];
11997 break;
11998 case BUILT_IN_MEMMOVE_CHK:
11999 fn = built_in_decls[BUILT_IN_MEMMOVE];
12000 break;
12001 case BUILT_IN_MEMSET_CHK:
12002 fn = built_in_decls[BUILT_IN_MEMSET];
12003 break;
12004 default:
12005 break;
12008 if (!fn)
12009 return NULL_TREE;
12011 return build_call_expr (fn, 3, dest, src, len);
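/* For illustration of the folding above: __memcpy_chk (d, s, n, (size_t) -1)
   becomes memcpy (d, s, n), as does __memcpy_chk (d, s, 16, 32) since the
   known length fits the object size; otherwise the checking variant is
   kept. */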
12014 /* Fold a call to the __st[rp]cpy_chk builtin.
12015 DEST, SRC, and SIZE are the arguments to the call.
12016 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12017 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12018 strings passed as second argument. */
12020 tree
12021 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12022 tree maxlen, bool ignore,
12023 enum built_in_function fcode)
12025 tree len, fn;
12027 if (!validate_arg (dest, POINTER_TYPE)
12028 || !validate_arg (src, POINTER_TYPE)
12029 || !validate_arg (size, INTEGER_TYPE))
12030 return NULL_TREE;
12032 /* If SRC and DEST are the same (and not volatile), return DEST. */
12033 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12034 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12036 if (! host_integerp (size, 1))
12037 return NULL_TREE;
12039 if (! integer_all_onesp (size))
12041 len = c_strlen (src, 1);
12042 if (! len || ! host_integerp (len, 1))
12044 /* If LEN is not constant, try MAXLEN too.
12045 For MAXLEN only allow optimizing into non-_ocs function
12046 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12047 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12049 if (fcode == BUILT_IN_STPCPY_CHK)
12051 if (! ignore)
12052 return NULL_TREE;
12054 /* If return value of __stpcpy_chk is ignored,
12055 optimize into __strcpy_chk. */
12056 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12057 if (!fn)
12058 return NULL_TREE;
12060 return build_call_expr (fn, 3, dest, src, size);
12063 if (! len || TREE_SIDE_EFFECTS (len))
12064 return NULL_TREE;
12066 /* If c_strlen returned something, but not a constant,
12067 transform __strcpy_chk into __memcpy_chk. */
12068 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12069 if (!fn)
12070 return NULL_TREE;
12072 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12073 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12074 build_call_expr (fn, 4,
12075 dest, src, len, size));
12078 else
12079 maxlen = len;
12081 if (! tree_int_cst_lt (maxlen, size))
12082 return NULL_TREE;
12085 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12086 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12087 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12088 if (!fn)
12089 return NULL_TREE;
12091 return build_call_expr (fn, 2, dest, src);
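/* For illustration of the folding above: __strcpy_chk (d, s, (size_t) -1)
   becomes strcpy (d, s); with a known object size the call becomes strcpy
   only when the source length is provably smaller than that size, and a
   computable but non-constant source length is handled by rewriting to
   __memcpy_chk with length strlen (s) + 1. */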
12094 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12095 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12096 length passed as third argument. */
12098 tree
12099 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12100 tree maxlen)
12102 tree fn;
12104 if (!validate_arg (dest, POINTER_TYPE)
12105 || !validate_arg (src, POINTER_TYPE)
12106 || !validate_arg (len, INTEGER_TYPE)
12107 || !validate_arg (size, INTEGER_TYPE))
12108 return NULL_TREE;
12110 if (! host_integerp (size, 1))
12111 return NULL_TREE;
12113 if (! integer_all_onesp (size))
12115 if (! host_integerp (len, 1))
12117 /* If LEN is not constant, try MAXLEN too.
12118 For MAXLEN only allow optimizing into non-_ocs function
12119 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12120 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12121 return NULL_TREE;
12123 else
12124 maxlen = len;
12126 if (tree_int_cst_lt (size, maxlen))
12127 return NULL_TREE;
12130 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12131 fn = built_in_decls[BUILT_IN_STRNCPY];
12132 if (!fn)
12133 return NULL_TREE;
12135 return build_call_expr (fn, 3, dest, src, len);
12138 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12139 are the arguments to the call. */
12141 static tree
12142 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12144 tree fn;
12145 const char *p;
12147 if (!validate_arg (dest, POINTER_TYPE)
12148 || !validate_arg (src, POINTER_TYPE)
12149 || !validate_arg (size, INTEGER_TYPE))
12150 return NULL_TREE;
12152 p = c_getstr (src);
12153 /* If the SRC parameter is "", return DEST. */
12154 if (p && *p == '\0')
12155 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12157 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12158 return NULL_TREE;
12160 /* If __builtin_strcat_chk is used, assume strcat is available. */
12161 fn = built_in_decls[BUILT_IN_STRCAT];
12162 if (!fn)
12163 return NULL_TREE;
12165 return build_call_expr (fn, 2, dest, src);
12168 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12169 LEN, and SIZE. */
12171 static tree
12172 fold_builtin_strncat_chk (tree fndecl,
12173 tree dest, tree src, tree len, tree size)
12175 tree fn;
12176 const char *p;
12178 if (!validate_arg (dest, POINTER_TYPE)
12179 || !validate_arg (src, POINTER_TYPE)
12180 || !validate_arg (len, INTEGER_TYPE)
12181 || !validate_arg (size, INTEGER_TYPE))
12182 return NULL_TREE;
12184 p = c_getstr (src);
12185 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12186 if (p && *p == '\0')
12187 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12188 else if (integer_zerop (len))
12189 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12191 if (! host_integerp (size, 1))
12192 return NULL_TREE;
12194 if (! integer_all_onesp (size))
12196 tree src_len = c_strlen (src, 1);
12197 if (src_len
12198 && host_integerp (src_len, 1)
12199 && host_integerp (len, 1)
12200 && ! tree_int_cst_lt (len, src_len))
12202 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12203 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12204 if (!fn)
12205 return NULL_TREE;
12207 return build_call_expr (fn, 3, dest, src, size);
12209 return NULL_TREE;
12212 /* If __builtin_strncat_chk is used, assume strncat is available. */
12213 fn = built_in_decls[BUILT_IN_STRNCAT];
12214 if (!fn)
12215 return NULL_TREE;
12217 return build_call_expr (fn, 3, dest, src, len);
12220 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12221 a normal call should be emitted rather than expanding the function
12222 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12224 static tree
12225 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12227 tree dest, size, len, fn, fmt, flag;
12228 const char *fmt_str;
12229 int nargs = call_expr_nargs (exp);
12231 /* Verify the required arguments in the original call. */
12232 if (nargs < 4)
12233 return NULL_TREE;
12234 dest = CALL_EXPR_ARG (exp, 0);
12235 if (!validate_arg (dest, POINTER_TYPE))
12236 return NULL_TREE;
12237 flag = CALL_EXPR_ARG (exp, 1);
12238 if (!validate_arg (flag, INTEGER_TYPE))
12239 return NULL_TREE;
12240 size = CALL_EXPR_ARG (exp, 2);
12241 if (!validate_arg (size, INTEGER_TYPE))
12242 return NULL_TREE;
12243 fmt = CALL_EXPR_ARG (exp, 3);
12244 if (!validate_arg (fmt, POINTER_TYPE))
12245 return NULL_TREE;
12247 if (! host_integerp (size, 1))
12248 return NULL_TREE;
12250 len = NULL_TREE;
12252 if (!init_target_chars ())
12253 return NULL_TREE;
12255 /* Check whether the format is a literal string constant. */
12256 fmt_str = c_getstr (fmt);
12257 if (fmt_str != NULL)
12259 /* If the format doesn't contain % args or %%, we know the size. */
12260 if (strchr (fmt_str, target_percent) == 0)
12262 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12263 len = build_int_cstu (size_type_node, strlen (fmt_str));
12265 /* If the format is "%s" and first ... argument is a string literal,
12266 we know the size too. */
12267 else if (fcode == BUILT_IN_SPRINTF_CHK
12268 && strcmp (fmt_str, target_percent_s) == 0)
12270 tree arg;
12272 if (nargs == 5)
12274 arg = CALL_EXPR_ARG (exp, 4);
12275 if (validate_arg (arg, POINTER_TYPE))
12277 len = c_strlen (arg, 1);
12278 if (! len || ! host_integerp (len, 1))
12279 len = NULL_TREE;
12285 if (! integer_all_onesp (size))
12287 if (! len || ! tree_int_cst_lt (len, size))
12288 return NULL_TREE;
12291 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12292 or if format doesn't contain % chars or is "%s". */
12293 if (! integer_zerop (flag))
12295 if (fmt_str == NULL)
12296 return NULL_TREE;
12297 if (strchr (fmt_str, target_percent) != NULL
12298 && strcmp (fmt_str, target_percent_s))
12299 return NULL_TREE;
12302 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12303 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12304 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12305 if (!fn)
12306 return NULL_TREE;
12308 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
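/* For illustration of the folding above: __sprintf_chk (buf, 0, bos, "hello")
   becomes sprintf (buf, "hello") when bos is (size_t) -1 or greater than 5,
   and __sprintf_chk (buf, 0, bos, "%s", "hi") is handled the same way using
   the known length of the string argument. */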
12311 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
12312 a normal call should be emitted rather than expanding the function
12313 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12314 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12315 passed as second argument. */
12317 tree
12318 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12319 enum built_in_function fcode)
12321 tree dest, size, len, fn, fmt, flag;
12322 const char *fmt_str;
12324 /* Verify the required arguments in the original call. */
12325 if (call_expr_nargs (exp) < 5)
12326 return NULL_TREE;
12327 dest = CALL_EXPR_ARG (exp, 0);
12328 if (!validate_arg (dest, POINTER_TYPE))
12329 return NULL_TREE;
12330 len = CALL_EXPR_ARG (exp, 1);
12331 if (!validate_arg (len, INTEGER_TYPE))
12332 return NULL_TREE;
12333 flag = CALL_EXPR_ARG (exp, 2);
12334 if (!validate_arg (flag, INTEGER_TYPE))
12335 return NULL_TREE;
12336 size = CALL_EXPR_ARG (exp, 3);
12337 if (!validate_arg (size, INTEGER_TYPE))
12338 return NULL_TREE;
12339 fmt = CALL_EXPR_ARG (exp, 4);
12340 if (!validate_arg (fmt, POINTER_TYPE))
12341 return NULL_TREE;
12343 if (! host_integerp (size, 1))
12344 return NULL_TREE;
12346 if (! integer_all_onesp (size))
12348 if (! host_integerp (len, 1))
12350 /* If LEN is not constant, try MAXLEN too.
12351 For MAXLEN only allow optimizing into non-_ocs function
12352 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12353 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12354 return NULL_TREE;
12356 else
12357 maxlen = len;
12359 if (tree_int_cst_lt (size, maxlen))
12360 return NULL_TREE;
12363 if (!init_target_chars ())
12364 return NULL_TREE;
12366 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12367 or if format doesn't contain % chars or is "%s". */
12368 if (! integer_zerop (flag))
12370 fmt_str = c_getstr (fmt);
12371 if (fmt_str == NULL)
12372 return NULL_TREE;
12373 if (strchr (fmt_str, target_percent) != NULL
12374 && strcmp (fmt_str, target_percent_s))
12375 return NULL_TREE;
12378 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12379 available. */
12380 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12381 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12382 if (!fn)
12383 return NULL_TREE;
12385 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12388 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12389 FMT and ARG are the arguments to the call; we don't fold cases with
12390 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12392 Return NULL_TREE if no simplification was possible, otherwise return the
12393 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12394 code of the function to be simplified. */
12396 static tree
12397 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12398 enum built_in_function fcode)
12400 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12401 const char *fmt_str = NULL;
12403 /* If the return value is used, don't do the transformation. */
12404 if (! ignore)
12405 return NULL_TREE;
12407 /* Verify the required arguments in the original call. */
12408 if (!validate_arg (fmt, POINTER_TYPE))
12409 return NULL_TREE;
12411 /* Check whether the format is a literal string constant. */
12412 fmt_str = c_getstr (fmt);
12413 if (fmt_str == NULL)
12414 return NULL_TREE;
12416 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12418 /* If we're using an unlocked function, assume the other
12419 unlocked functions exist explicitly. */
12420 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12421 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12423 else
12425 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12426 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12429 if (!init_target_chars ())
12430 return NULL_TREE;
12432 if (strcmp (fmt_str, target_percent_s) == 0
12433 || strchr (fmt_str, target_percent) == NULL)
12435 const char *str;
12437 if (strcmp (fmt_str, target_percent_s) == 0)
12439 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12440 return NULL_TREE;
12442 if (!arg || !validate_arg (arg, POINTER_TYPE))
12443 return NULL_TREE;
12445 str = c_getstr (arg);
12446 if (str == NULL)
12447 return NULL_TREE;
12449 else
12451 /* The format specifier doesn't contain any '%' characters. */
12452 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12453 && arg)
12454 return NULL_TREE;
12455 str = fmt_str;
12458 /* If the string was "", printf does nothing. */
12459 if (str[0] == '\0')
12460 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12462 /* If the string has length of 1, call putchar. */
12463 if (str[1] == '\0')
12465 /* Given printf("c"), (where c is any one character,)
12466 convert "c"[0] to an int and pass that to the replacement
12467 function. */
12468 newarg = build_int_cst (NULL_TREE, str[0]);
12469 if (fn_putchar)
12470 call = build_call_expr (fn_putchar, 1, newarg);
12472 else
12474 /* If the string was "string\n", call puts("string"). */
12475 size_t len = strlen (str);
12476 if ((unsigned char)str[len - 1] == target_newline)
12478 /* Create a NUL-terminated string that's one char shorter
12479 than the original, stripping off the trailing '\n'. */
12480 char *newstr = XALLOCAVEC (char, len);
12481 memcpy (newstr, str, len - 1);
12482 newstr[len - 1] = 0;
12484 newarg = build_string_literal (len, newstr);
12485 if (fn_puts)
12486 call = build_call_expr (fn_puts, 1, newarg);
12488 else
12489 /* We'd like to arrange to call fputs(string,stdout) here,
12490 but we need stdout and don't have a way to get it yet. */
12491 return NULL_TREE;
12495 /* The other optimizations can be done only on the non-va_list variants. */
12496 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12497 return NULL_TREE;
12499 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12500 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12502 if (!arg || !validate_arg (arg, POINTER_TYPE))
12503 return NULL_TREE;
12504 if (fn_puts)
12505 call = build_call_expr (fn_puts, 1, arg);
12508 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12509 else if (strcmp (fmt_str, target_percent_c) == 0)
12511 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12512 return NULL_TREE;
12513 if (fn_putchar)
12514 call = build_call_expr (fn_putchar, 1, arg);
12517 if (!call)
12518 return NULL_TREE;
12520 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
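/* For illustration of the folding above (only done when the result is
   unused): printf ("") is removed, printf ("x") becomes putchar ('x'),
   printf ("hello\n") becomes puts ("hello"), printf ("%s\n", s) becomes
   puts (s), and printf ("%c", c) becomes putchar (c). */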
12523 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
12524 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12525 more than 3 arguments, and ARG may be null in the 2-argument case.
12527 Return NULL_TREE if no simplification was possible, otherwise return the
12528 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12529 code of the function to be simplified. */
12531 static tree
12532 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12533 enum built_in_function fcode)
12535 tree fn_fputc, fn_fputs, call = NULL_TREE;
12536 const char *fmt_str = NULL;
12538 /* If the return value is used, don't do the transformation. */
12539 if (! ignore)
12540 return NULL_TREE;
12542 /* Verify the required arguments in the original call. */
12543 if (!validate_arg (fp, POINTER_TYPE))
12544 return NULL_TREE;
12545 if (!validate_arg (fmt, POINTER_TYPE))
12546 return NULL_TREE;
12548 /* Check whether the format is a literal string constant. */
12549 fmt_str = c_getstr (fmt);
12550 if (fmt_str == NULL)
12551 return NULL_TREE;
12553 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12555 /* If we're using an unlocked function, assume the other
12556 unlocked functions exist explicitly. */
12557 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12558 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12560 else
12562 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12563 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12566 if (!init_target_chars ())
12567 return NULL_TREE;
12569 /* If the format doesn't contain % args or %%, use strcpy. */
12570 if (strchr (fmt_str, target_percent) == NULL)
12572 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12573 && arg)
12574 return NULL_TREE;
12576 /* If the format specifier was "", fprintf does nothing. */
12577 if (fmt_str[0] == '\0')
12579 /* If FP has side-effects, just wait until gimplification is
12580 done. */
12581 if (TREE_SIDE_EFFECTS (fp))
12582 return NULL_TREE;
12584 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12587 /* When "string" doesn't contain %, replace all cases of
12588 fprintf (fp, string) with fputs (string, fp). The fputs
12589 builtin will take care of special cases like length == 1. */
12590 if (fn_fputs)
12591 call = build_call_expr (fn_fputs, 2, fmt, fp);
12594 /* The other optimizations can be done only on the non-va_list variants. */
12595 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12596 return NULL_TREE;
12598 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12599 else if (strcmp (fmt_str, target_percent_s) == 0)
12601 if (!arg || !validate_arg (arg, POINTER_TYPE))
12602 return NULL_TREE;
12603 if (fn_fputs)
12604 call = build_call_expr (fn_fputs, 2, arg, fp);
12607 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12608 else if (strcmp (fmt_str, target_percent_c) == 0)
12610 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12611 return NULL_TREE;
12612 if (fn_fputc)
12613 call = build_call_expr (fn_fputc, 2, arg, fp);
12616 if (!call)
12617 return NULL_TREE;
12618 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
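/* For illustration of the folding above (only done when the result is
   unused): fprintf (fp, "hello") becomes fputs ("hello", fp),
   fprintf (fp, "%s", s) becomes fputs (s, fp), and fprintf (fp, "%c", c)
   becomes fputc (c, fp). */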
12621 /* Initialize format string characters in the target charset. */
12623 static bool
12624 init_target_chars (void)
12626 static bool init;
12627 if (!init)
12629 target_newline = lang_hooks.to_target_charset ('\n');
12630 target_percent = lang_hooks.to_target_charset ('%');
12631 target_c = lang_hooks.to_target_charset ('c');
12632 target_s = lang_hooks.to_target_charset ('s');
12633 if (target_newline == 0 || target_percent == 0 || target_c == 0
12634 || target_s == 0)
12635 return false;
12637 target_percent_c[0] = target_percent;
12638 target_percent_c[1] = target_c;
12639 target_percent_c[2] = '\0';
12641 target_percent_s[0] = target_percent;
12642 target_percent_s[1] = target_s;
12643 target_percent_s[2] = '\0';
12645 target_percent_s_newline[0] = target_percent;
12646 target_percent_s_newline[1] = target_s;
12647 target_percent_s_newline[2] = target_newline;
12648 target_percent_s_newline[3] = '\0';
12650 init = true;
12652 return true;
12655 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12656 and no overflow/underflow occurred. INEXACT is true if M was not
12657 exactly calculated. TYPE is the tree type for the result. This
12658 function assumes that you cleared the MPFR flags and then
12659 calculated M to see if anything subsequently set a flag prior to
12660 entering this function. Return NULL_TREE if any checks fail. */
12662 static tree
12663 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12665 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12666 overflow/underflow occurred. If -frounding-math, proceed iff the
12667 result of calling FUNC was exact. */
12668 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12669 && (!flag_rounding_math || !inexact))
12671 REAL_VALUE_TYPE rr;
12673 real_from_mpfr (&rr, m, type, GMP_RNDN);
12674 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12675 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12676 but the mpfr_t is not, then we underflowed in the
12677 conversion. */
12678 if (real_isfinite (&rr)
12679 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12681 REAL_VALUE_TYPE rmode;
12683 real_convert (&rmode, TYPE_MODE (type), &rr);
12684 /* Proceed iff the specified mode can hold the value. */
12685 if (real_identical (&rmode, &rr))
12686 return build_real (type, rmode);
12689 return NULL_TREE;
12692 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12693 FUNC on it and return the resulting value as a tree with type TYPE.
12694 If MIN and/or MAX are not NULL, then the supplied ARG must be
12695 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12696 acceptable values, otherwise they are not. The mpfr precision is
12697 set to the precision of TYPE. We assume that function FUNC returns
12698 zero if the result could be calculated exactly within the requested
12699 precision. */
12701 static tree
12702 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12703 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12704 bool inclusive)
12706 tree result = NULL_TREE;
12708 STRIP_NOPS (arg);
12710 /* To proceed, MPFR must exactly represent the target floating point
12711 format, which only happens when the target base equals two. */
12712 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12713 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12715 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12717 if (real_isfinite (ra)
12718 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12719 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12721 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12722 int inexact;
12723 mpfr_t m;
12725 mpfr_init2 (m, prec);
12726 mpfr_from_real (m, ra, GMP_RNDN);
12727 mpfr_clear_flags ();
12728 inexact = func (m, m, GMP_RNDN);
12729 result = do_mpfr_ckconv (m, type, inexact);
12730 mpfr_clear (m);
12734 return result;
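/* For illustration: callers are expected to pass a one-argument MPFR
   evaluator such as mpfr_sin or mpfr_exp as FUNC, so that e.g. a call
   like exp (1.0) with a constant argument can be folded to a REAL_CST
   computed at the precision of TYPE. */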
12737 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12738 FUNC on it and return the resulting value as a tree with type TYPE.
12739 The mpfr precision is set to the precision of TYPE. We assume that
12740 function FUNC returns zero if the result could be calculated
12741 exactly within the requested precision. */
12743 static tree
12744 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12745 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12747 tree result = NULL_TREE;
12749 STRIP_NOPS (arg1);
12750 STRIP_NOPS (arg2);
12752 /* To proceed, MPFR must exactly represent the target floating point
12753 format, which only happens when the target base equals two. */
12754 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12755 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12756 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12758 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12759 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12761 if (real_isfinite (ra1) && real_isfinite (ra2))
12763 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12764 int inexact;
12765 mpfr_t m1, m2;
12767 mpfr_inits2 (prec, m1, m2, NULL);
12768 mpfr_from_real (m1, ra1, GMP_RNDN);
12769 mpfr_from_real (m2, ra2, GMP_RNDN);
12770 mpfr_clear_flags ();
12771 inexact = func (m1, m1, m2, GMP_RNDN);
12772 result = do_mpfr_ckconv (m1, type, inexact);
12773 mpfr_clears (m1, m2, NULL);
12777 return result;
12780 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12781 FUNC on it and return the resulting value as a tree with type TYPE.
12782 The mpfr precision is set to the precision of TYPE. We assume that
12783 function FUNC returns zero if the result could be calculated
12784 exactly within the requested precision. */
12786 static tree
12787 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12788 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12790 tree result = NULL_TREE;
12792 STRIP_NOPS (arg1);
12793 STRIP_NOPS (arg2);
12794 STRIP_NOPS (arg3);
12796 /* To proceed, MPFR must exactly represent the target floating point
12797 format, which only happens when the target base equals two. */
12798 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12799 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12800 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12801 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12803 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12804 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12805 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12807 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12809 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12810 int inexact;
12811 mpfr_t m1, m2, m3;
12813 mpfr_inits2 (prec, m1, m2, m3, NULL);
12814 mpfr_from_real (m1, ra1, GMP_RNDN);
12815 mpfr_from_real (m2, ra2, GMP_RNDN);
12816 mpfr_from_real (m3, ra3, GMP_RNDN);
12817 mpfr_clear_flags ();
12818 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12819 result = do_mpfr_ckconv (m1, type, inexact);
12820 mpfr_clears (m1, m2, m3, NULL);
12824 return result;
12827 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12828 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12829 If ARG_SINP and ARG_COSP are NULL then the result is returned
12830 as a complex value.
12831 The type is taken from the type of ARG and is used for setting the
12832 precision of the calculation and results. */
12834 static tree
12835 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12837 tree const type = TREE_TYPE (arg);
12838 tree result = NULL_TREE;
12840 STRIP_NOPS (arg);
12842 /* To proceed, MPFR must exactly represent the target floating point
12843 format, which only happens when the target base equals two. */
12844 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12845 && TREE_CODE (arg) == REAL_CST
12846 && !TREE_OVERFLOW (arg))
12848 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12850 if (real_isfinite (ra))
12852 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12853 tree result_s, result_c;
12854 int inexact;
12855 mpfr_t m, ms, mc;
12857 mpfr_inits2 (prec, m, ms, mc, NULL);
12858 mpfr_from_real (m, ra, GMP_RNDN);
12859 mpfr_clear_flags ();
12860 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12861 result_s = do_mpfr_ckconv (ms, type, inexact);
12862 result_c = do_mpfr_ckconv (mc, type, inexact);
12863 mpfr_clears (m, ms, mc, NULL);
12864 if (result_s && result_c)
12866 /* If we are to return in a complex value do so. */
12867 if (!arg_sinp && !arg_cosp)
12868 return build_complex (build_complex_type (type),
12869 result_c, result_s);
12871 /* Dereference the sin/cos pointer arguments. */
12872 arg_sinp = build_fold_indirect_ref (arg_sinp);
12873 arg_cosp = build_fold_indirect_ref (arg_cosp);
12874 /* Proceed if valid pointer types were passed in. */
12875 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12876 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12878 /* Set the values. */
12879 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12880 result_s);
12881 TREE_SIDE_EFFECTS (result_s) = 1;
12882 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12883 result_c);
12884 TREE_SIDE_EFFECTS (result_c) = 1;
12885 /* Combine the assignments into a compound expr. */
12886 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12887 result_s, result_c));
12888 }
12889 }
12890 }
12891 }
12892 return result;
12893 }
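/* Illustrative sketch, not part of GCC: mpfr_sin_cos computes both results
   in one call and its return value is zero only when both are exact,
   which is why the single INEXACT value above can be handed to
   do_mpfr_ckconv for the sine and the cosine alike.  The input 0.0 is an
   assumption chosen so that both results (0 and 1) are exact.  */
#if 0
#include <stdio.h>
#include <gmp.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t x, s, c;
  int inexact;

  mpfr_inits2 (53, x, s, c, NULL);
  mpfr_set_d (x, 0.0, GMP_RNDN);

  inexact = mpfr_sin_cos (s, c, x, GMP_RNDN);
  printf ("sin = %g, cos = %g, both exact = %d\n",
          mpfr_get_d (s, GMP_RNDN), mpfr_get_d (c, GMP_RNDN), inexact == 0);

  mpfr_clears (x, s, c, NULL);
  return 0;
}
#endif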
12895 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
12896 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12897 two-argument mpfr order N Bessel function FUNC on them and return
12898 the resulting value as a tree with type TYPE. The mpfr precision
12899 is set to the precision of TYPE. We assume that function FUNC
12900 returns zero if the result could be calculated exactly within the
12901 requested precision. */
12902 static tree
12903 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12904 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12905 const REAL_VALUE_TYPE *min, bool inclusive)
12906 {
12907 tree result = NULL_TREE;
12909 STRIP_NOPS (arg1);
12910 STRIP_NOPS (arg2);
12912 /* To proceed, MPFR must exactly represent the target floating point
12913 format, which only happens when the target base equals two. */
12914 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12915 && host_integerp (arg1, 0)
12916 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12917 {
12918 const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
12919 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12921 if (n == (long) n
12922 && real_isfinite (ra)
12923 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
12924 {
12925 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12926 int inexact;
12927 mpfr_t m;
12929 mpfr_init2 (m, prec);
12930 mpfr_from_real (m, ra, GMP_RNDN);
12931 mpfr_clear_flags ();
12932 inexact = func (m, n, m, GMP_RNDN);
12933 result = do_mpfr_ckconv (m, type, inexact);
12934 mpfr_clear (m);
12935 }
12936 }
12938 return result;
12939 }
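/* Illustrative sketch, not part of GCC: the order argument of the MPFR
   Bessel functions is a host "long", which is why do_mpfr_bessel_n above
   first checks that the INTEGER_CST order fits in a long.  Using mpfr_jn
   and the input 1.0 here is only an assumption for the example.  */
#if 0
#include <stdio.h>
#include <gmp.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t x;
  int inexact;

  mpfr_init2 (x, 53);
  mpfr_set_d (x, 1.0, GMP_RNDN);

  /* Same shape as the FUNC call above: the result overwrites X.  */
  inexact = mpfr_jn (x, 2L, x, GMP_RNDN);
  printf ("j2(1.0) ~= %g, exact = %d\n",
          mpfr_get_d (x, GMP_RNDN), inexact == 0);

  mpfr_clear (x);
  return 0;
}
#endif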
12941 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12942 the pointer *(ARG_QUO) and return the result. The type is taken
12943 from the type of ARG0 and is used for setting the precision of the
12944 calculation and results. */
12946 static tree
12947 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12948 {
12949 tree const type = TREE_TYPE (arg0);
12950 tree result = NULL_TREE;
12952 STRIP_NOPS (arg0);
12953 STRIP_NOPS (arg1);
12955 /* To proceed, MPFR must exactly represent the target floating point
12956 format, which only happens when the target base equals two. */
12957 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12958 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12959 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12960 {
12961 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12962 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12964 if (real_isfinite (ra0) && real_isfinite (ra1))
12965 {
12966 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12967 tree result_rem;
12968 long integer_quo;
12969 mpfr_t m0, m1;
12971 mpfr_inits2 (prec, m0, m1, NULL);
12972 mpfr_from_real (m0, ra0, GMP_RNDN);
12973 mpfr_from_real (m1, ra1, GMP_RNDN);
12974 mpfr_clear_flags ();
12975 mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
12976 /* Remquo is independent of the rounding mode, so pass
12977 inexact=0 to do_mpfr_ckconv(). */
12978 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12979 mpfr_clears (m0, m1, NULL);
12980 if (result_rem)
12981 {
12982 /* MPFR calculates quo in the host's long so it may
12983 return more bits in quo than the target int can hold
12984 if sizeof(host long) > sizeof(target int). This can
12985 happen even for native compilers in LP64 mode. In
12986 these cases, reduce the quo value modulo the largest
12987 number that the target int can hold, leaving one bit
12988 for the sign. */
12989 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12990 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12992 /* Dereference the quo pointer argument. */
12993 arg_quo = build_fold_indirect_ref (arg_quo);
12994 /* Proceed iff a valid pointer type was passed in. */
12995 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12996 {
12997 /* Set the value. */
12998 tree result_quo = fold_build2 (MODIFY_EXPR,
12999 TREE_TYPE (arg_quo), arg_quo,
13000 build_int_cst (NULL, integer_quo));
13001 TREE_SIDE_EFFECTS (result_quo) = 1;
13002 /* Combine the quo assignment with the rem. */
13003 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13004 result_quo, result_rem));
13005 }
13006 }
13007 }
13008 }
13009 return result;
13010 }
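/* Illustrative sketch, not part of GCC: mpfr_remquo reports (the low bits
   of) the quotient through a host "long", so a caller that must store it
   in a possibly narrower target "int" reduces it first, as the modulo in
   do_mpfr_remquo above does.  The 32-bit int width below is an assumption
   made only for this example.  */
#if 0
#include <stdio.h>
#include <limits.h>
#include <gmp.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t r, x, y;
  long quo;

  mpfr_inits2 (53, r, x, y, NULL);
  mpfr_set_d (x, 7.0, GMP_RNDN);
  mpfr_set_d (y, 2.0, GMP_RNDN);

  mpfr_remquo (r, &quo, x, y, GMP_RNDN);

  /* Keep only what a 32-bit signed int could hold, mirroring the
     INT_TYPE_SIZE reduction above.  */
  if (sizeof (quo) * CHAR_BIT > 32)
    quo %= (long) (1UL << 31);

  printf ("rem = %g, quo = %ld\n", mpfr_get_d (r, GMP_RNDN), quo);

  mpfr_clears (r, x, y, NULL);
  return 0;
}
#endif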
13012 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13013 resulting value as a tree with type TYPE. The mpfr precision is
13014 set to the precision of TYPE. We assume that this mpfr function
13015 returns zero if the result could be calculated exactly within the
13016 requested precision. In addition, the integer pointer represented
13017 by ARG_SG will be dereferenced and set to the appropriate signgam
13018 (-1,1) value. */
13020 static tree
13021 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13022 {
13023 tree result = NULL_TREE;
13025 STRIP_NOPS (arg);
13027 /* To proceed, MPFR must exactly represent the target floating point
13028 format, which only happens when the target base equals two. Also
13029 verify ARG is a constant and that ARG_SG is an int pointer. */
13030 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13031 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13032 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13033 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13034 {
13035 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13037 /* In addition to being finite (not NaN or Inf), the argument must
13038 not be zero or a negative integer. */
13039 if (real_isfinite (ra)
13040 && ra->cl != rvc_zero
13041 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
13042 {
13043 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
13044 int inexact, sg;
13045 mpfr_t m;
13046 tree result_lg;
13048 mpfr_init2 (m, prec);
13049 mpfr_from_real (m, ra, GMP_RNDN);
13050 mpfr_clear_flags ();
13051 inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
13052 result_lg = do_mpfr_ckconv (m, type, inexact);
13053 mpfr_clear (m);
13054 if (result_lg)
13055 {
13056 tree result_sg;
13058 /* Dereference the arg_sg pointer argument. */
13059 arg_sg = build_fold_indirect_ref (arg_sg);
13060 /* Assign the signgam value into *arg_sg. */
13061 result_sg = fold_build2 (MODIFY_EXPR,
13062 TREE_TYPE (arg_sg), arg_sg,
13063 build_int_cst (NULL, sg));
13064 TREE_SIDE_EFFECTS (result_sg) = 1;
13065 /* Combine the signgam assignment with the lgamma result. */
13066 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13067 result_sg, result_lg));
13068 }
13069 }
13070 }
13072 return result;
13073 }
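/* Illustrative sketch, not part of GCC: mpfr_lgamma hands back the sign of
   Gamma(x) through an "int *", which do_mpfr_lgamma_r above then stores
   through the user's signgam pointer.  The input -2.5 is an assumption
   chosen because Gamma(-2.5) is negative, so the reported sign is -1.  */
#if 0
#include <stdio.h>
#include <gmp.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t x;
  int sg, inexact;

  mpfr_init2 (x, 53);
  mpfr_set_d (x, -2.5, GMP_RNDN);

  /* Same shape as the call above: the result overwrites X.  */
  inexact = mpfr_lgamma (x, &sg, x, GMP_RNDN);
  printf ("lgamma(-2.5) ~= %g, sign = %d, exact = %d\n",
          mpfr_get_d (x, GMP_RNDN), sg, inexact == 0);

  mpfr_clear (x);
  return 0;
}
#endif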
13074 #endif