Remove copy_renames.
[official-gcc/graphite-test-results.git] / gcc / builtins.c
blobe57449ddcaecf139b4c3491a913159854409ee9e
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "realmpfr.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic-core.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
56 #endif
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 #endif
61 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* Expand each DEF_BUILTIN entry in builtins.def into the stringified
   enumerator name, yielding one printable name per built-in code.  */
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];
/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance runtime is not
   required to implement the function call in all cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];
82 static const char *c_getstr (tree);
83 static rtx c_readstr (const char *, enum machine_mode);
84 static int target_char_cast (tree, char *);
85 static rtx get_memory_rtx (tree, tree);
86 static int apply_args_size (void);
87 static int apply_result_size (void);
88 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
89 static rtx result_vector (int, rtx);
90 #endif
91 static void expand_builtin_update_setjmp_buf (rtx);
92 static void expand_builtin_prefetch (tree);
93 static rtx expand_builtin_apply_args (void);
94 static rtx expand_builtin_apply_args_1 (void);
95 static rtx expand_builtin_apply (rtx, rtx, rtx);
96 static void expand_builtin_return (rtx);
97 static enum type_class type_to_class (tree);
98 static rtx expand_builtin_classify_type (tree);
99 static void expand_errno_check (tree, rtx);
100 static rtx expand_builtin_mathfn (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
102 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
103 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
104 static rtx expand_builtin_sincos (tree);
105 static rtx expand_builtin_cexpi (tree, rtx, rtx);
106 static rtx expand_builtin_int_roundingfn (tree, rtx);
107 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
108 static rtx expand_builtin_args_info (tree);
109 static rtx expand_builtin_next_arg (void);
110 static rtx expand_builtin_va_start (tree);
111 static rtx expand_builtin_va_end (tree);
112 static rtx expand_builtin_va_copy (tree);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_strcpy (tree, rtx);
122 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
123 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_strncpy (tree, rtx);
125 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
126 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
128 static rtx expand_builtin_bzero (tree);
129 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_alloca (tree, rtx);
131 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
132 static rtx expand_builtin_frame_address (tree, tree);
133 static tree stabilize_va_list_loc (location_t, tree, int);
134 static rtx expand_builtin_expect (tree, rtx);
135 static tree fold_builtin_constant_p (tree);
136 static tree fold_builtin_expect (location_t, tree, tree);
137 static tree fold_builtin_classify_type (tree);
138 static tree fold_builtin_strlen (location_t, tree, tree);
139 static tree fold_builtin_inf (location_t, tree, int);
140 static tree fold_builtin_nan (tree, tree, int);
141 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
142 static bool validate_arg (const_tree, enum tree_code code);
143 static bool integer_valued_real_p (tree);
144 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
145 static bool readonly_data_expr (tree);
146 static rtx expand_builtin_fabs (tree, rtx, rtx);
147 static rtx expand_builtin_signbit (tree, rtx);
148 static tree fold_builtin_sqrt (location_t, tree, tree);
149 static tree fold_builtin_cbrt (location_t, tree, tree);
150 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_cos (location_t, tree, tree, tree);
153 static tree fold_builtin_cosh (location_t, tree, tree, tree);
154 static tree fold_builtin_tan (tree, tree);
155 static tree fold_builtin_trunc (location_t, tree, tree);
156 static tree fold_builtin_floor (location_t, tree, tree);
157 static tree fold_builtin_ceil (location_t, tree, tree);
158 static tree fold_builtin_round (location_t, tree, tree);
159 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
160 static tree fold_builtin_bitop (tree, tree);
161 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
162 static tree fold_builtin_strchr (location_t, tree, tree, tree);
163 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
165 static tree fold_builtin_strcmp (location_t, tree, tree);
166 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
167 static tree fold_builtin_signbit (location_t, tree, tree);
168 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_isascii (location_t, tree);
170 static tree fold_builtin_toascii (location_t, tree);
171 static tree fold_builtin_isdigit (location_t, tree);
172 static tree fold_builtin_fabs (location_t, tree, tree);
173 static tree fold_builtin_abs (location_t, tree, tree);
174 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
175 enum tree_code);
176 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
177 static tree fold_builtin_0 (location_t, tree, bool);
178 static tree fold_builtin_1 (location_t, tree, tree, bool);
179 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
180 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
181 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
182 static tree fold_builtin_varargs (location_t, tree, tree, bool);
184 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
185 static tree fold_builtin_strstr (location_t, tree, tree, tree);
186 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
187 static tree fold_builtin_strcat (location_t, tree, tree);
188 static tree fold_builtin_strncat (location_t, tree, tree, tree);
189 static tree fold_builtin_strspn (location_t, tree, tree);
190 static tree fold_builtin_strcspn (location_t, tree, tree);
191 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
193 static rtx expand_builtin_object_size (tree);
194 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
195 enum built_in_function);
196 static void maybe_emit_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_free_warning (tree);
199 static tree fold_builtin_object_size (tree, tree);
200 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
201 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
202 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
203 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
204 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
205 enum built_in_function);
206 static bool init_target_chars (void);
208 static unsigned HOST_WIDE_INT target_newline;
209 static unsigned HOST_WIDE_INT target_percent;
210 static unsigned HOST_WIDE_INT target_c;
211 static unsigned HOST_WIDE_INT target_s;
212 static char target_percent_c[3];
213 static char target_percent_s[3];
214 static char target_percent_s_newline[4];
215 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
216 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
217 static tree do_mpfr_arg2 (tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_arg3 (tree, tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_sincos (tree, tree, tree);
222 static tree do_mpfr_bessel_n (tree, tree, tree,
223 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
224 const REAL_VALUE_TYPE *, bool);
225 static tree do_mpfr_remquo (tree, tree, tree);
226 static tree do_mpfr_lgamma_r (tree, tree, tree);
/* Return true if NAME starts with __builtin_ or __sync_.  */

bool
is_builtin_name (const char *name)
{
  /* Reserved prefixes that mark a function as a compiler built-in.  */
  static const char *const prefixes[] = { "__builtin_", "__sync_" };
  size_t i;

  for (i = 0; i < sizeof prefixes / sizeof prefixes[0]; i++)
    if (strncmp (name, prefixes[i], strlen (prefixes[i])) == 0)
      return true;

  return false;
}
241 /* Return true if DECL is a function symbol representing a built-in. */
243 bool
244 is_builtin_fn (tree decl)
246 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
250 /* Return true if NODE should be considered for inline expansion regardless
251 of the optimization level. This means whenever a function is invoked with
252 its "internal" name, which normally contains the prefix "__builtin". */
254 static bool
255 called_as_built_in (tree node)
257 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
258 we want the name used to call the function, not the name it
259 will have. */
260 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
261 return is_builtin_name (name);
/* Return the alignment in bits of EXP, an object.
   Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
   guessed alignment e.g. from type alignment.  */

unsigned int
get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
{
  /* INNER tracks the largest power-of-two alignment that the component
     reference path (bit position and variable offsets) still permits.  */
  unsigned int inner;

  inner = max_align;
  if (handled_component_p (exp))
    {
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      enum machine_mode mode;
      int unsignedp, volatilep;

      exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				 &mode, &unsignedp, &volatilep, true);
      /* BITPOS & -BITPOS isolates the lowest set bit: the largest
	 power of two dividing the bit position.  */
      if (bitpos)
	inner = MIN (inner, (unsigned) (bitpos & -bitpos));
      /* Walk a (right-nested) chain of PLUS_EXPR offsets, clamping
	 INNER by each term's known divisibility.  */
      while (offset)
	{
	  tree next_offset;

	  if (TREE_CODE (offset) == PLUS_EXPR)
	    {
	      next_offset = TREE_OPERAND (offset, 0);
	      offset = TREE_OPERAND (offset, 1);
	    }
	  else
	    next_offset = NULL;
	  if (host_integerp (offset, 1))
	    {
	      /* Any overflow in calculating offset_bits won't change
		 the alignment.  */
	      unsigned offset_bits
		= ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

	      if (offset_bits)
		inner = MIN (inner, (offset_bits & -offset_bits));
	    }
	  else if (TREE_CODE (offset) == MULT_EXPR
		   && host_integerp (TREE_OPERAND (offset, 1), 1))
	    {
	      /* Any overflow in calculating offset_factor won't change
		 the alignment.  A variable times a constant factor is
		 still divisible by the factor's low power of two.  */
	      unsigned offset_factor
		= ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
		   * BITS_PER_UNIT);

	      if (offset_factor)
		inner = MIN (inner, (offset_factor & -offset_factor));
	    }
	  else
	    {
	      /* Unanalyzable offset: only byte alignment is guaranteed.  */
	      inner = MIN (inner, BITS_PER_UNIT);
	      break;
	    }
	  offset = next_offset;
	}
    }
  /* For a CONST_DECL, the alignment of its initializer is what matters.  */
  if (TREE_CODE (exp) == CONST_DECL)
    exp = DECL_INITIAL (exp);
  if (DECL_P (exp)
      && TREE_CODE (exp) != LABEL_DECL)
    align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
  else if (CONSTANT_CLASS_P (exp))
    align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
#endif
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
	   || TREE_CODE (exp) == INDIRECT_REF)
    align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
  else
    align = MIN (align, inner);
  return MIN (align, max_align);
}
/* Returns true iff we can trust that alignment information has been
   calculated properly.  */

bool
can_trust_pointer_alignment (void)
{
  /* We rely on TER (Temporary Expression Replacement) to compute
     accurate alignment information; without optimization it never
     runs, so report that alignment cannot be trusted.  */
  return (optimize && flag_tree_ter);
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  if (!can_trust_pointer_alignment ())
    return 0;

  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Start from the alignment of the pointed-to type.  */
  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  /* Peel conversions and pointer arithmetic, refining ALIGN as we go.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	CASE_CONVERT:
	  exp = TREE_OPERAND (exp, 0);
	  if (! POINTER_TYPE_P (TREE_TYPE (exp)))
	    return align;

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case POINTER_PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (! host_integerp (TREE_OPERAND (exp, 1), 1))
	    return align;

	  /* Halve MAX_ALIGN until the constant addend is a multiple of
	     it, so the alignment claim survives the addition.  */
	  while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
		  & (max_align / BITS_PER_UNIT - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);

	default:
	  return align;
	}
    }
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  /* For a conditional, the length is known only if both arms agree
     (and the condition's side effects, if any, may be dropped).  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* (e1, e2) has the length of e2, provided e1's effects can be ignored.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  if (EXPR_HAS_LOCATION (src))
    loc = EXPR_LOCATION (src);
  else
    loc = input_location;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  /* MAX is the last valid index; the array includes the terminator.  */
  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
518 /* Return a char pointer for a C string if it is a string constant
519 or sum of string constant and integer constant. */
521 static const char *
522 c_getstr (tree src)
524 tree offset_node;
526 src = string_constant (src, &offset_node);
527 if (src == 0)
528 return 0;
530 if (offset_node == 0)
531 return TREE_STRING_POINTER (src);
532 else if (!host_integerp (offset_node, 1)
533 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
534 return 0;
536 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  /* C holds the constant as two host words (low, high).  */
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Map byte index I to the target bit position J, honoring word
	 and byte endianness (which may differ for multiword modes).  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);

      /* Once a NUL byte has been read, CH stays 0, so bytes past the
	 end of the string are zero-filled.  */
      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  Return nonzero when the value cannot be represented.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (!host_integerp (cst, 1)
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Truncate to the target's char width.  */
  val = tree_low_cst (cst, 1);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  /* Truncate again to the host's char width; if the two truncations
     disagree the value does not fit in a host char.  */
  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
600 /* Similar to save_expr, but assumes that arbitrary code is not executed
601 in between the multiple evaluations. In particular, we assume that a
602 non-addressable local variable will not be modified. */
604 static tree
605 builtin_save_expr (tree exp)
607 if (TREE_ADDRESSABLE (exp) == 0
608 && (TREE_CODE (exp) == PARM_DECL
609 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
610 return exp;
612 return save_expr (exp);
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

  /* Start from the target-provided initial frame address if the target
     defines one; otherwise choose between the soft and hard frame
     pointer below.  */
#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* Default: the return address is stored one word above the frame
     address.  NOTE(review): this fallback assumes the target stores
     the return address there — targets for which it is false define
     RETURN_ADDR_RTX.  */
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  -1 means not yet allocated; it is
   created lazily via new_alias_set () by the setjmp/longjmp expanders.  */
static alias_set_type setjmp_alias_set = -1;
/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  /* Word 0: the frame value.  */
  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  /* Word 1: the receiver label.  NOTE(review): the trailing comma below
     joins these two statements into one comma expression; behavior is
     identical to a semicolon, but a semicolon would read better.  */
  mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  /* Remaining words: machine-dependent stack save area.  */
  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* Tell optimize_save_area_alloca that extra work is going to
     need to go on during alloca.  */
  cfun->calls_setjmp = 1;

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_clobber (hard_frame_pointer_rtx);
    }

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Prefer the target's dedicated receiver pattern; fall back to the
     nonlocal-goto receiver; otherwise emit nothing extra.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* Layout matches expand_builtin_setjmp_setup: word 0 is the
	 frame pointer, word 1 the label, word 2 the stack save area.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
913 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
914 and the address of the save area. */
916 static rtx
917 expand_builtin_nonlocal_goto (tree exp)
919 tree t_label, t_save_area;
920 rtx r_label, r_save_area, r_fp, r_sp, insn;
922 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
923 return NULL_RTX;
925 t_label = CALL_EXPR_ARG (exp, 0);
926 t_save_area = CALL_EXPR_ARG (exp, 1);
928 r_label = expand_normal (t_label);
929 r_label = convert_memory_address (Pmode, r_label);
930 r_save_area = expand_normal (t_save_area);
931 r_save_area = convert_memory_address (Pmode, r_save_area);
932 /* Copy the address of the save location to a register just in case it was based
933 on the frame pointer. */
934 r_save_area = copy_to_reg (r_save_area);
935 r_fp = gen_rtx_MEM (Pmode, r_save_area);
936 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
937 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
939 crtl->has_nonlocal_goto = 1;
941 #ifdef HAVE_nonlocal_goto
942 /* ??? We no longer need to pass the static chain value, afaik. */
943 if (HAVE_nonlocal_goto)
944 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
945 else
946 #endif
948 r_label = copy_to_reg (r_label);
950 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
951 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
953 /* Restore frame pointer for containing function.
954 This sets the actual hard register used for the frame pointer
955 to the location of the function's incoming static chain info.
956 The non-local goto handler will then adjust it to contain the
957 proper value and reload the argument pointer, if needed. */
958 emit_move_insn (hard_frame_pointer_rtx, r_fp);
959 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
961 /* USE of hard_frame_pointer_rtx added for consistency;
962 not clear if really needed. */
963 emit_use (hard_frame_pointer_rtx);
964 emit_use (stack_pointer_rtx);
966 /* If the architecture is using a GP register, we must
967 conservatively assume that the target function makes use of it.
968 The prologue of functions with nonlocal gotos must therefore
969 initialize the GP register to the appropriate value, and we
970 must then make sure that this value is live at the point
971 of the jump. (Note that this doesn't necessarily apply
972 to targets with a nonlocal_goto pattern; they are free
973 to implement it in their own way. Note also that this is
974 a no-op if the GP register is a global invariant.) */
975 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
976 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
977 emit_use (pic_offset_table_rtx);
979 emit_indirect_jump (r_label);
982 /* Search backwards to the jump insn and mark it as a
983 non-local goto. */
984 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
986 if (JUMP_P (insn))
988 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
989 break;
991 else if (CALL_P (insn))
992 break;
995 return const0_rtx;
998 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
999 (not all will be used on all machines) that was passed to __builtin_setjmp.
1000 It updates the stack pointer in that block to correspond to the current
1001 stack pointer. */
1003 static void
1004 expand_builtin_update_setjmp_buf (rtx buf_addr)
1006 enum machine_mode sa_mode = Pmode;
1007 rtx stack_save;
1010 #ifdef HAVE_save_stack_nonlocal
1011 if (HAVE_save_stack_nonlocal)
1012 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1013 #endif
1014 #ifdef STACK_SAVEAREA_MODE
1015 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1016 #endif
1018 stack_save
1019 = gen_rtx_MEM (sa_mode,
1020 memory_address
1021 (sa_mode,
1022 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1024 #ifdef HAVE_setjmp
1025 if (HAVE_setjmp)
1026 emit_insn (gen_setjmp ());
1027 #endif
1029 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1032 /* Expand a call to __builtin_prefetch. For a target that does not support
1033 data prefetch, evaluate the memory address argument in case it has side
1034 effects. */
1036 static void
1037 expand_builtin_prefetch (tree exp)
1039 tree arg0, arg1, arg2;
1040 int nargs;
1041 rtx op0, op1, op2;
1043 if (!validate_arglist (exp, POINTER_TYPE, 0))
1044 return;
1046 arg0 = CALL_EXPR_ARG (exp, 0);
1048 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1049 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1050 locality). */
1051 nargs = call_expr_nargs (exp);
1052 if (nargs > 1)
1053 arg1 = CALL_EXPR_ARG (exp, 1);
1054 else
1055 arg1 = integer_zero_node;
1056 if (nargs > 2)
1057 arg2 = CALL_EXPR_ARG (exp, 2);
1058 else
1059 arg2 = build_int_cst (NULL_TREE, 3);
1061 /* Argument 0 is an address. */
1062 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1064 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1065 if (TREE_CODE (arg1) != INTEGER_CST)
1067 error ("second argument to %<__builtin_prefetch%> must be a constant");
1068 arg1 = integer_zero_node;
1070 op1 = expand_normal (arg1);
1071 /* Argument 1 must be either zero or one. */
1072 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1074 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1075 " using zero");
1076 op1 = const0_rtx;
1079 /* Argument 2 (locality) must be a compile-time constant int. */
1080 if (TREE_CODE (arg2) != INTEGER_CST)
1082 error ("third argument to %<__builtin_prefetch%> must be a constant");
1083 arg2 = integer_zero_node;
1085 op2 = expand_normal (arg2);
1086 /* Argument 2 must be 0, 1, 2, or 3. */
1087 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1089 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1090 op2 = const0_rtx;
1093 #ifdef HAVE_prefetch
1094 if (HAVE_prefetch)
1096 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1097 (op0,
1098 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1099 || (GET_MODE (op0) != Pmode))
1101 op0 = convert_memory_address (Pmode, op0);
1102 op0 = force_reg (Pmode, op0);
1104 emit_insn (gen_prefetch (op0, op1, op2));
1106 #endif
1108 /* Don't do anything with direct references to volatile memory, but
1109 generate code to handle other side effects. */
1110 if (!MEM_P (op0) && side_effects_p (op0))
1111 emit_insn (op0);
1114 /* Get a MEM rtx for expression EXP which is the address of an operand
1115 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1116 the maximum length of the block of memory that might be accessed or
1117 NULL if unknown. */
1119 static rtx
1120 get_memory_rtx (tree exp, tree len)
1122 tree orig_exp = exp;
1123 rtx addr, mem;
1124 HOST_WIDE_INT off;
1126 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1127 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1128 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1129 exp = TREE_OPERAND (exp, 0);
1131 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1132 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1134 /* Get an expression we can use to find the attributes to assign to MEM.
1135 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1136 we can. First remove any nops. */
1137 while (CONVERT_EXPR_P (exp)
1138 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1139 exp = TREE_OPERAND (exp, 0);
1141 off = 0;
1142 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1143 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1144 && host_integerp (TREE_OPERAND (exp, 1), 0)
1145 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1146 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1147 else if (TREE_CODE (exp) == ADDR_EXPR)
1148 exp = TREE_OPERAND (exp, 0);
1149 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1150 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1151 else
1152 exp = NULL;
1154 /* Honor attributes derived from exp, except for the alias set
1155 (as builtin stringops may alias with anything) and the size
1156 (as stringops may access multiple array elements). */
1157 if (exp)
1159 set_mem_attributes (mem, exp, 0);
1161 if (off)
1162 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1164 /* Allow the string and memory builtins to overflow from one
1165 field into another, see http://gcc.gnu.org/PR23561.
1166 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1167 memory accessed by the string or memory builtin will fit
1168 within the field. */
1169 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1171 tree mem_expr = MEM_EXPR (mem);
1172 HOST_WIDE_INT offset = -1, length = -1;
1173 tree inner = exp;
1175 while (TREE_CODE (inner) == ARRAY_REF
1176 || CONVERT_EXPR_P (inner)
1177 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1178 || TREE_CODE (inner) == SAVE_EXPR)
1179 inner = TREE_OPERAND (inner, 0);
1181 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1183 if (MEM_OFFSET (mem)
1184 && CONST_INT_P (MEM_OFFSET (mem)))
1185 offset = INTVAL (MEM_OFFSET (mem));
1187 if (offset >= 0 && len && host_integerp (len, 0))
1188 length = tree_low_cst (len, 0);
1190 while (TREE_CODE (inner) == COMPONENT_REF)
1192 tree field = TREE_OPERAND (inner, 1);
1193 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1194 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1196 /* Bitfields are generally not byte-addressable. */
1197 gcc_assert (!DECL_BIT_FIELD (field)
1198 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1199 % BITS_PER_UNIT) == 0
1200 && host_integerp (DECL_SIZE (field), 0)
1201 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1202 % BITS_PER_UNIT) == 0));
1204 /* If we can prove that the memory starting at XEXP (mem, 0) and
1205 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1206 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1207 fields without DECL_SIZE_UNIT like flexible array members. */
1208 if (length >= 0
1209 && DECL_SIZE_UNIT (field)
1210 && host_integerp (DECL_SIZE_UNIT (field), 0))
1212 HOST_WIDE_INT size
1213 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1214 if (offset <= size
1215 && length <= size
1216 && offset + length <= size)
1217 break;
1220 if (offset >= 0
1221 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1222 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1223 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1224 / BITS_PER_UNIT;
1225 else
1227 offset = -1;
1228 length = -1;
1231 mem_expr = TREE_OPERAND (mem_expr, 0);
1232 inner = TREE_OPERAND (inner, 0);
1235 if (mem_expr == NULL)
1236 offset = -1;
1237 if (mem_expr != MEM_EXPR (mem))
1239 set_mem_expr (mem, mem_expr);
1240 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1243 set_mem_alias_set (mem, 0);
1244 set_mem_size (mem, NULL_RTX);
1247 return mem;
1250 /* Built-in functions to perform an untyped call and return. */
1252 /* For each register that may be used for calling a function, this
1253 gives a mode used to copy the register's value. VOIDmode indicates
1254 the register is not used for calling a function. If the machine
1255 has register windows, this gives only the outbound registers.
1256 INCOMING_REGNO gives the corresponding inbound register. */
1257 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1259 /* For each register that may be used for returning values, this gives
1260 a mode used to copy the register's value. VOIDmode indicates the
1261 register is not used for returning values. If the machine has
1262 register windows, this gives only the outbound registers.
1263 INCOMING_REGNO gives the corresponding inbound register. */
1264 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1266 /* Return the size required for the block returned by __builtin_apply_args,
1267 and initialize apply_args_mode. */
1269 static int
1270 apply_args_size (void)
1272 static int size = -1;
1273 int align;
1274 unsigned int regno;
1275 enum machine_mode mode;
1277 /* The values computed by this function never change. */
1278 if (size < 0)
1280 /* The first value is the incoming arg-pointer. */
1281 size = GET_MODE_SIZE (Pmode);
1283 /* The second value is the structure value address unless this is
1284 passed as an "invisible" first argument. */
1285 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1286 size += GET_MODE_SIZE (Pmode);
1288 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1289 if (FUNCTION_ARG_REGNO_P (regno))
1291 mode = reg_raw_mode[regno];
1293 gcc_assert (mode != VOIDmode);
1295 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1296 if (size % align != 0)
1297 size = CEIL (size, align) * align;
1298 size += GET_MODE_SIZE (mode);
1299 apply_args_mode[regno] = mode;
1301 else
1303 apply_args_mode[regno] = VOIDmode;
1306 return size;
1309 /* Return the size required for the block returned by __builtin_apply,
1310 and initialize apply_result_mode. */
1312 static int
1313 apply_result_size (void)
1315 static int size = -1;
1316 int align, regno;
1317 enum machine_mode mode;
1319 /* The values computed by this function never change. */
1320 if (size < 0)
1322 size = 0;
1324 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1325 if (targetm.calls.function_value_regno_p (regno))
1327 mode = reg_raw_mode[regno];
1329 gcc_assert (mode != VOIDmode);
1331 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1332 if (size % align != 0)
1333 size = CEIL (size, align) * align;
1334 size += GET_MODE_SIZE (mode);
1335 apply_result_mode[regno] = mode;
1337 else
1338 apply_result_mode[regno] = VOIDmode;
1340 /* Allow targets that use untyped_call and untyped_return to override
1341 the size so that machine-specific information can be stored here. */
1342 #ifdef APPLY_RESULT_SIZE
1343 size = APPLY_RESULT_SIZE;
1344 #endif
1346 return size;
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1380 /* Save the state required to perform an untyped call with the same
1381 arguments as were passed to the current function. */
1383 static rtx
1384 expand_builtin_apply_args_1 (void)
1386 rtx registers, tem;
1387 int size, align, regno;
1388 enum machine_mode mode;
1389 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1391 /* Create a block where the arg-pointer, structure value address,
1392 and argument registers can be saved. */
1393 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1395 /* Walk past the arg-pointer and structure value address. */
1396 size = GET_MODE_SIZE (Pmode);
1397 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1398 size += GET_MODE_SIZE (Pmode);
1400 /* Save each register used in calling a function to the block. */
1401 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1402 if ((mode = apply_args_mode[regno]) != VOIDmode)
1404 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1405 if (size % align != 0)
1406 size = CEIL (size, align) * align;
1408 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1410 emit_move_insn (adjust_address (registers, mode, size), tem);
1411 size += GET_MODE_SIZE (mode);
1414 /* Save the arg pointer to the block. */
1415 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1416 #ifdef STACK_GROWS_DOWNWARD
1417 /* We need the pointer as the caller actually passed them to us, not
1418 as we might have pretended they were passed. Make sure it's a valid
1419 operand, as emit_move_insn isn't expected to handle a PLUS. */
1421 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1422 NULL_RTX);
1423 #endif
1424 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1426 size = GET_MODE_SIZE (Pmode);
1428 /* Save the structure value address unless this is passed as an
1429 "invisible" first argument. */
1430 if (struct_incoming_value)
1432 emit_move_insn (adjust_address (registers, Pmode, size),
1433 copy_to_reg (struct_incoming_value));
1434 size += GET_MODE_SIZE (Pmode);
1437 /* Return the address of the block. */
1438 return copy_addr_to_reg (XEXP (registers, 0));
1441 /* __builtin_apply_args returns block of memory allocated on
1442 the stack into which is stored the arg pointer, structure
1443 value address, static chain, and all the registers that might
1444 possibly be used in performing a function call. The code is
1445 moved to the start of the function so the incoming values are
1446 saved. */
1448 static rtx
1449 expand_builtin_apply_args (void)
1451 /* Don't do __builtin_apply_args more than once in a function.
1452 Save the result of the first call and reuse it. */
1453 if (apply_args_value != 0)
1454 return apply_args_value;
1456 /* When this function is called, it means that registers must be
1457 saved on entry to this function. So we migrate the
1458 call to the first insn of this function. */
1459 rtx temp;
1460 rtx seq;
1462 start_sequence ();
1463 temp = expand_builtin_apply_args_1 ();
1464 seq = get_insns ();
1465 end_sequence ();
1467 apply_args_value = temp;
1469 /* Put the insns after the NOTE that starts the function.
1470 If this is inside a start_sequence, make the outer-level insn
1471 chain current, so the code is placed at the start of the
1472 function. If internal_arg_pointer is a non-virtual pseudo,
1473 it needs to be placed after the function that initializes
1474 that pseudo. */
1475 push_topmost_sequence ();
1476 if (REG_P (crtl->args.internal_arg_pointer)
1477 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1478 emit_insn_before (seq, parm_birth_insn);
1479 else
1480 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1481 pop_topmost_sequence ();
1482 return temp;
1486 /* Perform an untyped call and save the state required to perform an
1487 untyped return of whatever value was returned by the given function. */
1489 static rtx
1490 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1492 int size, align, regno;
1493 enum machine_mode mode;
1494 rtx incoming_args, result, reg, dest, src, call_insn;
1495 rtx old_stack_level = 0;
1496 rtx call_fusage = 0;
1497 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1499 arguments = convert_memory_address (Pmode, arguments);
1501 /* Create a block where the return registers can be saved. */
1502 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1504 /* Fetch the arg pointer from the ARGUMENTS block. */
1505 incoming_args = gen_reg_rtx (Pmode);
1506 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1507 #ifndef STACK_GROWS_DOWNWARD
1508 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1509 incoming_args, 0, OPTAB_LIB_WIDEN);
1510 #endif
1512 /* Push a new argument block and copy the arguments. Do not allow
1513 the (potential) memcpy call below to interfere with our stack
1514 manipulations. */
1515 do_pending_stack_adjust ();
1516 NO_DEFER_POP;
1518 /* Save the stack with nonlocal if available. */
1519 #ifdef HAVE_save_stack_nonlocal
1520 if (HAVE_save_stack_nonlocal)
1521 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1522 else
1523 #endif
1524 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1526 /* Allocate a block of memory onto the stack and copy the memory
1527 arguments to the outgoing arguments address. */
1528 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1530 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1531 may have already set current_function_calls_alloca to true.
1532 current_function_calls_alloca won't be set if argsize is zero,
1533 so we have to guarantee need_drap is true here. */
1534 if (SUPPORTS_STACK_ALIGNMENT)
1535 crtl->need_drap = true;
1537 dest = virtual_outgoing_args_rtx;
1538 #ifndef STACK_GROWS_DOWNWARD
1539 if (CONST_INT_P (argsize))
1540 dest = plus_constant (dest, -INTVAL (argsize));
1541 else
1542 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1543 #endif
1544 dest = gen_rtx_MEM (BLKmode, dest);
1545 set_mem_align (dest, PARM_BOUNDARY);
1546 src = gen_rtx_MEM (BLKmode, incoming_args);
1547 set_mem_align (src, PARM_BOUNDARY);
1548 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1550 /* Refer to the argument block. */
1551 apply_args_size ();
1552 arguments = gen_rtx_MEM (BLKmode, arguments);
1553 set_mem_align (arguments, PARM_BOUNDARY);
1555 /* Walk past the arg-pointer and structure value address. */
1556 size = GET_MODE_SIZE (Pmode);
1557 if (struct_value)
1558 size += GET_MODE_SIZE (Pmode);
1560 /* Restore each of the registers previously saved. Make USE insns
1561 for each of these registers for use in making the call. */
1562 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1563 if ((mode = apply_args_mode[regno]) != VOIDmode)
1565 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1566 if (size % align != 0)
1567 size = CEIL (size, align) * align;
1568 reg = gen_rtx_REG (mode, regno);
1569 emit_move_insn (reg, adjust_address (arguments, mode, size));
1570 use_reg (&call_fusage, reg);
1571 size += GET_MODE_SIZE (mode);
1574 /* Restore the structure value address unless this is passed as an
1575 "invisible" first argument. */
1576 size = GET_MODE_SIZE (Pmode);
1577 if (struct_value)
1579 rtx value = gen_reg_rtx (Pmode);
1580 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1581 emit_move_insn (struct_value, value);
1582 if (REG_P (struct_value))
1583 use_reg (&call_fusage, struct_value);
1584 size += GET_MODE_SIZE (Pmode);
1587 /* All arguments and registers used for the call are set up by now! */
1588 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1590 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1591 and we don't want to load it into a register as an optimization,
1592 because prepare_call_address already did it if it should be done. */
1593 if (GET_CODE (function) != SYMBOL_REF)
1594 function = memory_address (FUNCTION_MODE, function);
1596 /* Generate the actual call instruction and save the return value. */
1597 #ifdef HAVE_untyped_call
1598 if (HAVE_untyped_call)
1599 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1600 result, result_vector (1, result)));
1601 else
1602 #endif
1603 #ifdef HAVE_call_value
1604 if (HAVE_call_value)
1606 rtx valreg = 0;
1608 /* Locate the unique return register. It is not possible to
1609 express a call that sets more than one return register using
1610 call_value; use untyped_call for that. In fact, untyped_call
1611 only needs to save the return registers in the given block. */
1612 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1613 if ((mode = apply_result_mode[regno]) != VOIDmode)
1615 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1617 valreg = gen_rtx_REG (mode, regno);
1620 emit_call_insn (GEN_CALL_VALUE (valreg,
1621 gen_rtx_MEM (FUNCTION_MODE, function),
1622 const0_rtx, NULL_RTX, const0_rtx));
1624 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1626 else
1627 #endif
1628 gcc_unreachable ();
1630 /* Find the CALL insn we just emitted, and attach the register usage
1631 information. */
1632 call_insn = last_call_insn ();
1633 add_function_usage_to (call_insn, call_fusage);
1635 /* Restore the stack. */
1636 #ifdef HAVE_save_stack_nonlocal
1637 if (HAVE_save_stack_nonlocal)
1638 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1639 else
1640 #endif
1641 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1643 OK_DEFER_POP;
1645 /* Return the address of the result block. */
1646 result = copy_addr_to_reg (XEXP (result, 0));
1647 return convert_memory_address (ptr_mode, result);
1650 /* Perform an untyped return. */
1652 static void
1653 expand_builtin_return (rtx result)
1655 int size, align, regno;
1656 enum machine_mode mode;
1657 rtx reg;
1658 rtx call_fusage = 0;
1660 result = convert_memory_address (Pmode, result);
1662 apply_result_size ();
1663 result = gen_rtx_MEM (BLKmode, result);
1665 #ifdef HAVE_untyped_return
1666 if (HAVE_untyped_return)
1668 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1669 emit_barrier ();
1670 return;
1672 #endif
1674 /* Restore the return value and note that each value is used. */
1675 size = 0;
1676 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1677 if ((mode = apply_result_mode[regno]) != VOIDmode)
1679 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1680 if (size % align != 0)
1681 size = CEIL (size, align) * align;
1682 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1683 emit_move_insn (reg, adjust_address (result, mode, size));
1685 push_to_sequence (call_fusage);
1686 emit_use (reg);
1687 call_fusage = get_insns ();
1688 end_sequence ();
1689 size += GET_MODE_SIZE (mode);
1692 /* Put the USE insns before the return. */
1693 emit_insn (call_fusage);
1695 /* Return whatever values was restored by jumping directly to the end
1696 of the function. */
1697 expand_naked_return ();
1700 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1702 static enum type_class
1703 type_to_class (tree type)
1705 switch (TREE_CODE (type))
1707 case VOID_TYPE: return void_type_class;
1708 case INTEGER_TYPE: return integer_type_class;
1709 case ENUMERAL_TYPE: return enumeral_type_class;
1710 case BOOLEAN_TYPE: return boolean_type_class;
1711 case POINTER_TYPE: return pointer_type_class;
1712 case REFERENCE_TYPE: return reference_type_class;
1713 case OFFSET_TYPE: return offset_type_class;
1714 case REAL_TYPE: return real_type_class;
1715 case COMPLEX_TYPE: return complex_type_class;
1716 case FUNCTION_TYPE: return function_type_class;
1717 case METHOD_TYPE: return method_type_class;
1718 case RECORD_TYPE: return record_type_class;
1719 case UNION_TYPE:
1720 case QUAL_UNION_TYPE: return union_type_class;
1721 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1722 ? string_type_class : array_type_class);
1723 case LANG_TYPE: return lang_type_class;
1724 default: return no_type_class;
1728 /* Expand a call EXP to __builtin_classify_type. */
1730 static rtx
1731 expand_builtin_classify_type (tree exp)
1733 if (call_expr_nargs (exp))
1734 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1735 return GEN_INT (no_type_class);
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
    fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
    fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
    fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
    fcodel = BUILT_IN_MATHFN##L_R ; break;
1752 /* Return mathematic function equivalent to FN but operating directly
1753 on TYPE, if available. If IMPLICIT is true find the function in
1754 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1755 can't do the conversion, return zero. */
1757 static tree
1758 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1760 tree const *const fn_arr
1761 = implicit ? implicit_built_in_decls : built_in_decls;
1762 enum built_in_function fcode, fcodef, fcodel;
1764 switch (fn)
1766 CASE_MATHFN (BUILT_IN_ACOS)
1767 CASE_MATHFN (BUILT_IN_ACOSH)
1768 CASE_MATHFN (BUILT_IN_ASIN)
1769 CASE_MATHFN (BUILT_IN_ASINH)
1770 CASE_MATHFN (BUILT_IN_ATAN)
1771 CASE_MATHFN (BUILT_IN_ATAN2)
1772 CASE_MATHFN (BUILT_IN_ATANH)
1773 CASE_MATHFN (BUILT_IN_CBRT)
1774 CASE_MATHFN (BUILT_IN_CEIL)
1775 CASE_MATHFN (BUILT_IN_CEXPI)
1776 CASE_MATHFN (BUILT_IN_COPYSIGN)
1777 CASE_MATHFN (BUILT_IN_COS)
1778 CASE_MATHFN (BUILT_IN_COSH)
1779 CASE_MATHFN (BUILT_IN_DREM)
1780 CASE_MATHFN (BUILT_IN_ERF)
1781 CASE_MATHFN (BUILT_IN_ERFC)
1782 CASE_MATHFN (BUILT_IN_EXP)
1783 CASE_MATHFN (BUILT_IN_EXP10)
1784 CASE_MATHFN (BUILT_IN_EXP2)
1785 CASE_MATHFN (BUILT_IN_EXPM1)
1786 CASE_MATHFN (BUILT_IN_FABS)
1787 CASE_MATHFN (BUILT_IN_FDIM)
1788 CASE_MATHFN (BUILT_IN_FLOOR)
1789 CASE_MATHFN (BUILT_IN_FMA)
1790 CASE_MATHFN (BUILT_IN_FMAX)
1791 CASE_MATHFN (BUILT_IN_FMIN)
1792 CASE_MATHFN (BUILT_IN_FMOD)
1793 CASE_MATHFN (BUILT_IN_FREXP)
1794 CASE_MATHFN (BUILT_IN_GAMMA)
1795 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1796 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1797 CASE_MATHFN (BUILT_IN_HYPOT)
1798 CASE_MATHFN (BUILT_IN_ILOGB)
1799 CASE_MATHFN (BUILT_IN_INF)
1800 CASE_MATHFN (BUILT_IN_ISINF)
1801 CASE_MATHFN (BUILT_IN_J0)
1802 CASE_MATHFN (BUILT_IN_J1)
1803 CASE_MATHFN (BUILT_IN_JN)
1804 CASE_MATHFN (BUILT_IN_LCEIL)
1805 CASE_MATHFN (BUILT_IN_LDEXP)
1806 CASE_MATHFN (BUILT_IN_LFLOOR)
1807 CASE_MATHFN (BUILT_IN_LGAMMA)
1808 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1809 CASE_MATHFN (BUILT_IN_LLCEIL)
1810 CASE_MATHFN (BUILT_IN_LLFLOOR)
1811 CASE_MATHFN (BUILT_IN_LLRINT)
1812 CASE_MATHFN (BUILT_IN_LLROUND)
1813 CASE_MATHFN (BUILT_IN_LOG)
1814 CASE_MATHFN (BUILT_IN_LOG10)
1815 CASE_MATHFN (BUILT_IN_LOG1P)
1816 CASE_MATHFN (BUILT_IN_LOG2)
1817 CASE_MATHFN (BUILT_IN_LOGB)
1818 CASE_MATHFN (BUILT_IN_LRINT)
1819 CASE_MATHFN (BUILT_IN_LROUND)
1820 CASE_MATHFN (BUILT_IN_MODF)
1821 CASE_MATHFN (BUILT_IN_NAN)
1822 CASE_MATHFN (BUILT_IN_NANS)
1823 CASE_MATHFN (BUILT_IN_NEARBYINT)
1824 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1825 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1826 CASE_MATHFN (BUILT_IN_POW)
1827 CASE_MATHFN (BUILT_IN_POWI)
1828 CASE_MATHFN (BUILT_IN_POW10)
1829 CASE_MATHFN (BUILT_IN_REMAINDER)
1830 CASE_MATHFN (BUILT_IN_REMQUO)
1831 CASE_MATHFN (BUILT_IN_RINT)
1832 CASE_MATHFN (BUILT_IN_ROUND)
1833 CASE_MATHFN (BUILT_IN_SCALB)
1834 CASE_MATHFN (BUILT_IN_SCALBLN)
1835 CASE_MATHFN (BUILT_IN_SCALBN)
1836 CASE_MATHFN (BUILT_IN_SIGNBIT)
1837 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1838 CASE_MATHFN (BUILT_IN_SIN)
1839 CASE_MATHFN (BUILT_IN_SINCOS)
1840 CASE_MATHFN (BUILT_IN_SINH)
1841 CASE_MATHFN (BUILT_IN_SQRT)
1842 CASE_MATHFN (BUILT_IN_TAN)
1843 CASE_MATHFN (BUILT_IN_TANH)
1844 CASE_MATHFN (BUILT_IN_TGAMMA)
1845 CASE_MATHFN (BUILT_IN_TRUNC)
1846 CASE_MATHFN (BUILT_IN_Y0)
1847 CASE_MATHFN (BUILT_IN_Y1)
1848 CASE_MATHFN (BUILT_IN_YN)
1850 default:
1851 return NULL_TREE;
1854 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1855 return fn_arr[fcode];
1856 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1857 return fn_arr[fcodef];
1858 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1859 return fn_arr[fcodel];
1860 else
1861 return NULL_TREE;
1864 /* Like mathfn_built_in_1(), but always use the implicit array. */
1866 tree
1867 mathfn_built_in (tree type, enum built_in_function fn)
1869 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  A NaN is the only value that
     compares unequal to itself, so the EQ self-comparison below jumps
     to LAB (skipping the errno update) exactly when the result is a
     number.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.
     This is only possible when the target told us the value of EDOM.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      /* Default: address errno as an ordinary word-mode global.  */
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Map the builtin to its expansion optab; note which builtins can set
     errno so a domain check can be emitted after the computation.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* sqrt only sets errno for a negative argument, so the check can
	 be skipped when the argument is provably non-negative.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint; nearbyint and rint only
	 differ in whether they may raise the inexact exception.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Without -fmath-errno, or when the mode has no NaNs to detect a
     domain error with, the errno check is pointless.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (mode, builtin_optab, op0, target, 0);

      if (target != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, target);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2035 /* Expand a call to the builtin binary math functions (pow and atan2).
2036 Return NULL_RTX if a normal call should be emitted rather than expanding the
2037 function in-line. EXP is the expression that is a call to the builtin
2038 function; if convenient, the result should be placed in TARGET.
2039 SUBTARGET may be used as the target for computing one of EXP's
2040 operands. */
2042 static rtx
2043 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2045 optab builtin_optab;
2046 rtx op0, op1, insns;
2047 int op1_type = REAL_TYPE;
2048 tree fndecl = get_callee_fndecl (exp);
2049 tree arg0, arg1;
2050 enum machine_mode mode;
2051 bool errno_set = true;
2053 switch (DECL_FUNCTION_CODE (fndecl))
2055 CASE_FLT_FN (BUILT_IN_SCALBN):
2056 CASE_FLT_FN (BUILT_IN_SCALBLN):
2057 CASE_FLT_FN (BUILT_IN_LDEXP):
2058 op1_type = INTEGER_TYPE;
2059 default:
2060 break;
2063 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2064 return NULL_RTX;
2066 arg0 = CALL_EXPR_ARG (exp, 0);
2067 arg1 = CALL_EXPR_ARG (exp, 1);
2069 switch (DECL_FUNCTION_CODE (fndecl))
2071 CASE_FLT_FN (BUILT_IN_POW):
2072 builtin_optab = pow_optab; break;
2073 CASE_FLT_FN (BUILT_IN_ATAN2):
2074 builtin_optab = atan2_optab; break;
2075 CASE_FLT_FN (BUILT_IN_SCALB):
2076 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2077 return 0;
2078 builtin_optab = scalb_optab; break;
2079 CASE_FLT_FN (BUILT_IN_SCALBN):
2080 CASE_FLT_FN (BUILT_IN_SCALBLN):
2081 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2082 return 0;
2083 /* Fall through... */
2084 CASE_FLT_FN (BUILT_IN_LDEXP):
2085 builtin_optab = ldexp_optab; break;
2086 CASE_FLT_FN (BUILT_IN_FMOD):
2087 builtin_optab = fmod_optab; break;
2088 CASE_FLT_FN (BUILT_IN_REMAINDER):
2089 CASE_FLT_FN (BUILT_IN_DREM):
2090 builtin_optab = remainder_optab; break;
2091 default:
2092 gcc_unreachable ();
2095 /* Make a suitable register to place result in. */
2096 mode = TYPE_MODE (TREE_TYPE (exp));
2098 /* Before working hard, check whether the instruction is available. */
2099 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2100 return NULL_RTX;
2102 target = gen_reg_rtx (mode);
2104 if (! flag_errno_math || ! HONOR_NANS (mode))
2105 errno_set = false;
2107 /* Always stabilize the argument list. */
2108 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2109 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2111 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2112 op1 = expand_normal (arg1);
2114 start_sequence ();
2116 /* Compute into TARGET.
2117 Set TARGET to wherever the result comes back. */
2118 target = expand_binop (mode, builtin_optab, op0, op1,
2119 target, 0, OPTAB_DIRECT);
2121 /* If we were unable to expand via the builtin, stop the sequence
2122 (without outputting the insns) and call to the library function
2123 with the stabilized argument list. */
2124 if (target == 0)
2126 end_sequence ();
2127 return expand_call (exp, target, target == const0_rtx);
2130 if (errno_set)
2131 expand_errno_check (exp, target);
2133 /* Output the entire sequence. */
2134 insns = get_insns ();
2135 end_sequence ();
2136 emit_insn (insns);
2138 return target;
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Prefer a combined sincos expansion for either function.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int result;

	  /* sincos computes both results at once; keep only the one
	     this builtin asked for by passing 0 for the other slot.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (result);
	}
      else
	{
	  target = expand_unop (mode, builtin_optab, op0, target, 0);
	}

      if (target != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
/* Given an interclass math builtin decl FNDECL and it's argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = 0;
  enum machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode)->insn_code;
  return CODE_FOR_nothing;
}
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      /* Remember where we are so everything emitted below can be
	 deleted again if the insn pattern fails to match.  */
      rtx last = get_last_insn ();
      tree orig_arg = arg;
      /* Make a suitable register to place result in.  */
      if (!target
	  || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
	  || !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      gcc_assert (insn_data[icode].operand[0].predicate
		  (target, GET_MODE (target)));

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
	return target;
      /* Expansion failed: roll back the emitted insns and the argument
	 stabilization, then fall through to a normal call.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);

  /* sincos (x, double *sinp, double *cosp): one real and two pointer
     arguments.  */
  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* op1/op2 are the memory locations *sinp and *cosp.  */
  op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
  op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  SUBTARGET may be used as the target
   for computing one of EXP's operands.

   Three strategies, in order of preference: a sincos optab insn, a call
   to the sincos library function, or a call to cexp with a pure
   imaginary argument.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      /* Pick the sincos variant that matches the cexpi type.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_SINCOSF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_SINCOS];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_SINCOSL];
      else
	gcc_unreachable ();

      /* Stack temporaries for sincos to store the sin/cos results in,
	 plus address trees to pass as its pointer arguments.  */
      op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
      op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_CEXPF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_CEXP];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_CEXPL];
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi (x) == cexp (0 + x*i).  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: cexpi (x) = cos (x) + sin (x)*i,
     with op2 (cos) as the real part and op1 (sin) as the imaginary.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  /* Calls are made through a pointer to the function decl.  */
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}

/* Shorthand for call sites that have no useful source location.  */
#define build_call_nofold(...) \
  build_call_nofold_loc (UNKNOWN_LOCATION, __VA_ARGS__)
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the direct real->integer optab, and the real->real rounding
     builtin to lower to should the optab be unavailable.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more the once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  /* Lower to (int) floor/ceil (x).  */
  exp = build_call_nofold (fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math)
    return NULL_RTX;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab; break;
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more the once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  end_sequence ();

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2724 /* To evaluate powi(x,n), the floating point value x raised to the
2725 constant integer exponent n, we use a hybrid algorithm that
2726 combines the "window method" with look-up tables. For an
2727 introduction to exponentiation algorithms and "addition chains",
2728 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2729 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2730 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2731 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
/* Provide a default value for POWI_MAX_MULTS, the maximum number of
   multiplications to inline before calling the system library's pow
   function.  powi(x,n) requires at worst 2*bits(n)-2 multiplications,
   so this default never requires calling pow, powf or powl.  */

#ifndef POWI_MAX_MULTS
#define POWI_MAX_MULTS  (2*HOST_BITS_PER_WIDE_INT-2)
#endif

/* The size of the "optimal power tree" lookup table.  All
   exponents less than this value are simply looked up in the
   powi_table below.  This threshold is also used to size the
   cache of pseudo registers that hold intermediate results.  */
#define POWI_TABLE_SIZE 256

/* The size, in bits, of the window used in the "window method"
   exponentiation algorithm.  This is equivalent to a radix of
   (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method".  */
#define POWI_WINDOW_SIZE 3
/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states that an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".  */

static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2797 /* Return the number of multiplications required to calculate
2798 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2799 subroutine of powi_cost. CACHE is an array indicating
2800 which exponents have already been calculated. */
2802 static int
2803 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2805 /* If we've already calculated this exponent, then this evaluation
2806 doesn't require any additional multiplications. */
2807 if (cache[n])
2808 return 0;
2810 cache[n] = true;
2811 return powi_lookup_cost (n - powi_table[n], cache)
2812 + powi_lookup_cost (powi_table[n], cache) + 1;
2815 /* Return the number of multiplications required to calculate
2816 powi(x,n) for an arbitrary x, given the exponent N. This
2817 function needs to be kept in sync with expand_powi below. */
2819 static int
2820 powi_cost (HOST_WIDE_INT n)
2822 bool cache[POWI_TABLE_SIZE];
2823 unsigned HOST_WIDE_INT digit;
2824 unsigned HOST_WIDE_INT val;
2825 int result;
2827 if (n == 0)
2828 return 0;
2830 /* Ignore the reciprocal when calculating the cost. */
2831 val = (n < 0) ? -n : n;
2833 /* Initialize the exponent cache. */
2834 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2835 cache[1] = true;
2837 result = 0;
2839 while (val >= POWI_TABLE_SIZE)
2841 if (val & 1)
2843 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2844 result += powi_lookup_cost (digit, cache)
2845 + POWI_WINDOW_SIZE + 1;
2846 val >>= POWI_WINDOW_SIZE;
2848 else
2850 val >>= 1;
2851 result++;
2855 return result + powi_lookup_cost (val, cache);
/* Recursive subroutine of expand_powi.  This function takes the array,
   CACHE, of already calculated exponents and an exponent N and returns
   an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE.  */

static rtx
expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
{
  unsigned HOST_WIDE_INT digit;
  rtx target, result;
  rtx op0, op1;

  if (n < POWI_TABLE_SIZE)
    {
      /* Small exponents: reuse a cached power, or split per the
	 optimal power tree (n = powi_table[n] + (n - powi_table[n])).  */
      if (cache[n])
	return cache[n];

      target = gen_reg_rtx (mode);
      cache[n] = target;

      op0 = expand_powi_1 (mode, n - powi_table[n], cache);
      op1 = expand_powi_1 (mode, powi_table[n], cache);
    }
  else if (n & 1)
    {
      /* Window method, odd case: peel off a POWI_WINDOW_SIZE-bit digit
	 and multiply it back in.  */
      target = gen_reg_rtx (mode);
      digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      op0 = expand_powi_1 (mode, n - digit, cache);
      op1 = expand_powi_1 (mode, digit, cache);
    }
  else
    {
      /* Even case: x**n = (x**(n/2))**2, a single squaring.  */
      target = gen_reg_rtx (mode);
      op0 = expand_powi_1 (mode, n >> 1, cache);
      op1 = op0;
    }

  result = expand_mult (mode, op0, op1, target, 0);
  if (result != target)
    emit_move_insn (target, result);
  return target;
}
2900 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2901 floating point operand in mode MODE, and N is the exponent. This
2902 function needs to be kept in sync with powi_cost above. */
2904 static rtx
2905 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2907 rtx cache[POWI_TABLE_SIZE];
2908 rtx result;
2910 if (n == 0)
2911 return CONST1_RTX (mode);
2913 memset (cache, 0, sizeof (cache));
2914 cache[1] = x;
2916 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2918 /* If the original exponent was negative, reciprocate the result. */
2919 if (n < 0)
2920 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2921 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2923 return result;
2926 /* Fold a builtin function call to pow, powf, or powl into a series of sqrts or
2927 cbrts. Return NULL_RTX if no simplification can be made or expand the tree
2928 if we can simplify it. */
2929 static rtx
2930 expand_builtin_pow_root (location_t loc, tree arg0, tree arg1, tree type,
2931 rtx subtarget)
2933 if (TREE_CODE (arg1) == REAL_CST
2934 && !TREE_OVERFLOW (arg1)
2935 && flag_unsafe_math_optimizations)
2937 enum machine_mode mode = TYPE_MODE (type);
2938 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
2939 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
2940 REAL_VALUE_TYPE c = TREE_REAL_CST (arg1);
2941 tree op = NULL_TREE;
2943 if (sqrtfn)
2945 /* Optimize pow (x, 0.5) into sqrt. */
2946 if (REAL_VALUES_EQUAL (c, dconsthalf))
2947 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
2949 else
2951 REAL_VALUE_TYPE dconst1_4 = dconst1;
2952 REAL_VALUE_TYPE dconst3_4;
2953 SET_REAL_EXP (&dconst1_4, REAL_EXP (&dconst1_4) - 2);
2955 real_from_integer (&dconst3_4, VOIDmode, 3, 0, 0);
2956 SET_REAL_EXP (&dconst3_4, REAL_EXP (&dconst3_4) - 2);
2958 /* Optimize pow (x, 0.25) into sqrt (sqrt (x)). Assume on most
2959 machines that a builtin sqrt instruction is smaller than a
2960 call to pow with 0.25, so do this optimization even if
2961 -Os. */
2962 if (REAL_VALUES_EQUAL (c, dconst1_4))
2964 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
2965 op = build_call_nofold_loc (loc, sqrtfn, 1, op);
2968 /* Optimize pow (x, 0.75) = sqrt (x) * sqrt (sqrt (x)) unless we
2969 are optimizing for space. */
2970 else if (optimize_insn_for_speed_p ()
2971 && !TREE_SIDE_EFFECTS (arg0)
2972 && REAL_VALUES_EQUAL (c, dconst3_4))
2974 tree sqrt1 = build_call_expr_loc (loc, sqrtfn, 1, arg0);
2975 tree sqrt2 = builtin_save_expr (sqrt1);
2976 tree sqrt3 = build_call_expr_loc (loc, sqrtfn, 1, sqrt1);
2977 op = fold_build2_loc (loc, MULT_EXPR, type, sqrt2, sqrt3);
2982 /* Check whether we can do cbrt insstead of pow (x, 1./3.) and
2983 cbrt/sqrts instead of pow (x, 1./6.). */
2984 if (cbrtfn && ! op
2985 && (tree_expr_nonnegative_p (arg0) || !HONOR_NANS (mode)))
2987 /* First try 1/3. */
2988 REAL_VALUE_TYPE dconst1_3
2989 = real_value_truncate (mode, dconst_third ());
2991 if (REAL_VALUES_EQUAL (c, dconst1_3))
2992 op = build_call_nofold_loc (loc, cbrtfn, 1, arg0);
2994 /* Now try 1/6. */
2995 else if (optimize_insn_for_speed_p ())
2997 REAL_VALUE_TYPE dconst1_6 = dconst1_3;
2998 SET_REAL_EXP (&dconst1_6, REAL_EXP (&dconst1_6) - 1);
3000 if (REAL_VALUES_EQUAL (c, dconst1_6))
3002 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3003 op = build_call_nofold_loc (loc, cbrtfn, 1, op);
3008 if (op)
3009 return expand_expr (op, subtarget, mode, EXPAND_NORMAL);
3012 return NULL_RTX;
3015 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
3016 a normal call should be emitted rather than expanding the function
3017 in-line. EXP is the expression that is a call to the builtin
3018 function; if convenient, the result should be placed in TARGET. */
3020 static rtx
3021 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
3023 tree arg0, arg1;
3024 tree fn, narg0;
3025 tree type = TREE_TYPE (exp);
3026 REAL_VALUE_TYPE cint, c, c2;
3027 HOST_WIDE_INT n;
3028 rtx op, op2;
3029 enum machine_mode mode = TYPE_MODE (type);
3031 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
3032 return NULL_RTX;
3034 arg0 = CALL_EXPR_ARG (exp, 0);
3035 arg1 = CALL_EXPR_ARG (exp, 1);
3037 if (TREE_CODE (arg1) != REAL_CST
3038 || TREE_OVERFLOW (arg1))
3039 return expand_builtin_mathfn_2 (exp, target, subtarget);
3041 /* Handle constant exponents. */
3043 /* For integer valued exponents we can expand to an optimal multiplication
3044 sequence using expand_powi. */
3045 c = TREE_REAL_CST (arg1);
3046 n = real_to_integer (&c);
3047 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3048 if (real_identical (&c, &cint)
3049 && ((n >= -1 && n <= 2)
3050 || (flag_unsafe_math_optimizations
3051 && optimize_insn_for_speed_p ()
3052 && powi_cost (n) <= POWI_MAX_MULTS)))
3054 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3055 if (n != 1)
3057 op = force_reg (mode, op);
3058 op = expand_powi (op, mode, n);
3060 return op;
3063 narg0 = builtin_save_expr (arg0);
3065 /* If the exponent is not integer valued, check if it is half of an integer.
3066 In this case we can expand to sqrt (x) * x**(n/2). */
3067 fn = mathfn_built_in (type, BUILT_IN_SQRT);
3068 if (fn != NULL_TREE)
3070 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
3071 n = real_to_integer (&c2);
3072 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3073 if (real_identical (&c2, &cint)
3074 && ((flag_unsafe_math_optimizations
3075 && optimize_insn_for_speed_p ()
3076 && powi_cost (n/2) <= POWI_MAX_MULTS)
3077 /* Even the c == 0.5 case cannot be done unconditionally
3078 when we need to preserve signed zeros, as
3079 pow (-0, 0.5) is +0, while sqrt(-0) is -0. */
3080 || (!HONOR_SIGNED_ZEROS (mode) && n == 1)
3081 /* For c == 1.5 we can assume that x * sqrt (x) is always
3082 smaller than pow (x, 1.5) if sqrt will not be expanded
3083 as a call. */
3084 || (n == 3
3085 && (optab_handler (sqrt_optab, mode)->insn_code
3086 != CODE_FOR_nothing))))
3088 tree call_expr = build_call_nofold (fn, 1, narg0);
3089 /* Use expand_expr in case the newly built call expression
3090 was folded to a non-call. */
3091 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3092 if (n != 1)
3094 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3095 op2 = force_reg (mode, op2);
3096 op2 = expand_powi (op2, mode, abs (n / 2));
3097 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3098 0, OPTAB_LIB_WIDEN);
3099 /* If the original exponent was negative, reciprocate the
3100 result. */
3101 if (n < 0)
3102 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3103 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3105 return op;
3109 /* Check whether we can do a series of sqrt or cbrt's instead of the pow
3110 call. */
3111 op = expand_builtin_pow_root (EXPR_LOCATION (exp), arg0, arg1, type,
3112 subtarget);
3113 if (op)
3114 return op;
3116 /* Try if the exponent is a third of an integer. In this case
3117 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3118 different from pow (x, 1./3.) due to rounding and behavior
3119 with negative x we need to constrain this transformation to
3120 unsafe math and positive x or finite math. */
3121 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3122 if (fn != NULL_TREE
3123 && flag_unsafe_math_optimizations
3124 && (tree_expr_nonnegative_p (arg0)
3125 || !HONOR_NANS (mode)))
3127 REAL_VALUE_TYPE dconst3;
3128 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3129 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3130 real_round (&c2, mode, &c2);
3131 n = real_to_integer (&c2);
3132 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3133 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3134 real_convert (&c2, mode, &c2);
3135 if (real_identical (&c2, &c)
3136 && ((optimize_insn_for_speed_p ()
3137 && powi_cost (n/3) <= POWI_MAX_MULTS)
3138 || n == 1))
3140 tree call_expr = build_call_nofold (fn, 1,narg0);
3141 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3142 if (abs (n) % 3 == 2)
3143 op = expand_simple_binop (mode, MULT, op, op, op,
3144 0, OPTAB_LIB_WIDEN);
3145 if (n != 1)
3147 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3148 op2 = force_reg (mode, op2);
3149 op2 = expand_powi (op2, mode, abs (n / 3));
3150 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3151 0, OPTAB_LIB_WIDEN);
3152 /* If the original exponent was negative, reciprocate the
3153 result. */
3154 if (n < 0)
3155 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3156 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3158 return op;
3162 /* Fall back to optab expansion. */
3163 return expand_builtin_mathfn_2 (exp, target, subtarget);
3166 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3167 a normal call should be emitted rather than expanding the function
3168 in-line. EXP is the expression that is a call to the builtin
3169 function; if convenient, the result should be placed in TARGET. */
3171 static rtx
3172 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3174 tree arg0, arg1;
3175 rtx op0, op1;
3176 enum machine_mode mode;
3177 enum machine_mode mode2;
3179 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3180 return NULL_RTX;
3182 arg0 = CALL_EXPR_ARG (exp, 0);
3183 arg1 = CALL_EXPR_ARG (exp, 1);
3184 mode = TYPE_MODE (TREE_TYPE (exp));
3186 /* Handle constant power. */
3188 if (TREE_CODE (arg1) == INTEGER_CST
3189 && !TREE_OVERFLOW (arg1))
3191 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3193 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3194 Otherwise, check the number of multiplications required. */
3195 if ((TREE_INT_CST_HIGH (arg1) == 0
3196 || TREE_INT_CST_HIGH (arg1) == -1)
3197 && ((n >= -1 && n <= 2)
3198 || (optimize_insn_for_speed_p ()
3199 && powi_cost (n) <= POWI_MAX_MULTS)))
3201 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3202 op0 = force_reg (mode, op0);
3203 return expand_powi (op0, mode, n);
3207 /* Emit a libcall to libgcc. */
3209 /* Mode of the 2nd argument must match that of an int. */
3210 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3212 if (target == NULL_RTX)
3213 target = gen_reg_rtx (mode);
3215 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3216 if (GET_MODE (op0) != mode)
3217 op0 = convert_to_mode (mode, op0, 0);
3218 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3219 if (GET_MODE (op1) != mode2)
3220 op1 = convert_to_mode (mode2, op1, 0);
3222 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3223 target, LCT_CONST, mode, 2,
3224 op0, mode, op1, mode2);
3226 return target;
3229 /* Expand expression EXP which is a call to the strlen builtin. Return
3230 NULL_RTX if we failed the caller should emit a normal call, otherwise
3231 try to get the result in TARGET, if convenient. */
3233 static rtx
3234 expand_builtin_strlen (tree exp, rtx target,
3235 enum machine_mode target_mode)
3237 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3238 return NULL_RTX;
3239 else
3241 rtx pat;
3242 tree len;
3243 tree src = CALL_EXPR_ARG (exp, 0);
3244 rtx result, src_reg, char_rtx, before_strlen;
3245 enum machine_mode insn_mode = target_mode, char_mode;
3246 enum insn_code icode = CODE_FOR_nothing;
3247 int align;
3249 /* If the length can be computed at compile-time, return it. */
3250 len = c_strlen (src, 0);
3251 if (len)
3252 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3254 /* If the length can be computed at compile-time and is constant
3255 integer, but there are side-effects in src, evaluate
3256 src for side-effects, then return len.
3257 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3258 can be optimized into: i++; x = 3; */
3259 len = c_strlen (src, 1);
3260 if (len && TREE_CODE (len) == INTEGER_CST)
3262 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3263 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3266 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3268 /* If SRC is not a pointer type, don't do this operation inline. */
3269 if (align == 0)
3270 return NULL_RTX;
3272 /* Bail out if we can't compute strlen in the right mode. */
3273 while (insn_mode != VOIDmode)
3275 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3276 if (icode != CODE_FOR_nothing)
3277 break;
3279 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3281 if (insn_mode == VOIDmode)
3282 return NULL_RTX;
3284 /* Make a place to write the result of the instruction. */
3285 result = target;
3286 if (! (result != 0
3287 && REG_P (result)
3288 && GET_MODE (result) == insn_mode
3289 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3290 result = gen_reg_rtx (insn_mode);
3292 /* Make a place to hold the source address. We will not expand
3293 the actual source until we are sure that the expansion will
3294 not fail -- there are trees that cannot be expanded twice. */
3295 src_reg = gen_reg_rtx (Pmode);
3297 /* Mark the beginning of the strlen sequence so we can emit the
3298 source operand later. */
3299 before_strlen = get_last_insn ();
3301 char_rtx = const0_rtx;
3302 char_mode = insn_data[(int) icode].operand[2].mode;
3303 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3304 char_mode))
3305 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3307 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3308 char_rtx, GEN_INT (align));
3309 if (! pat)
3310 return NULL_RTX;
3311 emit_insn (pat);
3313 /* Now that we are assured of success, expand the source. */
3314 start_sequence ();
3315 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3316 if (pat != src_reg)
3317 emit_move_insn (src_reg, pat);
3318 pat = get_insns ();
3319 end_sequence ();
3321 if (before_strlen)
3322 emit_insn_after (pat, before_strlen);
3323 else
3324 emit_insn_before (pat, get_insns ());
3326 /* Return the value in the proper mode for this function. */
3327 if (GET_MODE (result) == target_mode)
3328 target = result;
3329 else if (target != 0)
3330 convert_move (target, result, 0);
3331 else
3332 target = convert_to_mode (target_mode, result, 0);
3334 return target;
3338 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3339 bytes from constant string DATA + OFFSET and return it as target
3340 constant. */
3342 static rtx
3343 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3344 enum machine_mode mode)
3346 const char *str = (const char *) data;
3348 gcc_assert (offset >= 0
3349 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3350 <= strlen (str) + 1));
3352 return c_readstr (str + offset, mode);
3355 /* Expand a call EXP to the memcpy builtin.
3356 Return NULL_RTX if we failed, the caller should emit a normal call,
3357 otherwise try to get the result in TARGET, if convenient (and in
3358 mode MODE if that's convenient). */
3360 static rtx
3361 expand_builtin_memcpy (tree exp, rtx target)
3363 if (!validate_arglist (exp,
3364 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3365 return NULL_RTX;
3366 else
3368 tree dest = CALL_EXPR_ARG (exp, 0);
3369 tree src = CALL_EXPR_ARG (exp, 1);
3370 tree len = CALL_EXPR_ARG (exp, 2);
3371 const char *src_str;
3372 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3373 unsigned int dest_align
3374 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3375 rtx dest_mem, src_mem, dest_addr, len_rtx;
3376 HOST_WIDE_INT expected_size = -1;
3377 unsigned int expected_align = 0;
3379 /* If DEST is not a pointer type, call the normal function. */
3380 if (dest_align == 0)
3381 return NULL_RTX;
3383 /* If either SRC is not a pointer type, don't do this
3384 operation in-line. */
3385 if (src_align == 0)
3386 return NULL_RTX;
3388 if (currently_expanding_gimple_stmt)
3389 stringop_block_profile (currently_expanding_gimple_stmt,
3390 &expected_align, &expected_size);
3392 if (expected_align < dest_align)
3393 expected_align = dest_align;
3394 dest_mem = get_memory_rtx (dest, len);
3395 set_mem_align (dest_mem, dest_align);
3396 len_rtx = expand_normal (len);
3397 src_str = c_getstr (src);
3399 /* If SRC is a string constant and block move would be done
3400 by pieces, we can avoid loading the string from memory
3401 and only stored the computed constants. */
3402 if (src_str
3403 && CONST_INT_P (len_rtx)
3404 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3405 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3406 CONST_CAST (char *, src_str),
3407 dest_align, false))
3409 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3410 builtin_memcpy_read_str,
3411 CONST_CAST (char *, src_str),
3412 dest_align, false, 0);
3413 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3414 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3415 return dest_mem;
3418 src_mem = get_memory_rtx (src, len);
3419 set_mem_align (src_mem, src_align);
3421 /* Copy word part most expediently. */
3422 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3423 CALL_EXPR_TAILCALL (exp)
3424 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3425 expected_align, expected_size);
3427 if (dest_addr == 0)
3429 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3430 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3432 return dest_addr;
3436 /* Expand a call EXP to the mempcpy builtin.
3437 Return NULL_RTX if we failed; the caller should emit a normal call,
3438 otherwise try to get the result in TARGET, if convenient (and in
3439 mode MODE if that's convenient). If ENDP is 0 return the
3440 destination pointer, if ENDP is 1 return the end pointer ala
3441 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3442 stpcpy. */
3444 static rtx
3445 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3447 if (!validate_arglist (exp,
3448 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3449 return NULL_RTX;
3450 else
3452 tree dest = CALL_EXPR_ARG (exp, 0);
3453 tree src = CALL_EXPR_ARG (exp, 1);
3454 tree len = CALL_EXPR_ARG (exp, 2);
3455 return expand_builtin_mempcpy_args (dest, src, len,
3456 target, mode, /*endp=*/ 1);
3460 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3461 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3462 so that this can also be called without constructing an actual CALL_EXPR.
3463 The other arguments and return value are the same as for
3464 expand_builtin_mempcpy. */
3466 static rtx
3467 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3468 rtx target, enum machine_mode mode, int endp)
3470 /* If return value is ignored, transform mempcpy into memcpy. */
3471 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3473 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3474 tree result = build_call_nofold (fn, 3, dest, src, len);
3475 return expand_expr (result, target, mode, EXPAND_NORMAL);
3477 else
3479 const char *src_str;
3480 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3481 unsigned int dest_align
3482 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3483 rtx dest_mem, src_mem, len_rtx;
3485 /* If either SRC or DEST is not a pointer type, don't do this
3486 operation in-line. */
3487 if (dest_align == 0 || src_align == 0)
3488 return NULL_RTX;
3490 /* If LEN is not constant, call the normal function. */
3491 if (! host_integerp (len, 1))
3492 return NULL_RTX;
3494 len_rtx = expand_normal (len);
3495 src_str = c_getstr (src);
3497 /* If SRC is a string constant and block move would be done
3498 by pieces, we can avoid loading the string from memory
3499 and only stored the computed constants. */
3500 if (src_str
3501 && CONST_INT_P (len_rtx)
3502 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3503 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3504 CONST_CAST (char *, src_str),
3505 dest_align, false))
3507 dest_mem = get_memory_rtx (dest, len);
3508 set_mem_align (dest_mem, dest_align);
3509 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3510 builtin_memcpy_read_str,
3511 CONST_CAST (char *, src_str),
3512 dest_align, false, endp);
3513 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3514 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3515 return dest_mem;
3518 if (CONST_INT_P (len_rtx)
3519 && can_move_by_pieces (INTVAL (len_rtx),
3520 MIN (dest_align, src_align)))
3522 dest_mem = get_memory_rtx (dest, len);
3523 set_mem_align (dest_mem, dest_align);
3524 src_mem = get_memory_rtx (src, len);
3525 set_mem_align (src_mem, src_align);
3526 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3527 MIN (dest_align, src_align), endp);
3528 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3529 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3530 return dest_mem;
3533 return NULL_RTX;
3537 #ifndef HAVE_movstr
3538 # define HAVE_movstr 0
3539 # define CODE_FOR_movstr CODE_FOR_nothing
3540 #endif
3542 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3543 we failed, the caller should emit a normal call, otherwise try to
3544 get the result in TARGET, if convenient. If ENDP is 0 return the
3545 destination pointer, if ENDP is 1 return the end pointer ala
3546 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3547 stpcpy. */
3549 static rtx
3550 expand_movstr (tree dest, tree src, rtx target, int endp)
3552 rtx end;
3553 rtx dest_mem;
3554 rtx src_mem;
3555 rtx insn;
3556 const struct insn_data * data;
3558 if (!HAVE_movstr)
3559 return NULL_RTX;
3561 dest_mem = get_memory_rtx (dest, NULL);
3562 src_mem = get_memory_rtx (src, NULL);
3563 data = insn_data + CODE_FOR_movstr;
3564 if (!endp)
3566 target = force_reg (Pmode, XEXP (dest_mem, 0));
3567 dest_mem = replace_equiv_address (dest_mem, target);
3568 end = gen_reg_rtx (Pmode);
3570 else
3572 if (target == 0
3573 || target == const0_rtx
3574 || ! (*data->operand[0].predicate) (target, Pmode))
3576 end = gen_reg_rtx (Pmode);
3577 if (target != const0_rtx)
3578 target = end;
3580 else
3581 end = target;
3584 if (data->operand[0].mode != VOIDmode)
3585 end = gen_lowpart (data->operand[0].mode, end);
3587 insn = data->genfun (end, dest_mem, src_mem);
3589 gcc_assert (insn);
3591 emit_insn (insn);
3593 /* movstr is supposed to set end to the address of the NUL
3594 terminator. If the caller requested a mempcpy-like return value,
3595 adjust it. */
3596 if (endp == 1 && target != const0_rtx)
3598 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3599 emit_move_insn (target, force_operand (tem, NULL_RTX));
3602 return target;
3605 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3606 NULL_RTX if we failed the caller should emit a normal call, otherwise
3607 try to get the result in TARGET, if convenient (and in mode MODE if that's
3608 convenient). */
3610 static rtx
3611 expand_builtin_strcpy (tree exp, rtx target)
3613 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3615 tree dest = CALL_EXPR_ARG (exp, 0);
3616 tree src = CALL_EXPR_ARG (exp, 1);
3617 return expand_builtin_strcpy_args (dest, src, target);
3619 return NULL_RTX;
3622 /* Helper function to do the actual work for expand_builtin_strcpy. The
3623 arguments to the builtin_strcpy call DEST and SRC are broken out
3624 so that this can also be called without constructing an actual CALL_EXPR.
3625 The other arguments and return value are the same as for
3626 expand_builtin_strcpy. */
3628 static rtx
3629 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3631 return expand_movstr (dest, src, target, /*endp=*/0);
3634 /* Expand a call EXP to the stpcpy builtin.
3635 Return NULL_RTX if we failed the caller should emit a normal call,
3636 otherwise try to get the result in TARGET, if convenient (and in
3637 mode MODE if that's convenient). */
3639 static rtx
3640 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3642 tree dst, src;
3643 location_t loc = EXPR_LOCATION (exp);
3645 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3646 return NULL_RTX;
3648 dst = CALL_EXPR_ARG (exp, 0);
3649 src = CALL_EXPR_ARG (exp, 1);
3651 /* If return value is ignored, transform stpcpy into strcpy. */
3652 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3654 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3655 tree result = build_call_nofold (fn, 2, dst, src);
3656 return expand_expr (result, target, mode, EXPAND_NORMAL);
3658 else
3660 tree len, lenp1;
3661 rtx ret;
3663 /* Ensure we get an actual string whose length can be evaluated at
3664 compile-time, not an expression containing a string. This is
3665 because the latter will potentially produce pessimized code
3666 when used to produce the return value. */
3667 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3668 return expand_movstr (dst, src, target, /*endp=*/2);
3670 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3671 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3672 target, mode, /*endp=*/2);
3674 if (ret)
3675 return ret;
3677 if (TREE_CODE (len) == INTEGER_CST)
3679 rtx len_rtx = expand_normal (len);
3681 if (CONST_INT_P (len_rtx))
3683 ret = expand_builtin_strcpy_args (dst, src, target);
3685 if (ret)
3687 if (! target)
3689 if (mode != VOIDmode)
3690 target = gen_reg_rtx (mode);
3691 else
3692 target = gen_reg_rtx (GET_MODE (ret));
3694 if (GET_MODE (target) != GET_MODE (ret))
3695 ret = gen_lowpart (GET_MODE (target), ret);
3697 ret = plus_constant (ret, INTVAL (len_rtx));
3698 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3699 gcc_assert (ret);
3701 return target;
3706 return expand_movstr (dst, src, target, /*endp=*/2);
3710 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3711 bytes from constant string DATA + OFFSET and return it as target
3712 constant. */
3715 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3716 enum machine_mode mode)
3718 const char *str = (const char *) data;
3720 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3721 return const0_rtx;
3723 return c_readstr (str + offset, mode);
3726 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3727 NULL_RTX if we failed the caller should emit a normal call. */
3729 static rtx
3730 expand_builtin_strncpy (tree exp, rtx target)
3732 location_t loc = EXPR_LOCATION (exp);
3734 if (validate_arglist (exp,
3735 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3737 tree dest = CALL_EXPR_ARG (exp, 0);
3738 tree src = CALL_EXPR_ARG (exp, 1);
3739 tree len = CALL_EXPR_ARG (exp, 2);
3740 tree slen = c_strlen (src, 1);
3742 /* We must be passed a constant len and src parameter. */
3743 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3744 return NULL_RTX;
3746 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3748 /* We're required to pad with trailing zeros if the requested
3749 len is greater than strlen(s2)+1. In that case try to
3750 use store_by_pieces, if it fails, punt. */
3751 if (tree_int_cst_lt (slen, len))
3753 unsigned int dest_align
3754 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3755 const char *p = c_getstr (src);
3756 rtx dest_mem;
3758 if (!p || dest_align == 0 || !host_integerp (len, 1)
3759 || !can_store_by_pieces (tree_low_cst (len, 1),
3760 builtin_strncpy_read_str,
3761 CONST_CAST (char *, p),
3762 dest_align, false))
3763 return NULL_RTX;
3765 dest_mem = get_memory_rtx (dest, len);
3766 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3767 builtin_strncpy_read_str,
3768 CONST_CAST (char *, p), dest_align, false, 0);
3769 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3770 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3771 return dest_mem;
3774 return NULL_RTX;
3777 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3778 bytes from constant string DATA + OFFSET and return it as target
3779 constant. */
3782 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3783 enum machine_mode mode)
3785 const char *c = (const char *) data;
3786 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3788 memset (p, *c, GET_MODE_SIZE (mode));
3790 return c_readstr (p, mode);
3793 /* Callback routine for store_by_pieces. Return the RTL of a register
3794 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3795 char value given in the RTL register data. For example, if mode is
3796 4 bytes wide, return the RTL for 0x01010101*data. */
3798 static rtx
3799 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3800 enum machine_mode mode)
3802 rtx target, coeff;
3803 size_t size;
3804 char *p;
3806 size = GET_MODE_SIZE (mode);
3807 if (size == 1)
3808 return (rtx) data;
3810 p = XALLOCAVEC (char, size);
3811 memset (p, 1, size);
3812 coeff = c_readstr (p, mode);
3814 target = convert_to_mode (mode, (rtx) data, 1);
3815 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3816 return force_reg (mode, target);
3819 /* Expand expression EXP, which is a call to the memset builtin. Return
3820 NULL_RTX if we failed the caller should emit a normal call, otherwise
3821 try to get the result in TARGET, if convenient (and in mode MODE if that's
3822 convenient). */
3824 static rtx
3825 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3827 if (!validate_arglist (exp,
3828 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3829 return NULL_RTX;
3830 else
3832 tree dest = CALL_EXPR_ARG (exp, 0);
3833 tree val = CALL_EXPR_ARG (exp, 1);
3834 tree len = CALL_EXPR_ARG (exp, 2);
3835 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3839 /* Helper function to do the actual work for expand_builtin_memset. The
3840 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3841 so that this can also be called without constructing an actual CALL_EXPR.
3842 The other arguments and return value are the same as for
3843 expand_builtin_memset. */
3845 static rtx
3846 expand_builtin_memset_args (tree dest, tree val, tree len,
3847 rtx target, enum machine_mode mode, tree orig_exp)
3849 tree fndecl, fn;
3850 enum built_in_function fcode;
3851 char c;
3852 unsigned int dest_align;
3853 rtx dest_mem, dest_addr, len_rtx;
3854 HOST_WIDE_INT expected_size = -1;
3855 unsigned int expected_align = 0;
3857 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3859 /* If DEST is not a pointer type, don't do this operation in-line. */
3860 if (dest_align == 0)
3861 return NULL_RTX;
/* When expanding a gimple statement, value profiling may supply better
   expected size/alignment hints for the block operation.  */
3863 if (currently_expanding_gimple_stmt)
3864 stringop_block_profile (currently_expanding_gimple_stmt,
3865 &expected_align, &expected_size);
3867 if (expected_align < dest_align)
3868 expected_align = dest_align;
3870 /* If the LEN parameter is zero, return DEST. */
3871 if (integer_zerop (len))
3873 /* Evaluate and ignore VAL in case it has side-effects. */
3874 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3875 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3878 /* Stabilize the arguments in case we fail. */
3879 dest = builtin_save_expr (dest);
3880 val = builtin_save_expr (val);
3881 len = builtin_save_expr (len);
3883 len_rtx = expand_normal (len);
3884 dest_mem = get_memory_rtx (dest, len);
/* Case 1: VAL is not a compile-time constant byte.  Try store_by_pieces
   with a value-generating callback, else a target setmem pattern.  */
3886 if (TREE_CODE (val) != INTEGER_CST)
3888 rtx val_rtx;
3890 val_rtx = expand_normal (val);
3891 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3892 val_rtx, 0);
3894 /* Assume that we can memset by pieces if we can store
3895 * the coefficients by pieces (in the required modes).
3896 * We can't pass builtin_memset_gen_str as that emits RTL. */
3897 c = 1;
3898 if (host_integerp (len, 1)
3899 && can_store_by_pieces (tree_low_cst (len, 1),
3900 builtin_memset_read_str, &c, dest_align,
3901 true))
3903 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3904 val_rtx);
3905 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3906 builtin_memset_gen_str, val_rtx, dest_align,
3907 true, 0);
3909 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3910 dest_align, expected_align,
3911 expected_size))
3912 goto do_libcall;
3914 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3915 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3916 return dest_mem;
/* Case 2: VAL is constant.  Reduce it to a single fill byte C; bail out
   to the library call if the cast fails.  */
3919 if (target_char_cast (val, &c))
3920 goto do_libcall;
3922 if (c)
3924 if (host_integerp (len, 1)
3925 && can_store_by_pieces (tree_low_cst (len, 1),
3926 builtin_memset_read_str, &c, dest_align,
3927 true))
3928 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3929 builtin_memset_read_str, &c, dest_align, true, 0);
3930 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3931 dest_align, expected_align,
3932 expected_size))
3933 goto do_libcall;
3935 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3936 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3937 return dest_mem;
/* Case 3: C == 0, i.e. a block clear: use clear_storage_hints.  */
3940 set_mem_align (dest_mem, dest_align);
3941 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3942 CALL_EXPR_TAILCALL (orig_exp)
3943 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3944 expected_align, expected_size);
3946 if (dest_addr == 0)
3948 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3949 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3952 return dest_addr;
/* Inline expansion failed: emit an explicit library call, reusing the
   stabilized arguments above so they are not evaluated twice.  */
3954 do_libcall:
3955 fndecl = get_callee_fndecl (orig_exp);
3956 fcode = DECL_FUNCTION_CODE (fndecl);
3957 if (fcode == BUILT_IN_MEMSET)
3958 fn = build_call_nofold (fndecl, 3, dest, val, len);
3959 else if (fcode == BUILT_IN_BZERO)
3960 fn = build_call_nofold (fndecl, 2, dest, len);
3961 else
3962 gcc_unreachable ();
3963 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3964 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3965 return expand_call (fn, target, target == const0_rtx);
3968 /* Expand expression EXP, which is a call to the bzero builtin. Return
3969 NULL_RTX if we failed the caller should emit a normal call. */
3971 static rtx
3972 expand_builtin_bzero (tree exp)
3974 tree dest, size;
3975 location_t loc = EXPR_LOCATION (exp);
3977 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3978 return NULL_RTX;
3980 dest = CALL_EXPR_ARG (exp, 0);
3981 size = CALL_EXPR_ARG (exp, 1);
3983 /* New argument list transforming bzero(ptr x, int y) to
3984 memset(ptr x, int 0, size_t y). This is done this way
3985 so that if it isn't expanded inline, we fallback to
3986 calling bzero instead of memset. */
3988 return expand_builtin_memset_args (dest, integer_zero_node,
3989 fold_convert_loc (loc, sizetype, size),
3990 const0_rtx, VOIDmode, exp);
3993 /* Expand expression EXP, which is a call to the memcmp built-in function.
3994 Return NULL_RTX if we failed and the
3995 caller should emit a normal call, otherwise try to get the result in
3996 TARGET, if convenient (and in mode MODE, if that's convenient). */
3998 static rtx
3999 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4000 ATTRIBUTE_UNUSED enum machine_mode mode)
4002 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4004 if (!validate_arglist (exp,
4005 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4006 return NULL_RTX;
/* Inline expansion is attempted only when the target provides a
   cmpmemsi or cmpstrnsi insn pattern.  */
4008 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4010 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4011 rtx result;
4012 rtx insn;
4013 tree arg1 = CALL_EXPR_ARG (exp, 0);
4014 tree arg2 = CALL_EXPR_ARG (exp, 1);
4015 tree len = CALL_EXPR_ARG (exp, 2);
4017 int arg1_align
4018 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4019 int arg2_align
4020 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4021 enum machine_mode insn_mode;
/* Pick the result mode from whichever compare pattern is available,
   preferring cmpmemsi.  */
4023 #ifdef HAVE_cmpmemsi
4024 if (HAVE_cmpmemsi)
4025 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4026 else
4027 #endif
4028 #ifdef HAVE_cmpstrnsi
4029 if (HAVE_cmpstrnsi)
4030 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4031 else
4032 #endif
4033 return NULL_RTX;
4035 /* If we don't have POINTER_TYPE, call the function. */
4036 if (arg1_align == 0 || arg2_align == 0)
4037 return NULL_RTX;
4039 /* Make a place to write the result of the instruction. */
4040 result = target;
4041 if (! (result != 0
4042 && REG_P (result) && GET_MODE (result) == insn_mode
4043 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4044 result = gen_reg_rtx (insn_mode);
4046 arg1_rtx = get_memory_rtx (arg1, len);
4047 arg2_rtx = get_memory_rtx (arg2, len);
4048 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4050 /* Set MEM_SIZE as appropriate. */
4051 if (CONST_INT_P (arg3_rtx))
4053 set_mem_size (arg1_rtx, arg3_rtx);
4054 set_mem_size (arg2_rtx, arg3_rtx);
4057 #ifdef HAVE_cmpmemsi
4058 if (HAVE_cmpmemsi)
4059 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4060 GEN_INT (MIN (arg1_align, arg2_align)));
4061 else
4062 #endif
4063 #ifdef HAVE_cmpstrnsi
4064 if (HAVE_cmpstrnsi)
4065 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4066 GEN_INT (MIN (arg1_align, arg2_align)));
4067 else
4068 #endif
4069 gcc_unreachable ();
/* If the expander declined to produce an insn, fall back to calling
   memcmp through a libcall on the raw addresses.  */
4071 if (insn)
4072 emit_insn (insn);
4073 else
4074 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4075 TYPE_MODE (integer_type_node), 3,
4076 XEXP (arg1_rtx, 0), Pmode,
4077 XEXP (arg2_rtx, 0), Pmode,
4078 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4079 TYPE_UNSIGNED (sizetype)),
4080 TYPE_MODE (sizetype));
4082 /* Return the value in the proper mode for this function. */
4083 mode = TYPE_MODE (TREE_TYPE (exp));
4084 if (GET_MODE (result) == mode)
4085 return result;
4086 else if (target != 0)
4088 convert_move (target, result, 0);
4089 return target;
4091 else
4092 return convert_to_mode (mode, result, 0);
4094 #endif
4096 return NULL_RTX;
4099 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4100 if we failed the caller should emit a normal call, otherwise try to get
4101 the result in TARGET, if convenient. */
4103 static rtx
4104 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4106 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4107 return NULL_RTX;
4109 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
/* Only attempt inline expansion when the target actually implements a
   string-compare pattern for SImode results.  */
4110 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4111 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4113 rtx arg1_rtx, arg2_rtx;
4114 rtx result, insn = NULL_RTX;
4115 tree fndecl, fn;
4116 tree arg1 = CALL_EXPR_ARG (exp, 0);
4117 tree arg2 = CALL_EXPR_ARG (exp, 1);
4119 int arg1_align
4120 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4121 int arg2_align
4122 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4124 /* If we don't have POINTER_TYPE, call the function. */
4125 if (arg1_align == 0 || arg2_align == 0)
4126 return NULL_RTX;
4128 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4129 arg1 = builtin_save_expr (arg1);
4130 arg2 = builtin_save_expr (arg2);
4132 arg1_rtx = get_memory_rtx (arg1, NULL);
4133 arg2_rtx = get_memory_rtx (arg2, NULL);
4135 #ifdef HAVE_cmpstrsi
4136 /* Try to call cmpstrsi. */
4137 if (HAVE_cmpstrsi)
4139 enum machine_mode insn_mode
4140 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4142 /* Make a place to write the result of the instruction. */
4143 result = target;
4144 if (! (result != 0
4145 && REG_P (result) && GET_MODE (result) == insn_mode
4146 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4147 result = gen_reg_rtx (insn_mode);
4149 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4150 GEN_INT (MIN (arg1_align, arg2_align)));
4152 #endif
4153 #ifdef HAVE_cmpstrnsi
4154 /* Try to determine at least one length and call cmpstrnsi. */
4155 if (!insn && HAVE_cmpstrnsi)
4157 tree len;
4158 rtx arg3_rtx;
4160 enum machine_mode insn_mode
4161 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4162 tree len1 = c_strlen (arg1, 1);
4163 tree len2 = c_strlen (arg2, 1);
/* The +1 extends each known length to cover the terminating NUL, so
   the bounded compare matches strcmp semantics.  */
4165 if (len1)
4166 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4167 if (len2)
4168 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4170 /* If we don't have a constant length for the first, use the length
4171 of the second, if we know it. We don't require a constant for
4172 this case; some cost analysis could be done if both are available
4173 but neither is constant. For now, assume they're equally cheap,
4174 unless one has side effects. If both strings have constant lengths,
4175 use the smaller. */
4177 if (!len1)
4178 len = len2;
4179 else if (!len2)
4180 len = len1;
4181 else if (TREE_SIDE_EFFECTS (len1))
4182 len = len2;
4183 else if (TREE_SIDE_EFFECTS (len2))
4184 len = len1;
4185 else if (TREE_CODE (len1) != INTEGER_CST)
4186 len = len2;
4187 else if (TREE_CODE (len2) != INTEGER_CST)
4188 len = len1;
4189 else if (tree_int_cst_lt (len1, len2))
4190 len = len1;
4191 else
4192 len = len2;
4194 /* If both arguments have side effects, we cannot optimize. */
4195 if (!len || TREE_SIDE_EFFECTS (len))
4196 goto do_libcall;
4198 arg3_rtx = expand_normal (len);
4200 /* Make a place to write the result of the instruction. */
4201 result = target;
4202 if (! (result != 0
4203 && REG_P (result) && GET_MODE (result) == insn_mode
4204 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4205 result = gen_reg_rtx (insn_mode);
4207 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4208 GEN_INT (MIN (arg1_align, arg2_align)));
4210 #endif
/* A pattern fired: emit it and convert the result to the mode callers
   of strcmp expect.  */
4212 if (insn)
4214 enum machine_mode mode;
4215 emit_insn (insn);
4217 /* Return the value in the proper mode for this function. */
4218 mode = TYPE_MODE (TREE_TYPE (exp));
4219 if (GET_MODE (result) == mode)
4220 return result;
4221 if (target == 0)
4222 return convert_to_mode (mode, result, 0);
4223 convert_move (target, result, 0);
4224 return target;
4227 /* Expand the library call ourselves using a stabilized argument
4228 list to avoid re-evaluating the function's arguments twice. */
4229 #ifdef HAVE_cmpstrnsi
4230 do_libcall:
4231 #endif
4232 fndecl = get_callee_fndecl (exp);
4233 fn = build_call_nofold (fndecl, 2, arg1, arg2);
4234 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4235 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4236 return expand_call (fn, target, target == const0_rtx);
4238 #endif
4239 return NULL_RTX;
4242 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4243 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4244 the result in TARGET, if convenient. */
4246 static rtx
4247 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4248 ATTRIBUTE_UNUSED enum machine_mode mode)
4250 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4252 if (!validate_arglist (exp,
4253 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4254 return NULL_RTX;
4256 /* If c_strlen can determine an expression for one of the string
4257 lengths, and it doesn't have side effects, then emit cmpstrnsi
4258 using length MIN(strlen(string)+1, arg3). */
4259 #ifdef HAVE_cmpstrnsi
4260 if (HAVE_cmpstrnsi)
4262 tree len, len1, len2;
4263 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4264 rtx result, insn;
4265 tree fndecl, fn;
4266 tree arg1 = CALL_EXPR_ARG (exp, 0);
4267 tree arg2 = CALL_EXPR_ARG (exp, 1);
4268 tree arg3 = CALL_EXPR_ARG (exp, 2);
4270 int arg1_align
4271 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4272 int arg2_align
4273 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4274 enum machine_mode insn_mode
4275 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4277 len1 = c_strlen (arg1, 1);
4278 len2 = c_strlen (arg2, 1);
/* The +1 covers the terminating NUL, matching strncmp semantics for
   strings shorter than the bound.  */
4280 if (len1)
4281 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4282 if (len2)
4283 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4285 /* If we don't have a constant length for the first, use the length
4286 of the second, if we know it. We don't require a constant for
4287 this case; some cost analysis could be done if both are available
4288 but neither is constant. For now, assume they're equally cheap,
4289 unless one has side effects. If both strings have constant lengths,
4290 use the smaller. */
4292 if (!len1)
4293 len = len2;
4294 else if (!len2)
4295 len = len1;
4296 else if (TREE_SIDE_EFFECTS (len1))
4297 len = len2;
4298 else if (TREE_SIDE_EFFECTS (len2))
4299 len = len1;
4300 else if (TREE_CODE (len1) != INTEGER_CST)
4301 len = len2;
4302 else if (TREE_CODE (len2) != INTEGER_CST)
4303 len = len1;
4304 else if (tree_int_cst_lt (len1, len2))
4305 len = len1;
4306 else
4307 len = len2;
4309 /* If both arguments have side effects, we cannot optimize. */
4310 if (!len || TREE_SIDE_EFFECTS (len))
4311 return NULL_RTX;
4313 /* The actual new length parameter is MIN(len,arg3). */
4314 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4315 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4317 /* If we don't have POINTER_TYPE, call the function. */
4318 if (arg1_align == 0 || arg2_align == 0)
4319 return NULL_RTX;
4321 /* Make a place to write the result of the instruction. */
4322 result = target;
4323 if (! (result != 0
4324 && REG_P (result) && GET_MODE (result) == insn_mode
4325 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4326 result = gen_reg_rtx (insn_mode);
4328 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4329 arg1 = builtin_save_expr (arg1);
4330 arg2 = builtin_save_expr (arg2);
4331 len = builtin_save_expr (len);
4333 arg1_rtx = get_memory_rtx (arg1, len);
4334 arg2_rtx = get_memory_rtx (arg2, len);
4335 arg3_rtx = expand_normal (len);
4336 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4337 GEN_INT (MIN (arg1_align, arg2_align)));
4338 if (insn)
4340 emit_insn (insn);
4342 /* Return the value in the proper mode for this function. */
4343 mode = TYPE_MODE (TREE_TYPE (exp));
4344 if (GET_MODE (result) == mode)
4345 return result;
4346 if (target == 0)
4347 return convert_to_mode (mode, result, 0);
4348 convert_move (target, result, 0);
4349 return target;
4352 /* Expand the library call ourselves using a stabilized argument
4353 list to avoid re-evaluating the function's arguments twice. */
4354 fndecl = get_callee_fndecl (exp);
4355 fn = build_call_nofold (fndecl, 3, arg1, arg2, len);
4356 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4357 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4358 return expand_call (fn, target, target == const0_rtx);
4360 #endif
4361 return NULL_RTX;
4364 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4365 if that's convenient. */
4368 expand_builtin_saveregs (void)
4370 rtx val, seq;
4372 /* Don't do __builtin_saveregs more than once in a function.
4373 Save the result of the first call and reuse it. */
4374 if (saveregs_value != 0)
4375 return saveregs_value;
4377 /* When this function is called, it means that registers must be
4378 saved on entry to this function. So we migrate the call to the
4379 first insn of this function. */
4381 start_sequence ();
4383 /* Do whatever the machine needs done in this case. */
4384 val = targetm.calls.expand_builtin_saveregs ();
4386 seq = get_insns ();
4387 end_sequence ();
4389 saveregs_value = val;
4391 /* Put the insns after the NOTE that starts the function. If this
4392 is inside a start_sequence, make the outer-level insn chain current, so
4393 the code is placed at the start of the function. */
4394 push_topmost_sequence ();
4395 emit_insn_after (seq, entry_of_function ());
4396 pop_topmost_sequence ();
4398 return val;
4401 /* __builtin_args_info (N) returns word N of the arg space info
4402 for the current function. The number and meanings of words
4403 is controlled by the definition of CUMULATIVE_ARGS. */
4405 static rtx
4406 expand_builtin_args_info (tree exp)
4408 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4409 int *word_ptr = (int *) &crtl->args.info;
4411 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4413 if (call_expr_nargs (exp) != 0)
4415 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4416 error ("argument of %<__builtin_args_info%> must be constant");
4417 else
4419 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4421 if (wordnum < 0 || wordnum >= nwords)
4422 error ("argument of %<__builtin_args_info%> out of range");
4423 else
4424 return GEN_INT (word_ptr[wordnum]);
4427 else
4428 error ("missing argument in %<__builtin_args_info%>");
4430 return const0_rtx;
4433 /* Expand a call to __builtin_next_arg. */
4435 static rtx
4436 expand_builtin_next_arg (void)
4438 /* Checking arguments is already done in fold_builtin_next_arg
4439 that must be called before this function. */
4440 return expand_binop (ptr_mode, add_optab,
4441 crtl->args.internal_arg_pointer,
4442 crtl->args.arg_offset_rtx,
4443 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4446 /* Make it easier for the backends by protecting the valist argument
4447 from multiple evaluations. */
4449 static tree
4450 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4452 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4454 gcc_assert (vatype != NULL_TREE);
4456 if (TREE_CODE (vatype) == ARRAY_TYPE)
4458 if (TREE_SIDE_EFFECTS (valist))
4459 valist = save_expr (valist);
4461 /* For this case, the backends will be expecting a pointer to
4462 vatype, but it's possible we've actually been given an array
4463 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4464 So fix it. */
4465 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4467 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4468 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4471 else
4473 tree pt;
4475 if (! needs_lvalue)
4477 if (! TREE_SIDE_EFFECTS (valist))
4478 return valist;
4480 pt = build_pointer_type (vatype);
4481 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4482 TREE_SIDE_EFFECTS (valist) = 1;
4485 if (TREE_SIDE_EFFECTS (valist))
4486 valist = save_expr (valist);
4487 valist = build_fold_indirect_ref_loc (loc, valist);
4490 return valist;
4493 /* The "standard" definition of va_list is void*. */
4495 tree
4496 std_build_builtin_va_list (void)
4498 return ptr_type_node;
4501 /* The "standard" abi va_list is va_list_type_node. */
4503 tree
4504 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4506 return va_list_type_node;
4509 /* The "standard" type of va_list is va_list_type_node. */
4511 tree
4512 std_canonical_va_list_type (tree type)
4514 tree wtype, htype;
4516 if (INDIRECT_REF_P (type))
4517 type = TREE_TYPE (type);
4518 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4519 type = TREE_TYPE (type);
4520 wtype = va_list_type_node;
4521 htype = type;
4522 /* Treat structure va_list types. */
4523 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4524 htype = TREE_TYPE (htype);
4525 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4527 /* If va_list is an array type, the argument may have decayed
4528 to a pointer type, e.g. by being passed to another function.
4529 In that case, unwrap both types so that we can compare the
4530 underlying records. */
4531 if (TREE_CODE (htype) == ARRAY_TYPE
4532 || POINTER_TYPE_P (htype))
4534 wtype = TREE_TYPE (wtype);
4535 htype = TREE_TYPE (htype);
4538 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4539 return va_list_type_node;
4541 return NULL_TREE;
4544 /* The "standard" implementation of va_start: just assign `nextarg' to
4545 the variable. */
4547 void
4548 std_expand_builtin_va_start (tree valist, rtx nextarg)
4550 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4551 convert_move (va_r, nextarg, 0);
4554 /* Expand EXP, a call to __builtin_va_start. */
4556 static rtx
4557 expand_builtin_va_start (tree exp)
4559 rtx nextarg;
4560 tree valist;
4561 location_t loc = EXPR_LOCATION (exp);
4563 if (call_expr_nargs (exp) < 2)
4565 error_at (loc, "too few arguments to function %<va_start%>");
4566 return const0_rtx;
4569 if (fold_builtin_next_arg (exp, true))
4570 return const0_rtx;
4572 nextarg = expand_builtin_next_arg ();
4573 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4575 if (targetm.expand_builtin_va_start)
4576 targetm.expand_builtin_va_start (valist, nextarg);
4577 else
4578 std_expand_builtin_va_start (valist, nextarg);
4580 return const0_rtx;
4583 /* The "standard" implementation of va_arg: read the value from the
4584 current (padded) address and increment by the (padded) size. */
4586 tree
4587 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4588 gimple_seq *post_p)
4590 tree addr, t, type_size, rounded_size, valist_tmp;
4591 unsigned HOST_WIDE_INT align, boundary;
4592 bool indirect;
4594 #ifdef ARGS_GROW_DOWNWARD
4595 /* All of the alignment and movement below is for args-grow-up machines.
4596 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4597 implement their own specialized gimplify_va_arg_expr routines. */
4598 gcc_unreachable ();
4599 #endif
/* Arguments passed by reference are represented by a pointer in the
   argument area; fetch the pointer and dereference it at the end.  */
4601 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4602 if (indirect)
4603 type = build_pointer_type (type);
4605 align = PARM_BOUNDARY / BITS_PER_UNIT;
4606 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4608 /* When we align parameter on stack for caller, if the parameter
4609 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4610 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4611 here with caller. */
4612 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4613 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4615 boundary /= BITS_PER_UNIT;
4617 /* Hoist the valist value into a temporary for the moment. */
4618 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4620 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4621 requires greater alignment, we must perform dynamic alignment. */
4622 if (boundary > align
4623 && !integer_zerop (TYPE_SIZE (type)))
4625 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4626 fold_build2 (POINTER_PLUS_EXPR,
4627 TREE_TYPE (valist),
4628 valist_tmp, size_int (boundary - 1)))
4629 gimplify_and_add (t, pre_p);
/* Round the bumped pointer back down to a BOUNDARY multiple.  */
4631 t = fold_convert (sizetype, valist_tmp);
4632 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4633 fold_convert (TREE_TYPE (valist),
4634 fold_build2 (BIT_AND_EXPR, sizetype, t,
4635 size_int (-boundary))));
4636 gimplify_and_add (t, pre_p);
4638 else
4639 boundary = align;
4641 /* If the actual alignment is less than the alignment of the type,
4642 adjust the type accordingly so that we don't assume strict alignment
4643 when dereferencing the pointer. */
4644 boundary *= BITS_PER_UNIT;
4645 if (boundary < TYPE_ALIGN (type))
4647 type = build_variant_type_copy (type);
4648 TYPE_ALIGN (type) = boundary;
4651 /* Compute the rounded size of the type. */
4652 type_size = size_in_bytes (type);
4653 rounded_size = round_up (type_size, align);
4655 /* Reduce rounded_size so it's sharable with the postqueue. */
4656 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4658 /* Get AP. */
4659 addr = valist_tmp;
4660 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4662 /* Small args are padded downward. */
4663 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4664 rounded_size, size_int (align));
4665 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4666 size_binop (MINUS_EXPR, rounded_size, type_size));
4667 addr = fold_build2 (POINTER_PLUS_EXPR,
4668 TREE_TYPE (addr), addr, t);
4671 /* Compute new value for AP. */
4672 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4673 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4674 gimplify_and_add (t, pre_p);
4676 addr = fold_convert (build_pointer_type (type), addr);
4678 if (indirect)
4679 addr = build_va_arg_indirect_ref (addr);
4681 return build_va_arg_indirect_ref (addr);
4684 /* Build an indirect-ref expression over the given TREE, which represents a
4685 piece of a va_arg() expansion. */
4686 tree
4687 build_va_arg_indirect_ref (tree addr)
4689 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4691 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4692 mf_mark (addr);
4694 return addr;
4697 /* Return a dummy expression of type TYPE in order to keep going after an
4698 error. */
4700 static tree
4701 dummy_object (tree type)
4703 tree t = build_int_cst (build_pointer_type (type), 0);
4704 return build1 (INDIRECT_REF, type, t);
4707 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4708 builtin function, but a very special sort of operator. */
4710 enum gimplify_status
4711 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4713 tree promoted_type, have_va_type;
4714 tree valist = TREE_OPERAND (*expr_p, 0);
4715 tree type = TREE_TYPE (*expr_p);
4716 tree t;
4717 location_t loc = EXPR_LOCATION (*expr_p);
4719 /* Verify that valist is of the proper type. */
4720 have_va_type = TREE_TYPE (valist);
4721 if (have_va_type == error_mark_node)
4722 return GS_ERROR;
4723 have_va_type = targetm.canonical_va_list_type (have_va_type);
4725 if (have_va_type == NULL_TREE)
4727 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4728 return GS_ERROR;
4731 /* Generate a diagnostic for requesting data of a type that cannot
4732 be passed through `...' due to type promotion at the call site. */
4733 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4734 != type)
/* gave_help is function-static so the explanatory note below is only
   emitted once per compilation.  */
4736 static bool gave_help;
4737 bool warned;
4739 /* Unfortunately, this is merely undefined, rather than a constraint
4740 violation, so we cannot make this an error. If this call is never
4741 executed, the program is still strictly conforming. */
4742 warned = warning_at (loc, 0,
4743 "%qT is promoted to %qT when passed through %<...%>",
4744 type, promoted_type);
4745 if (!gave_help && warned)
4747 gave_help = true;
4748 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4749 promoted_type, type);
4752 /* We can, however, treat "undefined" any way we please.
4753 Call abort to encourage the user to fix the program. */
4754 if (warned)
4755 inform (loc, "if this code is reached, the program will abort");
4756 /* Before the abort, allow the evaluation of the va_list
4757 expression to exit or longjmp. */
4758 gimplify_and_add (valist, pre_p);
4759 t = build_call_expr_loc (loc,
4760 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4761 gimplify_and_add (t, pre_p);
4763 /* This is dead code, but go ahead and finish so that the
4764 mode of the result comes out right. */
4765 *expr_p = dummy_object (type);
4766 return GS_ALL_DONE;
4768 else
4770 /* Make it easier for the backends by protecting the valist argument
4771 from multiple evaluations. */
4772 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4774 /* For this case, the backends will be expecting a pointer to
4775 TREE_TYPE (abi), but it's possible we've
4776 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4777 So fix it. */
4778 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4780 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4781 valist = fold_convert_loc (loc, p1,
4782 build_fold_addr_expr_loc (loc, valist));
4785 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4787 else
4788 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4790 if (!targetm.gimplify_va_arg_expr)
4791 /* FIXME: Once most targets are converted we should merely
4792 assert this is non-null. */
4793 return GS_ALL_DONE;
4795 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4796 return GS_OK;
4800 /* Expand EXP, a call to __builtin_va_end. */
4802 static rtx
4803 expand_builtin_va_end (tree exp)
4805 tree valist = CALL_EXPR_ARG (exp, 0);
4807 /* Evaluate for side effects, if needed. I hate macros that don't
4808 do that. */
4809 if (TREE_SIDE_EFFECTS (valist))
4810 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4812 return const0_rtx;
4815 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4816 builtin rather than just as an assignment in stdarg.h because of the
4817 nastiness of array-type va_list types. */
4819 static rtx
4820 expand_builtin_va_copy (tree exp)
4822 tree dst, src, t;
4823 location_t loc = EXPR_LOCATION (exp);
4825 dst = CALL_EXPR_ARG (exp, 0);
4826 src = CALL_EXPR_ARG (exp, 1);
4828 dst = stabilize_va_list_loc (loc, dst, 1);
4829 src = stabilize_va_list_loc (loc, src, 0);
4831 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4833 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4835 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4836 TREE_SIDE_EFFECTS (t) = 1;
4837 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4839 else
4841 rtx dstb, srcb, size;
4843 /* Evaluate to pointers. */
4844 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4845 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4846 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4847 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4849 dstb = convert_memory_address (Pmode, dstb);
4850 srcb = convert_memory_address (Pmode, srcb);
4852 /* "Dereference" to BLKmode memories. */
4853 dstb = gen_rtx_MEM (BLKmode, dstb);
4854 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4855 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4856 srcb = gen_rtx_MEM (BLKmode, srcb);
4857 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4858 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4860 /* Copy. */
4861 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4864 return const0_rtx;
4867 /* Expand a call to one of the builtin functions __builtin_frame_address or
4868 __builtin_return_address. */
4870 static rtx
4871 expand_builtin_frame_address (tree fndecl, tree exp)
4873 /* The argument must be a nonnegative integer constant.
4874 It counts the number of frames to scan up the stack.
4875 The value is the return address saved in that frame. */
4876 if (call_expr_nargs (exp) == 0)
4877 /* Warning about missing arg was already issued. */
4878 return const0_rtx;
4879 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4881 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4882 error ("invalid argument to %<__builtin_frame_address%>");
4883 else
4884 error ("invalid argument to %<__builtin_return_address%>");
4885 return const0_rtx;
4887 else
4889 rtx tem
4890 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4891 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4893 /* Some ports cannot access arbitrary stack frames. */
4894 if (tem == NULL)
4896 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4897 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4898 else
4899 warning (0, "unsupported argument to %<__builtin_return_address%>");
4900 return const0_rtx;
4903 /* For __builtin_frame_address, return what we've got. */
4904 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4905 return tem;
4907 if (!REG_P (tem)
4908 && ! CONSTANT_P (tem))
4909 tem = copy_to_mode_reg (Pmode, tem);
4910 return tem;
4914 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4915 we failed and the caller should emit a normal call, otherwise try to get
4916 the result in TARGET, if convenient. */
4918 static rtx
4919 expand_builtin_alloca (tree exp, rtx target)
4921 rtx op0;
4922 rtx result;
4924 /* Emit normal call if marked not-inlineable. */
4925 if (CALL_CANNOT_INLINE_P (exp))
4926 return NULL_RTX;
4928 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4929 return NULL_RTX;
4931 /* Compute the argument. */
4932 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4934 /* Allocate the desired space. */
4935 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4936 result = convert_memory_address (ptr_mode, result);
4938 return result;
4941 /* Expand a call to a bswap builtin with argument ARG0. MODE
4942 is the mode to expand with. */
4944 static rtx
4945 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4947 enum machine_mode mode;
4948 tree arg;
4949 rtx op0;
4951 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4952 return NULL_RTX;
4954 arg = CALL_EXPR_ARG (exp, 0);
4955 mode = TYPE_MODE (TREE_TYPE (arg));
4956 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4958 target = expand_unop (mode, bswap_optab, op0, target, 1);
4960 gcc_assert (target);
4962 return convert_to_mode (mode, target, 0);
4965 /* Expand a call to a unary builtin in EXP.
4966 Return NULL_RTX if a normal call should be emitted rather than expanding the
4967 function in-line. If convenient, the result should be placed in TARGET.
4968 SUBTARGET may be used as the target for computing one of EXP's operands. */
4970 static rtx
4971 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4972 rtx subtarget, optab op_optab)
4974 rtx op0;
4976 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4977 return NULL_RTX;
4979 /* Compute the argument. */
4980 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4981 VOIDmode, EXPAND_NORMAL);
4982 /* Compute op, into TARGET if possible.
4983 Set TARGET to wherever the result comes back. */
4984 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4985 op_optab, op0, target, 1);
4986 gcc_assert (target);
4988 return convert_to_mode (target_mode, target, 0);
4991 /* Expand a call to __builtin_expect. We just return our argument
4992 as the builtin_expect semantic should've been already executed by
4993 tree branch prediction pass. */
4995 static rtx
4996 expand_builtin_expect (tree exp, rtx target)
4998 tree arg;
5000 if (call_expr_nargs (exp) < 2)
5001 return const0_rtx;
5002 arg = CALL_EXPR_ARG (exp, 0);
5004 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5005 /* When guessing was done, the hints should be already stripped away. */
5006 gcc_assert (!flag_guess_branch_prob
5007 || optimize == 0 || seen_error ());
5008 return target;
5011 void
5012 expand_builtin_trap (void)
5014 #ifdef HAVE_trap
5015 if (HAVE_trap)
5016 emit_insn (gen_trap ());
5017 else
5018 #endif
5019 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5020 emit_barrier ();
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
5034 /* Expand EXP, a call to fabs, fabsf or fabsl.
5035 Return NULL_RTX if a normal call should be emitted rather than expanding
5036 the function inline. If convenient, the result should be placed
5037 in TARGET. SUBTARGET may be used as the target for computing
5038 the operand. */
5040 static rtx
5041 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5043 enum machine_mode mode;
5044 tree arg;
5045 rtx op0;
5047 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5048 return NULL_RTX;
5050 arg = CALL_EXPR_ARG (exp, 0);
5051 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5052 mode = TYPE_MODE (TREE_TYPE (arg));
5053 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5054 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5057 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5058 Return NULL is a normal call should be emitted rather than expanding the
5059 function inline. If convenient, the result should be placed in TARGET.
5060 SUBTARGET may be used as the target for computing the operand. */
5062 static rtx
5063 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5065 rtx op0, op1;
5066 tree arg;
5068 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5069 return NULL_RTX;
5071 arg = CALL_EXPR_ARG (exp, 0);
5072 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5074 arg = CALL_EXPR_ARG (exp, 1);
5075 op1 = expand_normal (arg);
5077 return expand_copysign (op0, op1, target);
5080 /* Create a new constant string literal and return a char* pointer to it.
5081 The STRING_CST value is the LEN characters at STR. */
5082 tree
5083 build_string_literal (int len, const char *str)
5085 tree t, elem, index, type;
5087 t = build_string (len, str);
5088 elem = build_type_variant (char_type_node, 1, 0);
5089 index = build_index_type (size_int (len - 1));
5090 type = build_array_type (elem, index);
5091 TREE_TYPE (t) = type;
5092 TREE_CONSTANT (t) = 1;
5093 TREE_READONLY (t) = 1;
5094 TREE_STATIC (t) = 1;
5096 type = build_pointer_type (elem);
5097 t = build1 (ADDR_EXPR, type,
5098 build4 (ARRAY_REF, elem,
5099 t, integer_zero_node, NULL_TREE, NULL_TREE));
5100 return t;
5103 /* Expand a call to either the entry or exit function profiler. */
5105 static rtx
5106 expand_builtin_profile_func (bool exitp)
5108 rtx this_rtx, which;
5110 this_rtx = DECL_RTL (current_function_decl);
5111 gcc_assert (MEM_P (this_rtx));
5112 this_rtx = XEXP (this_rtx, 0);
5114 if (exitp)
5115 which = profile_function_exit_libfunc;
5116 else
5117 which = profile_function_entry_libfunc;
5119 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5120 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5122 Pmode);
5124 return const0_rtx;
5127 /* Expand a call to __builtin___clear_cache. */
5129 static rtx
5130 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5132 #ifndef HAVE_clear_cache
5133 #ifdef CLEAR_INSN_CACHE
5134 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5135 does something. Just do the default expansion to a call to
5136 __clear_cache(). */
5137 return NULL_RTX;
5138 #else
5139 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5140 does nothing. There is no need to call it. Do nothing. */
5141 return const0_rtx;
5142 #endif /* CLEAR_INSN_CACHE */
5143 #else
5144 /* We have a "clear_cache" insn, and it will handle everything. */
5145 tree begin, end;
5146 rtx begin_rtx, end_rtx;
5147 enum insn_code icode;
5149 /* We must not expand to a library call. If we did, any
5150 fallback library function in libgcc that might contain a call to
5151 __builtin___clear_cache() would recurse infinitely. */
5152 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5154 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5155 return const0_rtx;
5158 if (HAVE_clear_cache)
5160 icode = CODE_FOR_clear_cache;
5162 begin = CALL_EXPR_ARG (exp, 0);
5163 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5164 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5165 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5166 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5168 end = CALL_EXPR_ARG (exp, 1);
5169 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5170 end_rtx = convert_memory_address (Pmode, end_rtx);
5171 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5172 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5174 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5176 return const0_rtx;
5177 #endif /* HAVE_clear_cache */
5180 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5182 static rtx
5183 round_trampoline_addr (rtx tramp)
5185 rtx temp, addend, mask;
5187 /* If we don't need too much alignment, we'll have been guaranteed
5188 proper alignment by get_trampoline_type. */
5189 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5190 return tramp;
5192 /* Round address up to desired boundary. */
5193 temp = gen_reg_rtx (Pmode);
5194 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5195 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5197 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5198 temp, 0, OPTAB_LIB_WIDEN);
5199 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5200 temp, 0, OPTAB_LIB_WIDEN);
5202 return tramp;
5205 static rtx
5206 expand_builtin_init_trampoline (tree exp)
5208 tree t_tramp, t_func, t_chain;
5209 rtx m_tramp, r_tramp, r_chain, tmp;
5211 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5212 POINTER_TYPE, VOID_TYPE))
5213 return NULL_RTX;
5215 t_tramp = CALL_EXPR_ARG (exp, 0);
5216 t_func = CALL_EXPR_ARG (exp, 1);
5217 t_chain = CALL_EXPR_ARG (exp, 2);
5219 r_tramp = expand_normal (t_tramp);
5220 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5221 MEM_NOTRAP_P (m_tramp) = 1;
5223 /* The TRAMP argument should be the address of a field within the
5224 local function's FRAME decl. Let's see if we can fill in the
5225 to fill in the MEM_ATTRs for this memory. */
5226 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5227 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
5228 true, 0);
5230 tmp = round_trampoline_addr (r_tramp);
5231 if (tmp != r_tramp)
5233 m_tramp = change_address (m_tramp, BLKmode, tmp);
5234 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5235 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
5238 /* The FUNC argument should be the address of the nested function.
5239 Extract the actual function decl to pass to the hook. */
5240 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5241 t_func = TREE_OPERAND (t_func, 0);
5242 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5244 r_chain = expand_normal (t_chain);
5246 /* Generate insns to initialize the trampoline. */
5247 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5249 trampolines_created = 1;
5250 return const0_rtx;
5253 static rtx
5254 expand_builtin_adjust_trampoline (tree exp)
5256 rtx tramp;
5258 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5259 return NULL_RTX;
5261 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5262 tramp = round_trampoline_addr (tramp);
5263 if (targetm.calls.trampoline_adjust_address)
5264 tramp = targetm.calls.trampoline_adjust_address (tramp);
5266 return tramp;
5269 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5270 function. The function first checks whether the back end provides
5271 an insn to implement signbit for the respective mode. If not, it
5272 checks whether the floating point format of the value is such that
5273 the sign bit can be extracted. If that is not the case, the
5274 function returns NULL_RTX to indicate that a normal call should be
5275 emitted rather than expanding the function in-line. EXP is the
5276 expression that is a call to the builtin function; if convenient,
5277 the result should be placed in TARGET. */
5278 static rtx
5279 expand_builtin_signbit (tree exp, rtx target)
5281 const struct real_format *fmt;
5282 enum machine_mode fmode, imode, rmode;
5283 tree arg;
5284 int word, bitpos;
5285 enum insn_code icode;
5286 rtx temp;
5287 location_t loc = EXPR_LOCATION (exp);
5289 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5290 return NULL_RTX;
5292 arg = CALL_EXPR_ARG (exp, 0);
5293 fmode = TYPE_MODE (TREE_TYPE (arg));
5294 rmode = TYPE_MODE (TREE_TYPE (exp));
5295 fmt = REAL_MODE_FORMAT (fmode);
5297 arg = builtin_save_expr (arg);
5299 /* Expand the argument yielding a RTX expression. */
5300 temp = expand_normal (arg);
5302 /* Check if the back end provides an insn that handles signbit for the
5303 argument's mode. */
5304 icode = signbit_optab->handlers [(int) fmode].insn_code;
5305 if (icode != CODE_FOR_nothing)
5307 rtx last = get_last_insn ();
5308 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5309 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5310 return target;
5311 delete_insns_since (last);
5314 /* For floating point formats without a sign bit, implement signbit
5315 as "ARG < 0.0". */
5316 bitpos = fmt->signbit_ro;
5317 if (bitpos < 0)
5319 /* But we can't do this if the format supports signed zero. */
5320 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5321 return NULL_RTX;
5323 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5324 build_real (TREE_TYPE (arg), dconst0));
5325 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5328 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5330 imode = int_mode_for_mode (fmode);
5331 if (imode == BLKmode)
5332 return NULL_RTX;
5333 temp = gen_lowpart (imode, temp);
5335 else
5337 imode = word_mode;
5338 /* Handle targets with different FP word orders. */
5339 if (FLOAT_WORDS_BIG_ENDIAN)
5340 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5341 else
5342 word = bitpos / BITS_PER_WORD;
5343 temp = operand_subword_force (temp, word, fmode);
5344 bitpos = bitpos % BITS_PER_WORD;
5347 /* Force the intermediate word_mode (or narrower) result into a
5348 register. This avoids attempting to create paradoxical SUBREGs
5349 of floating point modes below. */
5350 temp = force_reg (imode, temp);
5352 /* If the bitpos is within the "result mode" lowpart, the operation
5353 can be implement with a single bitwise AND. Otherwise, we need
5354 a right shift and an AND. */
5356 if (bitpos < GET_MODE_BITSIZE (rmode))
5358 double_int mask = double_int_setbit (double_int_zero, bitpos);
5360 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5361 temp = gen_lowpart (rmode, temp);
5362 temp = expand_binop (rmode, and_optab, temp,
5363 immed_double_int_const (mask, rmode),
5364 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5366 else
5368 /* Perform a logical right shift to place the signbit in the least
5369 significant bit, then truncate the result to the desired mode
5370 and mask just this bit. */
5371 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5372 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5373 temp = gen_lowpart (rmode, temp);
5374 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5375 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5378 return temp;
5381 /* Expand fork or exec calls. TARGET is the desired target of the
5382 call. EXP is the call. FN is the
5383 identificator of the actual function. IGNORE is nonzero if the
5384 value is to be ignored. */
5386 static rtx
5387 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5389 tree id, decl;
5390 tree call;
5392 /* If we are not profiling, just call the function. */
5393 if (!profile_arc_flag)
5394 return NULL_RTX;
5396 /* Otherwise call the wrapper. This should be equivalent for the rest of
5397 compiler, so the code does not diverge, and the wrapper may run the
5398 code necessary for keeping the profiling sane. */
5400 switch (DECL_FUNCTION_CODE (fn))
5402 case BUILT_IN_FORK:
5403 id = get_identifier ("__gcov_fork");
5404 break;
5406 case BUILT_IN_EXECL:
5407 id = get_identifier ("__gcov_execl");
5408 break;
5410 case BUILT_IN_EXECV:
5411 id = get_identifier ("__gcov_execv");
5412 break;
5414 case BUILT_IN_EXECLP:
5415 id = get_identifier ("__gcov_execlp");
5416 break;
5418 case BUILT_IN_EXECLE:
5419 id = get_identifier ("__gcov_execle");
5420 break;
5422 case BUILT_IN_EXECVP:
5423 id = get_identifier ("__gcov_execvp");
5424 break;
5426 case BUILT_IN_EXECVE:
5427 id = get_identifier ("__gcov_execve");
5428 break;
5430 default:
5431 gcc_unreachable ();
5434 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5435 FUNCTION_DECL, id, TREE_TYPE (fn));
5436 DECL_EXTERNAL (decl) = 1;
5437 TREE_PUBLIC (decl) = 1;
5438 DECL_ARTIFICIAL (decl) = 1;
5439 TREE_NOTHROW (decl) = 1;
5440 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5441 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5442 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5443 return expand_call (call, target, ignore);
5448 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5449 the pointer in these functions is void*, the tree optimizers may remove
5450 casts. The mode computed in expand_builtin isn't reliable either, due
5451 to __sync_bool_compare_and_swap.
5453 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5454 group of builtins. This gives us log2 of the mode size. */
5456 static inline enum machine_mode
5457 get_builtin_sync_mode (int fcode_diff)
5459 /* The size is not negotiable, so ask not to get BLKmode in return
5460 if the target indicates that a smaller size would be better. */
5461 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5464 /* Expand the memory expression LOC and return the appropriate memory operand
5465 for the builtin_sync operations. */
5467 static rtx
5468 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5470 rtx addr, mem;
5472 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5473 addr = convert_memory_address (Pmode, addr);
5475 /* Note that we explicitly do not want any alias information for this
5476 memory, so that we kill all other live memories. Otherwise we don't
5477 satisfy the full barrier semantics of the intrinsic. */
5478 mem = validize_mem (gen_rtx_MEM (mode, addr));
5480 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5481 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5482 MEM_VOLATILE_P (mem) = 1;
5484 return mem;
5487 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5488 EXP is the CALL_EXPR. CODE is the rtx code
5489 that corresponds to the arithmetic or logical operation from the name;
5490 an exception here is that NOT actually means NAND. TARGET is an optional
5491 place for us to store the results; AFTER is true if this is the
5492 fetch_and_xxx form. IGNORE is true if we don't actually care about
5493 the result of the operation at all. */
5495 static rtx
5496 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5497 enum rtx_code code, bool after,
5498 rtx target, bool ignore)
5500 rtx val, mem;
5501 enum machine_mode old_mode;
5502 location_t loc = EXPR_LOCATION (exp);
5504 if (code == NOT && warn_sync_nand)
5506 tree fndecl = get_callee_fndecl (exp);
5507 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5509 static bool warned_f_a_n, warned_n_a_f;
5511 switch (fcode)
5513 case BUILT_IN_FETCH_AND_NAND_1:
5514 case BUILT_IN_FETCH_AND_NAND_2:
5515 case BUILT_IN_FETCH_AND_NAND_4:
5516 case BUILT_IN_FETCH_AND_NAND_8:
5517 case BUILT_IN_FETCH_AND_NAND_16:
5519 if (warned_f_a_n)
5520 break;
5522 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5523 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5524 warned_f_a_n = true;
5525 break;
5527 case BUILT_IN_NAND_AND_FETCH_1:
5528 case BUILT_IN_NAND_AND_FETCH_2:
5529 case BUILT_IN_NAND_AND_FETCH_4:
5530 case BUILT_IN_NAND_AND_FETCH_8:
5531 case BUILT_IN_NAND_AND_FETCH_16:
5533 if (warned_n_a_f)
5534 break;
5536 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5537 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5538 warned_n_a_f = true;
5539 break;
5541 default:
5542 gcc_unreachable ();
5546 /* Expand the operands. */
5547 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5549 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5550 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5551 of CONST_INTs, where we know the old_mode only from the call argument. */
5552 old_mode = GET_MODE (val);
5553 if (old_mode == VOIDmode)
5554 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5555 val = convert_modes (mode, old_mode, val, 1);
5557 if (ignore)
5558 return expand_sync_operation (mem, val, code);
5559 else
5560 return expand_sync_fetch_operation (mem, val, code, after, target);
5563 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5564 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5565 true if this is the boolean form. TARGET is a place for us to store the
5566 results; this is NOT optional if IS_BOOL is true. */
5568 static rtx
5569 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5570 bool is_bool, rtx target)
5572 rtx old_val, new_val, mem;
5573 enum machine_mode old_mode;
5575 /* Expand the operands. */
5576 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5579 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5580 mode, EXPAND_NORMAL);
5581 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5582 of CONST_INTs, where we know the old_mode only from the call argument. */
5583 old_mode = GET_MODE (old_val);
5584 if (old_mode == VOIDmode)
5585 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5586 old_val = convert_modes (mode, old_mode, old_val, 1);
5588 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5589 mode, EXPAND_NORMAL);
5590 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5591 of CONST_INTs, where we know the old_mode only from the call argument. */
5592 old_mode = GET_MODE (new_val);
5593 if (old_mode == VOIDmode)
5594 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5595 new_val = convert_modes (mode, old_mode, new_val, 1);
5597 if (is_bool)
5598 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5599 else
5600 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5603 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5604 general form is actually an atomic exchange, and some targets only
5605 support a reduced form with the second argument being a constant 1.
5606 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5607 the results. */
5609 static rtx
5610 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5611 rtx target)
5613 rtx val, mem;
5614 enum machine_mode old_mode;
5616 /* Expand the operands. */
5617 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5618 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5619 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5620 of CONST_INTs, where we know the old_mode only from the call argument. */
5621 old_mode = GET_MODE (val);
5622 if (old_mode == VOIDmode)
5623 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5624 val = convert_modes (mode, old_mode, val, 1);
5626 return expand_sync_lock_test_and_set (mem, val, target);
5629 /* Expand the __sync_synchronize intrinsic. */
5631 static void
5632 expand_builtin_synchronize (void)
5634 gimple x;
5635 VEC (tree, gc) *v_clobbers;
5637 #ifdef HAVE_memory_barrier
5638 if (HAVE_memory_barrier)
5640 emit_insn (gen_memory_barrier ());
5641 return;
5643 #endif
5645 if (synchronize_libfunc != NULL_RTX)
5647 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5648 return;
5651 /* If no explicit memory barrier instruction is available, create an
5652 empty asm stmt with a memory clobber. */
5653 v_clobbers = VEC_alloc (tree, gc, 1);
5654 VEC_quick_push (tree, v_clobbers,
5655 tree_cons (NULL, build_string (6, "memory"), NULL));
5656 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5657 gimple_asm_set_volatile (x, true);
5658 expand_asm_stmt (x);
5661 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5663 static void
5664 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5666 enum insn_code icode;
5667 rtx mem, insn;
5668 rtx val = const0_rtx;
5670 /* Expand the operands. */
5671 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5673 /* If there is an explicit operation in the md file, use it. */
5674 icode = sync_lock_release[mode];
5675 if (icode != CODE_FOR_nothing)
5677 if (!insn_data[icode].operand[1].predicate (val, mode))
5678 val = force_reg (mode, val);
5680 insn = GEN_FCN (icode) (mem, val);
5681 if (insn)
5683 emit_insn (insn);
5684 return;
5688 /* Otherwise we can implement this operation by emitting a barrier
5689 followed by a store of zero. */
5690 expand_builtin_synchronize ();
5691 emit_move_insn (mem, val);
5694 /* Expand an expression EXP that calls a built-in function,
5695 with result going to TARGET if that's convenient
5696 (and in mode MODE if that's convenient).
5697 SUBTARGET may be used as the target for computing one of EXP's operands.
5698 IGNORE is nonzero if the value is to be ignored. */
5701 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5702 int ignore)
5704 tree fndecl = get_callee_fndecl (exp);
5705 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5706 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5708 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5709 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5711 /* When not optimizing, generate calls to library functions for a certain
5712 set of builtins. */
5713 if (!optimize
5714 && !called_as_built_in (fndecl)
5715 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5716 && fcode != BUILT_IN_ALLOCA
5717 && fcode != BUILT_IN_FREE)
5718 return expand_call (exp, target, ignore);
5720 /* The built-in function expanders test for target == const0_rtx
5721 to determine whether the function's result will be ignored. */
5722 if (ignore)
5723 target = const0_rtx;
5725 /* If the result of a pure or const built-in function is ignored, and
5726 none of its arguments are volatile, we can avoid expanding the
5727 built-in call and just evaluate the arguments for side-effects. */
5728 if (target == const0_rtx
5729 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
5731 bool volatilep = false;
5732 tree arg;
5733 call_expr_arg_iterator iter;
5735 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5736 if (TREE_THIS_VOLATILE (arg))
5738 volatilep = true;
5739 break;
5742 if (! volatilep)
5744 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5745 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5746 return const0_rtx;
5750 switch (fcode)
5752 CASE_FLT_FN (BUILT_IN_FABS):
5753 target = expand_builtin_fabs (exp, target, subtarget);
5754 if (target)
5755 return target;
5756 break;
5758 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5759 target = expand_builtin_copysign (exp, target, subtarget);
5760 if (target)
5761 return target;
5762 break;
5764 /* Just do a normal library call if we were unable to fold
5765 the values. */
5766 CASE_FLT_FN (BUILT_IN_CABS):
5767 break;
5769 CASE_FLT_FN (BUILT_IN_EXP):
5770 CASE_FLT_FN (BUILT_IN_EXP10):
5771 CASE_FLT_FN (BUILT_IN_POW10):
5772 CASE_FLT_FN (BUILT_IN_EXP2):
5773 CASE_FLT_FN (BUILT_IN_EXPM1):
5774 CASE_FLT_FN (BUILT_IN_LOGB):
5775 CASE_FLT_FN (BUILT_IN_LOG):
5776 CASE_FLT_FN (BUILT_IN_LOG10):
5777 CASE_FLT_FN (BUILT_IN_LOG2):
5778 CASE_FLT_FN (BUILT_IN_LOG1P):
5779 CASE_FLT_FN (BUILT_IN_TAN):
5780 CASE_FLT_FN (BUILT_IN_ASIN):
5781 CASE_FLT_FN (BUILT_IN_ACOS):
5782 CASE_FLT_FN (BUILT_IN_ATAN):
5783 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5784 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5785 because of possible accuracy problems. */
5786 if (! flag_unsafe_math_optimizations)
5787 break;
5788 CASE_FLT_FN (BUILT_IN_SQRT):
5789 CASE_FLT_FN (BUILT_IN_FLOOR):
5790 CASE_FLT_FN (BUILT_IN_CEIL):
5791 CASE_FLT_FN (BUILT_IN_TRUNC):
5792 CASE_FLT_FN (BUILT_IN_ROUND):
5793 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5794 CASE_FLT_FN (BUILT_IN_RINT):
5795 target = expand_builtin_mathfn (exp, target, subtarget);
5796 if (target)
5797 return target;
5798 break;
5800 CASE_FLT_FN (BUILT_IN_ILOGB):
5801 if (! flag_unsafe_math_optimizations)
5802 break;
5803 CASE_FLT_FN (BUILT_IN_ISINF):
5804 CASE_FLT_FN (BUILT_IN_FINITE):
5805 case BUILT_IN_ISFINITE:
5806 case BUILT_IN_ISNORMAL:
5807 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5808 if (target)
5809 return target;
5810 break;
5812 CASE_FLT_FN (BUILT_IN_LCEIL):
5813 CASE_FLT_FN (BUILT_IN_LLCEIL):
5814 CASE_FLT_FN (BUILT_IN_LFLOOR):
5815 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5816 target = expand_builtin_int_roundingfn (exp, target);
5817 if (target)
5818 return target;
5819 break;
5821 CASE_FLT_FN (BUILT_IN_LRINT):
5822 CASE_FLT_FN (BUILT_IN_LLRINT):
5823 CASE_FLT_FN (BUILT_IN_LROUND):
5824 CASE_FLT_FN (BUILT_IN_LLROUND):
5825 target = expand_builtin_int_roundingfn_2 (exp, target);
5826 if (target)
5827 return target;
5828 break;
5830 CASE_FLT_FN (BUILT_IN_POW):
5831 target = expand_builtin_pow (exp, target, subtarget);
5832 if (target)
5833 return target;
5834 break;
5836 CASE_FLT_FN (BUILT_IN_POWI):
5837 target = expand_builtin_powi (exp, target, subtarget);
5838 if (target)
5839 return target;
5840 break;
5842 CASE_FLT_FN (BUILT_IN_ATAN2):
5843 CASE_FLT_FN (BUILT_IN_LDEXP):
5844 CASE_FLT_FN (BUILT_IN_SCALB):
5845 CASE_FLT_FN (BUILT_IN_SCALBN):
5846 CASE_FLT_FN (BUILT_IN_SCALBLN):
5847 if (! flag_unsafe_math_optimizations)
5848 break;
5850 CASE_FLT_FN (BUILT_IN_FMOD):
5851 CASE_FLT_FN (BUILT_IN_REMAINDER):
5852 CASE_FLT_FN (BUILT_IN_DREM):
5853 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5854 if (target)
5855 return target;
5856 break;
5858 CASE_FLT_FN (BUILT_IN_CEXPI):
5859 target = expand_builtin_cexpi (exp, target, subtarget);
5860 gcc_assert (target);
5861 return target;
5863 CASE_FLT_FN (BUILT_IN_SIN):
5864 CASE_FLT_FN (BUILT_IN_COS):
5865 if (! flag_unsafe_math_optimizations)
5866 break;
5867 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5868 if (target)
5869 return target;
5870 break;
5872 CASE_FLT_FN (BUILT_IN_SINCOS):
5873 if (! flag_unsafe_math_optimizations)
5874 break;
5875 target = expand_builtin_sincos (exp);
5876 if (target)
5877 return target;
5878 break;
5880 case BUILT_IN_APPLY_ARGS:
5881 return expand_builtin_apply_args ();
5883 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5884 FUNCTION with a copy of the parameters described by
5885 ARGUMENTS, and ARGSIZE. It returns a block of memory
5886 allocated on the stack into which is stored all the registers
5887 that might possibly be used for returning the result of a
5888 function. ARGUMENTS is the value returned by
5889 __builtin_apply_args. ARGSIZE is the number of bytes of
5890 arguments that must be copied. ??? How should this value be
5891 computed? We'll also need a safe worst case value for varargs
5892 functions. */
5893 case BUILT_IN_APPLY:
5894 if (!validate_arglist (exp, POINTER_TYPE,
5895 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5896 && !validate_arglist (exp, REFERENCE_TYPE,
5897 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5898 return const0_rtx;
5899 else
5901 rtx ops[3];
5903 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5904 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5905 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5907 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5910 /* __builtin_return (RESULT) causes the function to return the
5911 value described by RESULT. RESULT is address of the block of
5912 memory returned by __builtin_apply. */
5913 case BUILT_IN_RETURN:
5914 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5915 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5916 return const0_rtx;
5918 case BUILT_IN_SAVEREGS:
5919 return expand_builtin_saveregs ();
5921 case BUILT_IN_ARGS_INFO:
5922 return expand_builtin_args_info (exp);
5924 case BUILT_IN_VA_ARG_PACK:
5925 /* All valid uses of __builtin_va_arg_pack () are removed during
5926 inlining. */
5927 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5928 return const0_rtx;
5930 case BUILT_IN_VA_ARG_PACK_LEN:
5931 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5932 inlining. */
5933 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5934 return const0_rtx;
5936 /* Return the address of the first anonymous stack arg. */
5937 case BUILT_IN_NEXT_ARG:
5938 if (fold_builtin_next_arg (exp, false))
5939 return const0_rtx;
5940 return expand_builtin_next_arg ();
5942 case BUILT_IN_CLEAR_CACHE:
5943 target = expand_builtin___clear_cache (exp);
5944 if (target)
5945 return target;
5946 break;
5948 case BUILT_IN_CLASSIFY_TYPE:
5949 return expand_builtin_classify_type (exp);
5951 case BUILT_IN_CONSTANT_P:
5952 return const0_rtx;
5954 case BUILT_IN_FRAME_ADDRESS:
5955 case BUILT_IN_RETURN_ADDRESS:
5956 return expand_builtin_frame_address (fndecl, exp);
5958 /* Returns the address of the area where the structure is returned.
5959 0 otherwise. */
5960 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5961 if (call_expr_nargs (exp) != 0
5962 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5963 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5964 return const0_rtx;
5965 else
5966 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5968 case BUILT_IN_ALLOCA:
5969 target = expand_builtin_alloca (exp, target);
5970 if (target)
5971 return target;
5972 break;
5974 case BUILT_IN_STACK_SAVE:
5975 return expand_stack_save ();
5977 case BUILT_IN_STACK_RESTORE:
5978 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5979 return const0_rtx;
5981 case BUILT_IN_BSWAP32:
5982 case BUILT_IN_BSWAP64:
5983 target = expand_builtin_bswap (exp, target, subtarget);
5985 if (target)
5986 return target;
5987 break;
5989 CASE_INT_FN (BUILT_IN_FFS):
5990 case BUILT_IN_FFSIMAX:
5991 target = expand_builtin_unop (target_mode, exp, target,
5992 subtarget, ffs_optab);
5993 if (target)
5994 return target;
5995 break;
5997 CASE_INT_FN (BUILT_IN_CLZ):
5998 case BUILT_IN_CLZIMAX:
5999 target = expand_builtin_unop (target_mode, exp, target,
6000 subtarget, clz_optab);
6001 if (target)
6002 return target;
6003 break;
6005 CASE_INT_FN (BUILT_IN_CTZ):
6006 case BUILT_IN_CTZIMAX:
6007 target = expand_builtin_unop (target_mode, exp, target,
6008 subtarget, ctz_optab);
6009 if (target)
6010 return target;
6011 break;
6013 CASE_INT_FN (BUILT_IN_POPCOUNT):
6014 case BUILT_IN_POPCOUNTIMAX:
6015 target = expand_builtin_unop (target_mode, exp, target,
6016 subtarget, popcount_optab);
6017 if (target)
6018 return target;
6019 break;
6021 CASE_INT_FN (BUILT_IN_PARITY):
6022 case BUILT_IN_PARITYIMAX:
6023 target = expand_builtin_unop (target_mode, exp, target,
6024 subtarget, parity_optab);
6025 if (target)
6026 return target;
6027 break;
6029 case BUILT_IN_STRLEN:
6030 target = expand_builtin_strlen (exp, target, target_mode);
6031 if (target)
6032 return target;
6033 break;
6035 case BUILT_IN_STRCPY:
6036 target = expand_builtin_strcpy (exp, target);
6037 if (target)
6038 return target;
6039 break;
6041 case BUILT_IN_STRNCPY:
6042 target = expand_builtin_strncpy (exp, target);
6043 if (target)
6044 return target;
6045 break;
6047 case BUILT_IN_STPCPY:
6048 target = expand_builtin_stpcpy (exp, target, mode);
6049 if (target)
6050 return target;
6051 break;
6053 case BUILT_IN_MEMCPY:
6054 target = expand_builtin_memcpy (exp, target);
6055 if (target)
6056 return target;
6057 break;
6059 case BUILT_IN_MEMPCPY:
6060 target = expand_builtin_mempcpy (exp, target, mode);
6061 if (target)
6062 return target;
6063 break;
6065 case BUILT_IN_MEMSET:
6066 target = expand_builtin_memset (exp, target, mode);
6067 if (target)
6068 return target;
6069 break;
6071 case BUILT_IN_BZERO:
6072 target = expand_builtin_bzero (exp);
6073 if (target)
6074 return target;
6075 break;
6077 case BUILT_IN_STRCMP:
6078 target = expand_builtin_strcmp (exp, target);
6079 if (target)
6080 return target;
6081 break;
6083 case BUILT_IN_STRNCMP:
6084 target = expand_builtin_strncmp (exp, target, mode);
6085 if (target)
6086 return target;
6087 break;
6089 case BUILT_IN_BCMP:
6090 case BUILT_IN_MEMCMP:
6091 target = expand_builtin_memcmp (exp, target, mode);
6092 if (target)
6093 return target;
6094 break;
6096 case BUILT_IN_SETJMP:
6097 /* This should have been lowered to the builtins below. */
6098 gcc_unreachable ();
6100 case BUILT_IN_SETJMP_SETUP:
6101 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6102 and the receiver label. */
6103 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6105 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6106 VOIDmode, EXPAND_NORMAL);
6107 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6108 rtx label_r = label_rtx (label);
6110 /* This is copied from the handling of non-local gotos. */
6111 expand_builtin_setjmp_setup (buf_addr, label_r);
6112 nonlocal_goto_handler_labels
6113 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6114 nonlocal_goto_handler_labels);
6115 /* ??? Do not let expand_label treat us as such since we would
6116 not want to be both on the list of non-local labels and on
6117 the list of forced labels. */
6118 FORCED_LABEL (label) = 0;
6119 return const0_rtx;
6121 break;
6123 case BUILT_IN_SETJMP_DISPATCHER:
6124 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6125 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6127 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6128 rtx label_r = label_rtx (label);
6130 /* Remove the dispatcher label from the list of non-local labels
6131 since the receiver labels have been added to it above. */
6132 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6133 return const0_rtx;
6135 break;
6137 case BUILT_IN_SETJMP_RECEIVER:
6138 /* __builtin_setjmp_receiver is passed the receiver label. */
6139 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6141 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6142 rtx label_r = label_rtx (label);
6144 expand_builtin_setjmp_receiver (label_r);
6145 return const0_rtx;
6147 break;
6149 /* __builtin_longjmp is passed a pointer to an array of five words.
6150 It's similar to the C library longjmp function but works with
6151 __builtin_setjmp above. */
6152 case BUILT_IN_LONGJMP:
6153 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6155 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6156 VOIDmode, EXPAND_NORMAL);
6157 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6159 if (value != const1_rtx)
6161 error ("%<__builtin_longjmp%> second argument must be 1");
6162 return const0_rtx;
6165 expand_builtin_longjmp (buf_addr, value);
6166 return const0_rtx;
6168 break;
6170 case BUILT_IN_NONLOCAL_GOTO:
6171 target = expand_builtin_nonlocal_goto (exp);
6172 if (target)
6173 return target;
6174 break;
6176 /* This updates the setjmp buffer that is its argument with the value
6177 of the current stack pointer. */
6178 case BUILT_IN_UPDATE_SETJMP_BUF:
6179 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6181 rtx buf_addr
6182 = expand_normal (CALL_EXPR_ARG (exp, 0));
6184 expand_builtin_update_setjmp_buf (buf_addr);
6185 return const0_rtx;
6187 break;
6189 case BUILT_IN_TRAP:
6190 expand_builtin_trap ();
6191 return const0_rtx;
6193 case BUILT_IN_UNREACHABLE:
6194 expand_builtin_unreachable ();
6195 return const0_rtx;
6197 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6198 case BUILT_IN_SIGNBITD32:
6199 case BUILT_IN_SIGNBITD64:
6200 case BUILT_IN_SIGNBITD128:
6201 target = expand_builtin_signbit (exp, target);
6202 if (target)
6203 return target;
6204 break;
6206 /* Various hooks for the DWARF 2 __throw routine. */
6207 case BUILT_IN_UNWIND_INIT:
6208 expand_builtin_unwind_init ();
6209 return const0_rtx;
6210 case BUILT_IN_DWARF_CFA:
6211 return virtual_cfa_rtx;
6212 #ifdef DWARF2_UNWIND_INFO
6213 case BUILT_IN_DWARF_SP_COLUMN:
6214 return expand_builtin_dwarf_sp_column ();
6215 case BUILT_IN_INIT_DWARF_REG_SIZES:
6216 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6217 return const0_rtx;
6218 #endif
6219 case BUILT_IN_FROB_RETURN_ADDR:
6220 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6221 case BUILT_IN_EXTRACT_RETURN_ADDR:
6222 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6223 case BUILT_IN_EH_RETURN:
6224 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6225 CALL_EXPR_ARG (exp, 1));
6226 return const0_rtx;
6227 #ifdef EH_RETURN_DATA_REGNO
6228 case BUILT_IN_EH_RETURN_DATA_REGNO:
6229 return expand_builtin_eh_return_data_regno (exp);
6230 #endif
6231 case BUILT_IN_EXTEND_POINTER:
6232 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6233 case BUILT_IN_EH_POINTER:
6234 return expand_builtin_eh_pointer (exp);
6235 case BUILT_IN_EH_FILTER:
6236 return expand_builtin_eh_filter (exp);
6237 case BUILT_IN_EH_COPY_VALUES:
6238 return expand_builtin_eh_copy_values (exp);
6240 case BUILT_IN_VA_START:
6241 return expand_builtin_va_start (exp);
6242 case BUILT_IN_VA_END:
6243 return expand_builtin_va_end (exp);
6244 case BUILT_IN_VA_COPY:
6245 return expand_builtin_va_copy (exp);
6246 case BUILT_IN_EXPECT:
6247 return expand_builtin_expect (exp, target);
6248 case BUILT_IN_PREFETCH:
6249 expand_builtin_prefetch (exp);
6250 return const0_rtx;
6252 case BUILT_IN_PROFILE_FUNC_ENTER:
6253 return expand_builtin_profile_func (false);
6254 case BUILT_IN_PROFILE_FUNC_EXIT:
6255 return expand_builtin_profile_func (true);
6257 case BUILT_IN_INIT_TRAMPOLINE:
6258 return expand_builtin_init_trampoline (exp);
6259 case BUILT_IN_ADJUST_TRAMPOLINE:
6260 return expand_builtin_adjust_trampoline (exp);
6262 case BUILT_IN_FORK:
6263 case BUILT_IN_EXECL:
6264 case BUILT_IN_EXECV:
6265 case BUILT_IN_EXECLP:
6266 case BUILT_IN_EXECLE:
6267 case BUILT_IN_EXECVP:
6268 case BUILT_IN_EXECVE:
6269 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6270 if (target)
6271 return target;
6272 break;
6274 case BUILT_IN_FETCH_AND_ADD_1:
6275 case BUILT_IN_FETCH_AND_ADD_2:
6276 case BUILT_IN_FETCH_AND_ADD_4:
6277 case BUILT_IN_FETCH_AND_ADD_8:
6278 case BUILT_IN_FETCH_AND_ADD_16:
6279 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6280 target = expand_builtin_sync_operation (mode, exp, PLUS,
6281 false, target, ignore);
6282 if (target)
6283 return target;
6284 break;
6286 case BUILT_IN_FETCH_AND_SUB_1:
6287 case BUILT_IN_FETCH_AND_SUB_2:
6288 case BUILT_IN_FETCH_AND_SUB_4:
6289 case BUILT_IN_FETCH_AND_SUB_8:
6290 case BUILT_IN_FETCH_AND_SUB_16:
6291 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6292 target = expand_builtin_sync_operation (mode, exp, MINUS,
6293 false, target, ignore);
6294 if (target)
6295 return target;
6296 break;
6298 case BUILT_IN_FETCH_AND_OR_1:
6299 case BUILT_IN_FETCH_AND_OR_2:
6300 case BUILT_IN_FETCH_AND_OR_4:
6301 case BUILT_IN_FETCH_AND_OR_8:
6302 case BUILT_IN_FETCH_AND_OR_16:
6303 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6304 target = expand_builtin_sync_operation (mode, exp, IOR,
6305 false, target, ignore);
6306 if (target)
6307 return target;
6308 break;
6310 case BUILT_IN_FETCH_AND_AND_1:
6311 case BUILT_IN_FETCH_AND_AND_2:
6312 case BUILT_IN_FETCH_AND_AND_4:
6313 case BUILT_IN_FETCH_AND_AND_8:
6314 case BUILT_IN_FETCH_AND_AND_16:
6315 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6316 target = expand_builtin_sync_operation (mode, exp, AND,
6317 false, target, ignore);
6318 if (target)
6319 return target;
6320 break;
6322 case BUILT_IN_FETCH_AND_XOR_1:
6323 case BUILT_IN_FETCH_AND_XOR_2:
6324 case BUILT_IN_FETCH_AND_XOR_4:
6325 case BUILT_IN_FETCH_AND_XOR_8:
6326 case BUILT_IN_FETCH_AND_XOR_16:
6327 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6328 target = expand_builtin_sync_operation (mode, exp, XOR,
6329 false, target, ignore);
6330 if (target)
6331 return target;
6332 break;
6334 case BUILT_IN_FETCH_AND_NAND_1:
6335 case BUILT_IN_FETCH_AND_NAND_2:
6336 case BUILT_IN_FETCH_AND_NAND_4:
6337 case BUILT_IN_FETCH_AND_NAND_8:
6338 case BUILT_IN_FETCH_AND_NAND_16:
6339 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6340 target = expand_builtin_sync_operation (mode, exp, NOT,
6341 false, target, ignore);
6342 if (target)
6343 return target;
6344 break;
6346 case BUILT_IN_ADD_AND_FETCH_1:
6347 case BUILT_IN_ADD_AND_FETCH_2:
6348 case BUILT_IN_ADD_AND_FETCH_4:
6349 case BUILT_IN_ADD_AND_FETCH_8:
6350 case BUILT_IN_ADD_AND_FETCH_16:
6351 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6352 target = expand_builtin_sync_operation (mode, exp, PLUS,
6353 true, target, ignore);
6354 if (target)
6355 return target;
6356 break;
6358 case BUILT_IN_SUB_AND_FETCH_1:
6359 case BUILT_IN_SUB_AND_FETCH_2:
6360 case BUILT_IN_SUB_AND_FETCH_4:
6361 case BUILT_IN_SUB_AND_FETCH_8:
6362 case BUILT_IN_SUB_AND_FETCH_16:
6363 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6364 target = expand_builtin_sync_operation (mode, exp, MINUS,
6365 true, target, ignore);
6366 if (target)
6367 return target;
6368 break;
6370 case BUILT_IN_OR_AND_FETCH_1:
6371 case BUILT_IN_OR_AND_FETCH_2:
6372 case BUILT_IN_OR_AND_FETCH_4:
6373 case BUILT_IN_OR_AND_FETCH_8:
6374 case BUILT_IN_OR_AND_FETCH_16:
6375 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6376 target = expand_builtin_sync_operation (mode, exp, IOR,
6377 true, target, ignore);
6378 if (target)
6379 return target;
6380 break;
6382 case BUILT_IN_AND_AND_FETCH_1:
6383 case BUILT_IN_AND_AND_FETCH_2:
6384 case BUILT_IN_AND_AND_FETCH_4:
6385 case BUILT_IN_AND_AND_FETCH_8:
6386 case BUILT_IN_AND_AND_FETCH_16:
6387 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6388 target = expand_builtin_sync_operation (mode, exp, AND,
6389 true, target, ignore);
6390 if (target)
6391 return target;
6392 break;
6394 case BUILT_IN_XOR_AND_FETCH_1:
6395 case BUILT_IN_XOR_AND_FETCH_2:
6396 case BUILT_IN_XOR_AND_FETCH_4:
6397 case BUILT_IN_XOR_AND_FETCH_8:
6398 case BUILT_IN_XOR_AND_FETCH_16:
6399 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6400 target = expand_builtin_sync_operation (mode, exp, XOR,
6401 true, target, ignore);
6402 if (target)
6403 return target;
6404 break;
6406 case BUILT_IN_NAND_AND_FETCH_1:
6407 case BUILT_IN_NAND_AND_FETCH_2:
6408 case BUILT_IN_NAND_AND_FETCH_4:
6409 case BUILT_IN_NAND_AND_FETCH_8:
6410 case BUILT_IN_NAND_AND_FETCH_16:
6411 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6412 target = expand_builtin_sync_operation (mode, exp, NOT,
6413 true, target, ignore);
6414 if (target)
6415 return target;
6416 break;
6418 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6419 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6420 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6421 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6422 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6423 if (mode == VOIDmode)
6424 mode = TYPE_MODE (boolean_type_node);
6425 if (!target || !register_operand (target, mode))
6426 target = gen_reg_rtx (mode);
6428 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6429 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6430 if (target)
6431 return target;
6432 break;
6434 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6435 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6436 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6437 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6438 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6439 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6440 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6441 if (target)
6442 return target;
6443 break;
6445 case BUILT_IN_LOCK_TEST_AND_SET_1:
6446 case BUILT_IN_LOCK_TEST_AND_SET_2:
6447 case BUILT_IN_LOCK_TEST_AND_SET_4:
6448 case BUILT_IN_LOCK_TEST_AND_SET_8:
6449 case BUILT_IN_LOCK_TEST_AND_SET_16:
6450 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6451 target = expand_builtin_lock_test_and_set (mode, exp, target);
6452 if (target)
6453 return target;
6454 break;
6456 case BUILT_IN_LOCK_RELEASE_1:
6457 case BUILT_IN_LOCK_RELEASE_2:
6458 case BUILT_IN_LOCK_RELEASE_4:
6459 case BUILT_IN_LOCK_RELEASE_8:
6460 case BUILT_IN_LOCK_RELEASE_16:
6461 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6462 expand_builtin_lock_release (mode, exp);
6463 return const0_rtx;
6465 case BUILT_IN_SYNCHRONIZE:
6466 expand_builtin_synchronize ();
6467 return const0_rtx;
6469 case BUILT_IN_OBJECT_SIZE:
6470 return expand_builtin_object_size (exp);
6472 case BUILT_IN_MEMCPY_CHK:
6473 case BUILT_IN_MEMPCPY_CHK:
6474 case BUILT_IN_MEMMOVE_CHK:
6475 case BUILT_IN_MEMSET_CHK:
6476 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6477 if (target)
6478 return target;
6479 break;
6481 case BUILT_IN_STRCPY_CHK:
6482 case BUILT_IN_STPCPY_CHK:
6483 case BUILT_IN_STRNCPY_CHK:
6484 case BUILT_IN_STRCAT_CHK:
6485 case BUILT_IN_STRNCAT_CHK:
6486 case BUILT_IN_SNPRINTF_CHK:
6487 case BUILT_IN_VSNPRINTF_CHK:
6488 maybe_emit_chk_warning (exp, fcode);
6489 break;
6491 case BUILT_IN_SPRINTF_CHK:
6492 case BUILT_IN_VSPRINTF_CHK:
6493 maybe_emit_sprintf_chk_warning (exp, fcode);
6494 break;
6496 case BUILT_IN_FREE:
6497 maybe_emit_free_warning (exp);
6498 break;
6500 default: /* just do library call, if unknown builtin */
6501 break;
6504 /* The switch statement above can drop through to cause the function
6505 to be called normally. */
6506 return expand_call (exp, target, ignore);
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  /* Only direct calls (address of a FUNCTION_DECL) can be builtins.  */
  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  /* Machine-specific (BUILT_IN_MD) builtins are deliberately excluded;
     callers only understand the machine-independent codes.  */
  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  /* Walk the declared parameter types and the actual arguments in
     lockstep, requiring each argument's type to match the parameter's
     broad type class.  */
  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  /* Trailing actual arguments beyond the prototype mean the
	     call is malformed for this builtin.  */
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      /* Too few actual arguments.  */
      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  Returns integer_one_node / integer_zero_node
   for a definite answer, or NULL_TREE to defer the decision to later
   (RTL-level) folding.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  /* The address of a string literal (or of its first element via a
     zero-index ARRAY_REF) also counts as constant.  */
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those case we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer)
    return integer_zero_node;

  /* Undecided: keep the call and let RTL expansion resolve it.  */
  return NULL_TREE;
}
6624 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6625 return it as a truthvalue. */
6627 static tree
6628 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6630 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6632 fn = built_in_decls[BUILT_IN_EXPECT];
6633 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6634 ret_type = TREE_TYPE (TREE_TYPE (fn));
6635 pred_type = TREE_VALUE (arg_types);
6636 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6638 pred = fold_convert_loc (loc, pred_type, pred);
6639 expected = fold_convert_loc (loc, expected_type, expected);
6640 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6642 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6643 build_int_cst (ret_type, 0));
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1)
{
  tree inner, fndecl;
  enum tree_code code;

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a thruthvalue.  */
  inner = arg0;
  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner = arg0;
  while (TREE_CODE (inner) == NOP_EXPR
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
    inner = TREE_OPERAND (inner, 0);

  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      /* Rewrite expect (a && b, v) as expect (a, v) && expect (b, v)
	 so each arm of the short-circuit carries the hint.  */
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1);
      op1 = build_builtin_expect_predicate (loc, op1, arg1);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Strip component/array references to reach the underlying decl.  */
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      /* The address of a weak symbol may still compare equal to null at
	 link time, so it is not a foldable constant.  */
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
6717 /* Fold a call to __builtin_classify_type with argument ARG. */
6719 static tree
6720 fold_builtin_classify_type (tree arg)
6722 if (arg == 0)
6723 return build_int_cst (NULL_TREE, no_type_class);
6725 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6728 /* Fold a call to __builtin_strlen with argument ARG. */
6730 static tree
6731 fold_builtin_strlen (location_t loc, tree type, tree arg)
6733 if (!validate_arg (arg, POINTER_TYPE))
6734 return NULL_TREE;
6735 else
6737 tree len = c_strlen (arg, 0);
6739 if (len)
6740 return fold_convert_loc (loc, type, len);
6742 return NULL_TREE;
6746 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6748 static tree
6749 fold_builtin_inf (location_t loc, tree type, int warn)
6751 REAL_VALUE_TYPE real;
6753 /* __builtin_inff is intended to be usable to define INFINITY on all
6754 targets. If an infinity is not available, INFINITY expands "to a
6755 positive constant of type float that overflows at translation
6756 time", footnote "In this case, using INFINITY will violate the
6757 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6758 Thus we pedwarn to ensure this constraint violation is
6759 diagnosed. */
6760 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6761 pedwarn (loc, 0, "target format does not support infinity");
6763 real_inf (&real);
6764 return build_real (type, real);
6767 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6769 static tree
6770 fold_builtin_nan (tree arg, tree type, int quiet)
6772 REAL_VALUE_TYPE real;
6773 const char *str;
6775 if (!validate_arg (arg, POINTER_TYPE))
6776 return NULL_TREE;
6777 str = c_getstr (arg);
6778 if (!str)
6779 return NULL_TREE;
6781 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6782 return NULL_TREE;
6784 return build_real (type, real);
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      /* Conversion from an integer type is integral by construction.  */
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      /* The value of these nodes is their second operand.  */
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* These operations preserve integrality when both operands are
	 integer valued.  */
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      /* Both arms must be integer valued; the condition is irrelevant.  */
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	/* Rounding builtins always produce integer values.  */
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	/* fmin/fmax return one of their operands, so they are integer
	   valued when both operands are.  */
	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
		 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  return false;
}
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      /* Narrow e.g. trunc ((double) f) to (double) truncf (f) when the
	 argument is really a narrower float widened for the call.  */
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert_loc (loc, ftype,
				 build_call_expr_loc (loc, decl, 1,
						      fold_convert_loc (loc,
									newtype,
									arg0)));
    }
  return NULL_TREE;
}
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			    TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      /* Narrow the floating argument when it is only a widened
	 narrower float, e.g. lround ((double) f) -> lroundf (f).  */
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				    fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  /* The long-returning variant must be widened back to the
	     original long long return type.  */
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.
   FNDECL is the cabs declaration itself, used to rebuild the call when
   only the argument is simplified.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
	  STRIP_NOPS (real);
	  return fold_build2_loc (loc, MULT_EXPR, type,
				  fold_build1_loc (loc, ABS_EXPR, type, real),
				  build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      /* Expand cabs inline as sqrt (x*x + y*y).  */
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  /* Save the argument and both parts so side effects are not
	     duplicated across the multiple uses below.  */
	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				    fold_build2_loc (loc, MULT_EXPR, type,
						     rpart, rpart),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
7046 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7047 complex tree type of the result. If NEG is true, the imaginary
7048 zero is negative. */
7050 static tree
7051 build_complex_cproj (tree type, bool neg)
7053 REAL_VALUE_TYPE rinf, rzero = dconst0;
7055 real_inf (&rinf);
7056 rzero.sign = neg;
7057 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7058 build_real (TREE_TYPE (type), rzero));
/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE
   is the complex return type of the call.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  /* Only handle a complex argument whose parts are real.  */
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  */
  if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      /* cproj maps any infinity to (inf + copysign(0,imag)*i);
	 everything else is returned unchanged.  */
      if (real_isinf (real) || real_isinf (imag))
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  return NULL_TREE;
}
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   TYPE is the return type of the call.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  Domain is
     [0, +inf), hence the &dconst0 lower bound; the final TRUE allows
     the bound itself (sqrt(0) is valid).  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root: halve the exponent by
	     decrementing the binary exponent of the REAL_VALUE.  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  The absolute value is
     needed because x may be negative while x**y is nonnegative.  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			       build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   TYPE is the return type of the call.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  /* All the remaining transformations reassociate the exponent and are
     only valid under -funsafe-math-optimizations.  */
  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
				 CALL_EXPR_ARG (arg, 0),
				 build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      /* Halve 1/3 to get 1/6 by decrementing the exponent.  */
	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  /* 1/9 = (1/3) * (1/3).  */
		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					     build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }

  return NULL_TREE;
}
7282 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7283 TYPE is the type of the return value. Return NULL_TREE if no
7284 simplification can be made. */
7286 static tree
7287 fold_builtin_cos (location_t loc,
7288 tree arg, tree type, tree fndecl)
7290 tree res, narg;
7292 if (!validate_arg (arg, REAL_TYPE))
7293 return NULL_TREE;
7295 /* Calculate the result when the argument is a constant. */
7296 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7297 return res;
7299 /* Optimize cos(-x) into cos (x). */
7300 if ((narg = fold_strip_sign_ops (arg)))
7301 return build_call_expr_loc (loc, fndecl, 1, narg);
7303 return NULL_TREE;
7306 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7307 Return NULL_TREE if no simplification can be made. */
7309 static tree
7310 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7312 if (validate_arg (arg, REAL_TYPE))
7314 tree res, narg;
7316 /* Calculate the result when the argument is a constant. */
7317 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7318 return res;
7320 /* Optimize cosh(-x) into cosh (x). */
7321 if ((narg = fold_strip_sign_ops (arg)))
7322 return build_call_expr_loc (loc, fndecl, 1, narg);
7325 return NULL_TREE;
7328 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7329 argument ARG. TYPE is the type of the return value. Return
7330 NULL_TREE if no simplification can be made. */
7332 static tree
7333 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7334 bool hyper)
7336 if (validate_arg (arg, COMPLEX_TYPE)
7337 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7339 tree tmp;
7341 /* Calculate the result when the argument is a constant. */
7342 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7343 return tmp;
7345 /* Optimize fn(-x) into fn(x). */
7346 if ((tmp = fold_strip_sign_ops (arg)))
7347 return build_call_expr_loc (loc, fndecl, 1, tmp);
7350 return NULL_TREE;
7353 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7354 Return NULL_TREE if no simplification can be made. */
7356 static tree
7357 fold_builtin_tan (tree arg, tree type)
7359 enum built_in_function fcode;
7360 tree res;
7362 if (!validate_arg (arg, REAL_TYPE))
7363 return NULL_TREE;
7365 /* Calculate the result when the argument is a constant. */
7366 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7367 return res;
7369 /* Optimize tan(atan(x)) = x. */
7370 fcode = builtin_mathfn_code (arg);
7371 if (flag_unsafe_math_optimizations
7372 && (fcode == BUILT_IN_ATAN
7373 || fcode == BUILT_IN_ATANF
7374 || fcode == BUILT_IN_ATANL))
7375 return CALL_EXPR_ARG (arg, 0);
7377 return NULL_TREE;
/* Fold function call to builtin sincos, sincosf, or sincosl.  ARG0 is
   the angle; ARG1 and ARG2 are the sine and cosine output pointers.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  Only done when the C99 runtime is
     available, since cexpi may expand to a libcall.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  /* Save the call so it is evaluated only once even though its result
     is used twice below.  */
  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  /* cexpi(x) = cos(x) + i*sin(x): store the imaginary part through
     ARG1 (sin) and the real part through ARG2 (cos).  */
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 build1 (REALPART_EXPR, type, call)));
}
/* Fold function call to builtin cexp, cexpf, or cexpl.  ARG0 is the
   complex argument, TYPE the complex return type.  Return NULL_TREE if
   no simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  /* RTYPE is the scalar (real) component type of the complex TYPE.  */
  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  Requires the C99 runtime for the cexpi libcall.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  /* cexp(0 + yi) == cexpi(y).  */
  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      /* Save both calls so each is evaluated exactly once although
	 their results are each used twice below.  */
      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      /* exp(r)*cexpi(i) = exp(r)*cos(i) + exp(r)*sin(i)*I.  */
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, REALPART_EXPR,
								rtype, icall)),
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, IMAGPART_EXPR,
								rtype, icall)));
    }

  return NULL_TREE;
}
7488 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7489 Return NULL_TREE if no simplification can be made. */
7491 static tree
7492 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7494 if (!validate_arg (arg, REAL_TYPE))
7495 return NULL_TREE;
7497 /* Optimize trunc of constant value. */
7498 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7500 REAL_VALUE_TYPE r, x;
7501 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7503 x = TREE_REAL_CST (arg);
7504 real_trunc (&r, TYPE_MODE (type), &x);
7505 return build_real (type, r);
7508 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7511 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7512 Return NULL_TREE if no simplification can be made. */
7514 static tree
7515 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7517 if (!validate_arg (arg, REAL_TYPE))
7518 return NULL_TREE;
7520 /* Optimize floor of constant value. */
7521 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7523 REAL_VALUE_TYPE x;
7525 x = TREE_REAL_CST (arg);
7526 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7528 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7529 REAL_VALUE_TYPE r;
7531 real_floor (&r, TYPE_MODE (type), &x);
7532 return build_real (type, r);
7536 /* Fold floor (x) where x is nonnegative to trunc (x). */
7537 if (tree_expr_nonnegative_p (arg))
7539 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7540 if (truncfn)
7541 return build_call_expr_loc (loc, truncfn, 1, arg);
7544 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7547 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7548 Return NULL_TREE if no simplification can be made. */
7550 static tree
7551 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7553 if (!validate_arg (arg, REAL_TYPE))
7554 return NULL_TREE;
7556 /* Optimize ceil of constant value. */
7557 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7559 REAL_VALUE_TYPE x;
7561 x = TREE_REAL_CST (arg);
7562 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7564 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7565 REAL_VALUE_TYPE r;
7567 real_ceil (&r, TYPE_MODE (type), &x);
7568 return build_real (type, r);
7572 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7575 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7576 Return NULL_TREE if no simplification can be made. */
7578 static tree
7579 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7581 if (!validate_arg (arg, REAL_TYPE))
7582 return NULL_TREE;
7584 /* Optimize round of constant value. */
7585 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7587 REAL_VALUE_TYPE x;
7589 x = TREE_REAL_CST (arg);
7590 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7592 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7593 REAL_VALUE_TYPE r;
7595 real_round (&r, TYPE_MODE (type), &x);
7596 return build_real (type, r);
7600 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      /* Non-finite values cannot be converted to an integer.  */
      if (real_isfinite (&x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  unsigned HOST_WIDE_INT lo2;
	  HOST_WIDE_INT hi, lo;
	  REAL_VALUE_TYPE r;

	  /* First perform the rounding appropriate to the builtin.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* Then convert to the integer type; fit_double_type returns
	     nonzero on overflow, in which case we make no fold.  */
	  REAL_VALUE_TO_INT (&lo, &hi, r);
	  if (!fit_double_type (lo, hi, &lo2, &hi, itype))
	    return build_int_cst_wide (itype, lo2, hi);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
				TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* The constant is held as two HOST_WIDE_INT words: LO holds the
	 low bits, HI the high bits beyond HOST_BITS_PER_WIDE_INT.  */
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision.  */
      if (width > HOST_BITS_PER_WIDE_INT)
	{
	  hi = TREE_INT_CST_HIGH (arg);
	  if (width < 2 * HOST_BITS_PER_WIDE_INT)
	    hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
	}
      else
	{
	  hi = 0;
	  if (width < HOST_BITS_PER_WIDE_INT)
	    lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
	}

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  /* ffs: 1-based index of least significant set bit, 0 if none.
	     (lo & -lo) isolates the lowest set bit.  */
	  if (lo != 0)
	    result = exact_log2 (lo & -lo) + 1;
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
	  else
	    result = 0;
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  /* clz: leading zero count; value at zero is target-defined.  */
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 1;
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  /* ctz: trailing zero count; value at zero is target-defined.  */
	  if (lo != 0)
	    result = exact_log2 (lo & -lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  /* popcount: total number of set bits, counted by repeatedly
	     clearing the lowest set bit.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= hi - 1;
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  /* parity: popcount modulo 2.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= hi - 1;
	  result &= 1;
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
/* Fold function call to builtin_bswap and the long and long long
   variants.  ARG is the integer argument whose bytes are reversed.
   Return NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* The constant is a pair of HOST_WIDE_INT words (LO/HI); the
	 reversed result is accumulated in R_LO/R_HI.  */
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	  case BUILT_IN_BSWAP32:
	  case BUILT_IN_BSWAP64:
	    {
	      int s;

	      /* Move the byte at bit offset S to the mirrored offset D,
		 picking each from whichever host word holds it.  */
	      for (s = 0; s < width; s += 8)
		{
		  int d = width - s - 8;
		  unsigned HOST_WIDE_INT byte;

		  if (s < HOST_BITS_PER_WIDE_INT)
		    byte = (lo >> s) & 0xff;
		  else
		    byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

		  if (d < HOST_BITS_PER_WIDE_INT)
		    r_lo |= byte << d;
		  else
		    r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
		}
	    }

	    break;

	default:
	  gcc_unreachable ();
	}

      if (width < HOST_BITS_PER_WIDE_INT)
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
      else
	return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
    }

  return NULL_TREE;
}
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can me made.
   FUNC is the corresponding MPFR logarithm function and also selects
   which log flavor (log, log2, log10) is being folded.  */

static tree
fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  The
	 &dconst0 bound restricts folding to the (0, +inf) domain.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
	      x = build_real (type, real_value_truncate (TYPE_MODE (type),
							 dconst_e ()));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconst_third ()));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
	      return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
/* Fold a builtin function call to hypot, hypotf, or hypotl.  TYPE is
   the return type of the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_hypot (location_t loc, tree fndecl,
		    tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  Valid because hypot is even
     in both arguments.  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    {
      return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
				  narg1 ? narg1 : arg1);
    }

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1_loc (loc, ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1_loc (loc, ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2).  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    {
      const REAL_VALUE_TYPE sqrt2_trunc
	= real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
      return fold_build2_loc (loc, MULT_EXPR, type,
			      fold_build1_loc (loc, ABS_EXPR, type, arg0),
			      build_real (type, sqrt2_trunc));
    }

  return NULL_TREE;
}
/* Fold a builtin function call to pow, powf, or powl.  ARG0 is the base,
   ARG1 the exponent, TYPE the return type of the call.  Return
   NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  omit_one_operand keeps any side
     effects of the discarded exponent expression.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent: round-trip through an integer
	 and see if the value is unchanged.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
8113 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8114 Return NULL_TREE if no simplification can be made. */
8115 static tree
8116 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8117 tree arg0, tree arg1, tree type)
8119 if (!validate_arg (arg0, REAL_TYPE)
8120 || !validate_arg (arg1, INTEGER_TYPE))
8121 return NULL_TREE;
8123 /* Optimize pow(1.0,y) = 1.0. */
8124 if (real_onep (arg0))
8125 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8127 if (host_integerp (arg1, 0))
8129 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8131 /* Evaluate powi at compile-time. */
8132 if (TREE_CODE (arg0) == REAL_CST
8133 && !TREE_OVERFLOW (arg0))
8135 REAL_VALUE_TYPE x;
8136 x = TREE_REAL_CST (arg0);
8137 real_powi (&x, TYPE_MODE (type), &x, c);
8138 return build_real (type, x);
8141 /* Optimize pow(x,0) = 1.0. */
8142 if (c == 0)
8143 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8144 arg0);
8146 /* Optimize pow(x,1) = x. */
8147 if (c == 1)
8148 return arg0;
8150 /* Optimize pow(x,-1) = 1.0/x. */
8151 if (c == -1)
8152 return fold_build2_loc (loc, RDIV_EXPR, type,
8153 build_real (type, dconst1), arg0);
8156 return NULL_TREE;
/* A subroutine of fold_builtin to fold the various exponent
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR exponent function and also selects
   which flavor (exp, exp2, exp10) is being folded.  */

static tree
fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x.  The log flavor must match FUNC.  */
      if (flag_unsafe_math_optimizations)
	{
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}
8200 /* Return true if VAR is a VAR_DECL or a component thereof. */
8202 static bool
8203 var_decl_component_p (tree var)
8205 tree inner = var;
8206 while (handled_component_p (inner))
8207 inner = TREE_OPERAND (inner, 0);
8208 return SSA_VAR_P (inner);
8211 /* Fold function call to builtin memset.  Return
8212    NULL_TREE if no simplification can be made.  */
8214 static tree
8215 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8216 		     tree type, bool ignore)
8218   tree var, ret, etype;
8219   unsigned HOST_WIDE_INT length, cval;
8221   if (! validate_arg (dest, POINTER_TYPE)
8222       || ! validate_arg (c, INTEGER_TYPE)
8223       || ! validate_arg (len, INTEGER_TYPE))
8224     return NULL_TREE;
  /* The single-store transform below needs LEN as a small host
     constant.  */
8226   if (! host_integerp (len, 1))
8227     return NULL_TREE;
8229   /* If the LEN parameter is zero, return DEST.  */
8230   if (integer_zerop (len))
8231     return omit_one_operand_loc (loc, type, dest, c)
8233   if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8234     return NULL_TREE;
  /* Only fold a store through the address of a non-volatile decl (or a
     component of one).  */
8236   var = dest;
8237   STRIP_NOPS (var);
8238   if (TREE_CODE (var) != ADDR_EXPR)
8239     return NULL_TREE;
8241   var = TREE_OPERAND (var, 0);
8242   if (TREE_THIS_VOLATILE (var))
8243     return NULL_TREE;
8245   etype = TREE_TYPE (var);
8246   if (TREE_CODE (etype) == ARRAY_TYPE)
8247     etype = TREE_TYPE (etype);
8249   if (!INTEGRAL_TYPE_P (etype)
8250       && !POINTER_TYPE_P (etype))
8251     return NULL_TREE;
8253   if (! var_decl_component_p (var))
8254     return NULL_TREE;
  /* The replacement store must cover the object exactly, be
     sufficiently aligned, and fit in a HOST_WIDE_INT.  */
8256   length = tree_low_cst (len, 1);
8257   if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8258       || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8259 	 < (int) length)
8260     return NULL_TREE;
8262   if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8263     return NULL_TREE;
8265   if (integer_zerop (c))
8266     cval = 0;
8267   else
8269       if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8270 	return NULL_TREE;
      /* Replicate the low byte of C into every byte of CVAL.  The final
	 split shift (31 then 1) avoids an undefined 32-bit shift when
	 HOST_WIDE_INT is only 32 bits wide.  */
8272       cval = tree_low_cst (c, 1);
8273       cval &= 0xff;
8274       cval |= cval << 8;
8275       cval |= cval << 16;
8276       cval |= (cval << 31) << 1;
  /* Emit "*(etype *) dest = cval"; return the store itself when the
     call's value is unused, otherwise DEST with the store as a side
     effect.  */
8279   ret = build_int_cst_type (etype, cval);
8280   var = build_fold_indirect_ref_loc (loc,
8281 				     fold_convert_loc (loc,
8282 						       build_pointer_type (etype),
8283 						       dest));
8284   ret = build2 (MODIFY_EXPR, etype, var, ret);
8285   if (ignore)
8286     return ret;
8288   return omit_one_operand_loc (loc, type, dest, ret);
8291 /* Fold function call to builtin memset. Return
8292 NULL_TREE if no simplification can be made. */
8294 static tree
8295 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8297 if (! validate_arg (dest, POINTER_TYPE)
8298 || ! validate_arg (size, INTEGER_TYPE))
8299 return NULL_TREE;
8301 if (!ignore)
8302 return NULL_TREE;
8304 /* New argument list transforming bzero(ptr x, int y) to
8305 memset(ptr x, int 0, size_t y). This is done this way
8306 so that if it isn't expanded inline, we fallback to
8307 calling bzero instead of memset. */
8309 return fold_builtin_memset (loc, dest, integer_zero_node,
8310 fold_convert_loc (loc, sizetype, size),
8311 void_type_node, ignore);
8314 /* Fold function call to builtin mem{{,p}cpy,move}.  Return
8315    NULL_TREE if no simplification can be made.
8316    If ENDP is 0, return DEST (like memcpy).
8317    If ENDP is 1, return DEST+LEN (like mempcpy).
8318    If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8319    If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8320    (memmove).  */
8322 static tree
8323 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8324 			tree len, tree type, bool ignore, int endp)
8326   tree destvar, srcvar, expr;
8328   if (! validate_arg (dest, POINTER_TYPE)
8329       || ! validate_arg (src, POINTER_TYPE)
8330       || ! validate_arg (len, INTEGER_TYPE))
8331     return NULL_TREE;
8333   /* If the LEN parameter is zero, return DEST.  */
8334   if (integer_zerop (len))
8335     return omit_one_operand_loc (loc, type, dest, src);
8337   /* If SRC and DEST are the same (and not volatile), return
8338      DEST{,+LEN,+LEN-1}.  */
8339   if (operand_equal_p (src, dest, 0))
8340     expr = len;
8341   else
8343       tree srctype, desttype;
8344       int src_align, dest_align;
  /* ENDP == 3 is memmove: try to prove the regions cannot overlap so
     the call can be turned into memcpy; otherwise give up.  */
8346       if (endp == 3)
8348 	  src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8349 	  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8351 	  /* Both DEST and SRC must be pointer types.
8352 	     ??? This is what old code did.  Is the testing for pointer types
8353 	     really mandatory?
8355 	     If either SRC is readonly or length is 1, we can use memcpy.  */
8356 	  if (!dest_align || !src_align)
8357 	    return NULL_TREE;
8358 	  if (readonly_data_expr (src)
8359 	      || (host_integerp (len, 1)
8360 		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
8361 		      >= tree_low_cst (len, 1))))
8363 	      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8364 	      if (!fn)
8365 		return NULL_TREE;
8366               return build_call_expr_loc (loc, fn, 3, dest, src, len);
8369 	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
8370 	  srcvar = build_fold_indirect_ref_loc (loc, src);
8371 	  destvar = build_fold_indirect_ref_loc (loc, dest);
8372 	  if (srcvar
8373 	      && !TREE_THIS_VOLATILE (srcvar)
8374 	      && destvar
8375 	      && !TREE_THIS_VOLATILE (destvar))
8377 	      tree src_base, dest_base, fn;
8378 	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8379 	      HOST_WIDE_INT size = -1;
8380 	      HOST_WIDE_INT maxsize = -1;
8382 	      src_base = srcvar;
8383 	      if (handled_component_p (src_base))
8384 		src_base = get_ref_base_and_extent (src_base, &src_offset,
8385 						    &size, &maxsize);
8386 	      dest_base = destvar;
8387 	      if (handled_component_p (dest_base))
8388 		dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
8389 						     &size, &maxsize);
  /* MAXSIZE becomes LEN in bits, or -1 (unknown) on overflow or a
     non-constant length.  */
8390 	      if (host_integerp (len, 1))
8392 		  maxsize = tree_low_cst (len, 1);
8393 		  if (maxsize
8394 		      > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
8395 		    maxsize = -1;
8396 		  else
8397 		    maxsize *= BITS_PER_UNIT;
8399 	      else
8400 		maxsize = -1;
  /* Two decls overlap only if they are the same decl with overlapping
     ranges; two indirect refs are safe only when they provably point
     at the same base with non-overlapping ranges.  Mixed bases are
     given up on.  */
8401 	      if (SSA_VAR_P (src_base)
8402 		  && SSA_VAR_P (dest_base))
8404 		  if (operand_equal_p (src_base, dest_base, 0)
8405 		      && ranges_overlap_p (src_offset, maxsize,
8406 					   dest_offset, maxsize))
8407 		    return NULL_TREE;
8409 	      else if (TREE_CODE (src_base) == INDIRECT_REF
8410 		       && TREE_CODE (dest_base) == INDIRECT_REF)
8412 		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8413 					 TREE_OPERAND (dest_base, 0), 0)
8414 		      || ranges_overlap_p (src_offset, maxsize,
8415 					   dest_offset, maxsize))
8416 		    return NULL_TREE;
8418 	      else
8419 		return NULL_TREE;
8421 	      fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8422 	      if (!fn)
8423 		return NULL_TREE;
8424 	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
8426 	  return NULL_TREE;
  /* memcpy/mempcpy/stpcpy path: try to replace the call by a single
     scalar assignment *destvar = *srcvar when LEN equals the size of
     the pointed-to type on both sides.  */
8429       if (!host_integerp (len, 0))
8430 	return NULL_TREE;
8431       /* FIXME:
8432 	 This logic lose for arguments like (type *)malloc (sizeof (type)),
8433 	 since we strip the casts of up to VOID return value from malloc.
8434 	 Perhaps we ought to inherit type from non-VOID argument here?  */
8435       STRIP_NOPS (src);
8436       STRIP_NOPS (dest);
8437       /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
8438       if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8440 	  tree tem = TREE_OPERAND (src, 0);
8441 	  STRIP_NOPS (tem);
8442 	  if (tem != TREE_OPERAND (src, 0))
8443 	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8445       if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8447 	  tree tem = TREE_OPERAND (dest, 0);
8448 	  STRIP_NOPS (tem);
8449 	  if (tem != TREE_OPERAND (dest, 0))
8450 	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
  /* For arrays whose size differs from LEN, copy through the element
     type instead.  */
8452       srctype = TREE_TYPE (TREE_TYPE (src));
8453       if (srctype
8454 	  && TREE_CODE (srctype) == ARRAY_TYPE
8455 	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8457 	  srctype = TREE_TYPE (srctype);
8458 	  STRIP_NOPS (src);
8459 	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8461       desttype = TREE_TYPE (TREE_TYPE (dest));
8462       if (desttype
8463 	  && TREE_CODE (desttype) == ARRAY_TYPE
8464 	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8466 	  desttype = TREE_TYPE (desttype);
8467 	  STRIP_NOPS (dest);
8468 	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8470       if (!srctype || !desttype
8471 	  || !TYPE_SIZE_UNIT (srctype)
8472 	  || !TYPE_SIZE_UNIT (desttype)
8473 	  || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8474 	  || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8475 	  || TYPE_VOLATILE (srctype)
8476 	  || TYPE_VOLATILE (desttype))
8477 	return NULL_TREE;
8479       src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8480       dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8481       if (dest_align < (int) TYPE_ALIGN (desttype)
8482 	  || src_align < (int) TYPE_ALIGN (srctype))
8483 	return NULL_TREE;
  /* DEST is used both in the store and in the returned value; guard
     against double evaluation.  */
8485       if (!ignore)
8486         dest = builtin_save_expr (dest);
8488       srcvar = NULL_TREE;
8489       if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8491 	  srcvar = build_fold_indirect_ref_loc (loc, src);
8492 	  if (TREE_THIS_VOLATILE (srcvar))
8493 	    return NULL_TREE;
8494 	  else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
8495 	    srcvar = NULL_TREE;
8496 	  /* With memcpy, it is possible to bypass aliasing rules, so without
8497 	     this check i.e. execute/20060930-2.c would be misoptimized,
8498 	     because it use conflicting alias set to hold argument for the
8499 	     memcpy call.  This check is probably unnecessary with
8500 	     -fno-strict-aliasing.  Similarly for destvar.  See also
8501 	     PR29286.  */
8502 	  else if (!var_decl_component_p (srcvar))
8503 	    srcvar = NULL_TREE;
8506       destvar = NULL_TREE;
8507       if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8509 	  destvar = build_fold_indirect_ref_loc (loc, dest);
8510 	  if (TREE_THIS_VOLATILE (destvar))
8511 	    return NULL_TREE;
8512 	  else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
8513 	    destvar = NULL_TREE;
8514 	  else if (!var_decl_component_p (destvar))
8515 	    destvar = NULL_TREE;
8518       if (srcvar == NULL_TREE && destvar == NULL_TREE)
8519 	return NULL_TREE;
  /* If only one side is usable, re-derive the other side's type from
     it, building an unaligned/packed variant type when needed.  */
8521       if (srcvar == NULL_TREE)
8523 	  tree srcptype;
8524 	  if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
8525 	    return NULL_TREE;
8527 	  srctype = build_qualified_type (desttype, 0);
8528 	  if (src_align < (int) TYPE_ALIGN (srctype))
8530 	      if (AGGREGATE_TYPE_P (srctype)
8531 		  || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
8532 		return NULL_TREE;
8534 	      srctype = build_variant_type_copy (srctype);
8535 	      TYPE_ALIGN (srctype) = src_align;
8536 	      TYPE_USER_ALIGN (srctype) = 1;
8537 	      TYPE_PACKED (srctype) = 1;
8539 	  srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
8540 	  src = fold_convert_loc (loc, srcptype, src);
8541 	  srcvar = build_fold_indirect_ref_loc (loc, src);
8543       else if (destvar == NULL_TREE)
8545 	  tree destptype;
8546 	  if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
8547 	    return NULL_TREE;
8549 	  desttype = build_qualified_type (srctype, 0);
8550 	  if (dest_align < (int) TYPE_ALIGN (desttype))
8552 	      if (AGGREGATE_TYPE_P (desttype)
8553 		  || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
8554 		return NULL_TREE;
8556 	      desttype = build_variant_type_copy (desttype);
8557 	      TYPE_ALIGN (desttype) = dest_align;
8558 	      TYPE_USER_ALIGN (desttype) = 1;
8559 	      TYPE_PACKED (desttype) = 1;
8561 	  destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
8562 	  dest = fold_convert_loc (loc, destptype, dest);
8563 	  destvar = build_fold_indirect_ref_loc (loc, dest);
  /* Build the scalar assignment, converting between the two types as
     needed (VIEW_CONVERT_EXPR for incompatible non-scalar types).  */
8566       if (srctype == desttype
8567 	  || (gimple_in_ssa_p (cfun)
8568 	      && useless_type_conversion_p (desttype, srctype)))
8569 	expr = srcvar;
8570       else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8571 	   || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8572 	  && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8573 	      || POINTER_TYPE_P (TREE_TYPE (destvar))))
8574 	expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
8575       else
8576 	expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8577 				TREE_TYPE (destvar), srcvar);
8578       expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
  /* Combine the assignment with the value required by ENDP (DEST,
     DEST+LEN or DEST+LEN-1).  */
8581   if (ignore)
8582     return expr;
8584   if (endp == 0 || endp == 3)
8585     return omit_one_operand_loc (loc, type, dest, expr);
8587   if (expr == len)
8588     expr = NULL_TREE;
8590   if (endp == 2)
8591     len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8592 			   ssize_int (1));
8594   len = fold_convert_loc (loc, sizetype, len);
8595   dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8596   dest = fold_convert_loc (loc, type, dest);
8597   if (expr)
8598     dest = omit_one_operand_loc (loc, type, dest, expr);
8599   return dest;
8602 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8603 If LEN is not NULL, it represents the length of the string to be
8604 copied. Return NULL_TREE if no simplification can be made. */
8606 tree
8607 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8609 tree fn;
8611 if (!validate_arg (dest, POINTER_TYPE)
8612 || !validate_arg (src, POINTER_TYPE))
8613 return NULL_TREE;
8615 /* If SRC and DEST are the same (and not volatile), return DEST. */
8616 if (operand_equal_p (src, dest, 0))
8617 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8619 if (optimize_function_for_size_p (cfun))
8620 return NULL_TREE;
8622 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8623 if (!fn)
8624 return NULL_TREE;
8626 if (!len)
8628 len = c_strlen (src, 1);
8629 if (! len || TREE_SIDE_EFFECTS (len))
8630 return NULL_TREE;
8633 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8634 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8635 build_call_expr_loc (loc, fn, 3, dest, src, len));
8638 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8639 Return NULL_TREE if no simplification can be made. */
8641 static tree
8642 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8644 tree fn, len, lenp1, call, type;
8646 if (!validate_arg (dest, POINTER_TYPE)
8647 || !validate_arg (src, POINTER_TYPE))
8648 return NULL_TREE;
8650 len = c_strlen (src, 1);
8651 if (!len
8652 || TREE_CODE (len) != INTEGER_CST)
8653 return NULL_TREE;
8655 if (optimize_function_for_size_p (cfun)
8656 /* If length is zero it's small enough. */
8657 && !integer_zerop (len))
8658 return NULL_TREE;
8660 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8661 if (!fn)
8662 return NULL_TREE;
8664 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8665 /* We use dest twice in building our expression. Save it from
8666 multiple expansions. */
8667 dest = builtin_save_expr (dest);
8668 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8670 type = TREE_TYPE (TREE_TYPE (fndecl));
8671 len = fold_convert_loc (loc, sizetype, len);
8672 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8673 dest = fold_convert_loc (loc, type, dest);
8674 dest = omit_one_operand_loc (loc, type, dest, call);
8675 return dest;
8678 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8679 If SLEN is not NULL, it represents the length of the source string.
8680 Return NULL_TREE if no simplification can be made. */
8682 tree
8683 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8684 tree src, tree len, tree slen)
8686 tree fn;
8688 if (!validate_arg (dest, POINTER_TYPE)
8689 || !validate_arg (src, POINTER_TYPE)
8690 || !validate_arg (len, INTEGER_TYPE))
8691 return NULL_TREE;
8693 /* If the LEN parameter is zero, return DEST. */
8694 if (integer_zerop (len))
8695 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8697 /* We can't compare slen with len as constants below if len is not a
8698 constant. */
8699 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8700 return NULL_TREE;
8702 if (!slen)
8703 slen = c_strlen (src, 1);
8705 /* Now, we must be passed a constant src ptr parameter. */
8706 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8707 return NULL_TREE;
8709 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8711 /* We do not support simplification of this case, though we do
8712 support it when expanding trees into RTL. */
8713 /* FIXME: generate a call to __builtin_memset. */
8714 if (tree_int_cst_lt (slen, len))
8715 return NULL_TREE;
8717 /* OK transform into builtin memcpy. */
8718 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8719 if (!fn)
8720 return NULL_TREE;
8721 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8722 build_call_expr_loc (loc, fn, 3, dest, src, len));
8725 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8726 arguments to the call, and TYPE is its return type.
8727 Return NULL_TREE if no simplification can be made. */
8729 static tree
8730 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8732 if (!validate_arg (arg1, POINTER_TYPE)
8733 || !validate_arg (arg2, INTEGER_TYPE)
8734 || !validate_arg (len, INTEGER_TYPE))
8735 return NULL_TREE;
8736 else
8738 const char *p1;
8740 if (TREE_CODE (arg2) != INTEGER_CST
8741 || !host_integerp (len, 1))
8742 return NULL_TREE;
8744 p1 = c_getstr (arg1);
8745 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8747 char c;
8748 const char *r;
8749 tree tem;
8751 if (target_char_cast (arg2, &c))
8752 return NULL_TREE;
8754 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8756 if (r == NULL)
8757 return build_int_cst (TREE_TYPE (arg1), 0);
8759 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8760 size_int (r - p1));
8761 return fold_convert_loc (loc, type, tem);
8763 return NULL_TREE;
8767 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8768 Return NULL_TREE if no simplification can be made. */
8770 static tree
8771 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8773 const char *p1, *p2;
8775 if (!validate_arg (arg1, POINTER_TYPE)
8776 || !validate_arg (arg2, POINTER_TYPE)
8777 || !validate_arg (len, INTEGER_TYPE))
8778 return NULL_TREE;
8780 /* If the LEN parameter is zero, return zero. */
8781 if (integer_zerop (len))
8782 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8783 arg1, arg2);
8785 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8786 if (operand_equal_p (arg1, arg2, 0))
8787 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8789 p1 = c_getstr (arg1);
8790 p2 = c_getstr (arg2);
8792 /* If all arguments are constant, and the value of len is not greater
8793 than the lengths of arg1 and arg2, evaluate at compile-time. */
8794 if (host_integerp (len, 1) && p1 && p2
8795 && compare_tree_int (len, strlen (p1) + 1) <= 0
8796 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8798 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8800 if (r > 0)
8801 return integer_one_node;
8802 else if (r < 0)
8803 return integer_minus_one_node;
8804 else
8805 return integer_zero_node;
8808 /* If len parameter is one, return an expression corresponding to
8809 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8810 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8812 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8813 tree cst_uchar_ptr_node
8814 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8816 tree ind1
8817 = fold_convert_loc (loc, integer_type_node,
8818 build1 (INDIRECT_REF, cst_uchar_node,
8819 fold_convert_loc (loc,
8820 cst_uchar_ptr_node,
8821 arg1)));
8822 tree ind2
8823 = fold_convert_loc (loc, integer_type_node,
8824 build1 (INDIRECT_REF, cst_uchar_node,
8825 fold_convert_loc (loc,
8826 cst_uchar_ptr_node,
8827 arg2)));
8828 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8831 return NULL_TREE;
8834 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8835 Return NULL_TREE if no simplification can be made. */
8837 static tree
8838 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8840 const char *p1, *p2;
8842 if (!validate_arg (arg1, POINTER_TYPE)
8843 || !validate_arg (arg2, POINTER_TYPE))
8844 return NULL_TREE;
8846 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8847 if (operand_equal_p (arg1, arg2, 0))
8848 return integer_zero_node;
8850 p1 = c_getstr (arg1);
8851 p2 = c_getstr (arg2);
8853 if (p1 && p2)
8855 const int i = strcmp (p1, p2);
8856 if (i < 0)
8857 return integer_minus_one_node;
8858 else if (i > 0)
8859 return integer_one_node;
8860 else
8861 return integer_zero_node;
8864 /* If the second arg is "", return *(const unsigned char*)arg1. */
8865 if (p2 && *p2 == '\0')
8867 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8868 tree cst_uchar_ptr_node
8869 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8871 return fold_convert_loc (loc, integer_type_node,
8872 build1 (INDIRECT_REF, cst_uchar_node,
8873 fold_convert_loc (loc,
8874 cst_uchar_ptr_node,
8875 arg1)));
8878 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8879 if (p1 && *p1 == '\0')
8881 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8882 tree cst_uchar_ptr_node
8883 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8885 tree temp
8886 = fold_convert_loc (loc, integer_type_node,
8887 build1 (INDIRECT_REF, cst_uchar_node,
8888 fold_convert_loc (loc,
8889 cst_uchar_ptr_node,
8890 arg2)));
8891 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8894 return NULL_TREE;
8897 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8898 Return NULL_TREE if no simplification can be made. */
8900 static tree
8901 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8903 const char *p1, *p2;
8905 if (!validate_arg (arg1, POINTER_TYPE)
8906 || !validate_arg (arg2, POINTER_TYPE)
8907 || !validate_arg (len, INTEGER_TYPE))
8908 return NULL_TREE;
8910 /* If the LEN parameter is zero, return zero. */
8911 if (integer_zerop (len))
8912 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8913 arg1, arg2);
8915 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8916 if (operand_equal_p (arg1, arg2, 0))
8917 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8919 p1 = c_getstr (arg1);
8920 p2 = c_getstr (arg2);
8922 if (host_integerp (len, 1) && p1 && p2)
8924 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8925 if (i > 0)
8926 return integer_one_node;
8927 else if (i < 0)
8928 return integer_minus_one_node;
8929 else
8930 return integer_zero_node;
8933 /* If the second arg is "", and the length is greater than zero,
8934 return *(const unsigned char*)arg1. */
8935 if (p2 && *p2 == '\0'
8936 && TREE_CODE (len) == INTEGER_CST
8937 && tree_int_cst_sgn (len) == 1)
8939 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8940 tree cst_uchar_ptr_node
8941 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8943 return fold_convert_loc (loc, integer_type_node,
8944 build1 (INDIRECT_REF, cst_uchar_node,
8945 fold_convert_loc (loc,
8946 cst_uchar_ptr_node,
8947 arg1)));
8950 /* If the first arg is "", and the length is greater than zero,
8951 return -*(const unsigned char*)arg2. */
8952 if (p1 && *p1 == '\0'
8953 && TREE_CODE (len) == INTEGER_CST
8954 && tree_int_cst_sgn (len) == 1)
8956 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8957 tree cst_uchar_ptr_node
8958 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8960 tree temp = fold_convert_loc (loc, integer_type_node,
8961 build1 (INDIRECT_REF, cst_uchar_node,
8962 fold_convert_loc (loc,
8963 cst_uchar_ptr_node,
8964 arg2)));
8965 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8968 /* If len parameter is one, return an expression corresponding to
8969 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8970 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8972 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8973 tree cst_uchar_ptr_node
8974 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8976 tree ind1 = fold_convert_loc (loc, integer_type_node,
8977 build1 (INDIRECT_REF, cst_uchar_node,
8978 fold_convert_loc (loc,
8979 cst_uchar_ptr_node,
8980 arg1)));
8981 tree ind2 = fold_convert_loc (loc, integer_type_node,
8982 build1 (INDIRECT_REF, cst_uchar_node,
8983 fold_convert_loc (loc,
8984 cst_uchar_ptr_node,
8985 arg2)));
8986 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8989 return NULL_TREE;
8992 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8993 ARG. Return NULL_TREE if no simplification can be made. */
8995 static tree
8996 fold_builtin_signbit (location_t loc, tree arg, tree type)
8998 tree temp;
9000 if (!validate_arg (arg, REAL_TYPE))
9001 return NULL_TREE;
9003 /* If ARG is a compile-time constant, determine the result. */
9004 if (TREE_CODE (arg) == REAL_CST
9005 && !TREE_OVERFLOW (arg))
9007 REAL_VALUE_TYPE c;
9009 c = TREE_REAL_CST (arg);
9010 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9011 return fold_convert_loc (loc, type, temp);
9014 /* If ARG is non-negative, the result is always zero. */
9015 if (tree_expr_nonnegative_p (arg))
9016 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9018 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9019 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9020 return fold_build2_loc (loc, LT_EXPR, type, arg,
9021 build_real (TREE_TYPE (arg), dconst0));
9023 return NULL_TREE;
9026 /* Fold function call to builtin copysign, copysignf or copysignl with
9027 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9028 be made. */
9030 static tree
9031 fold_builtin_copysign (location_t loc, tree fndecl,
9032 tree arg1, tree arg2, tree type)
9034 tree tem;
9036 if (!validate_arg (arg1, REAL_TYPE)
9037 || !validate_arg (arg2, REAL_TYPE))
9038 return NULL_TREE;
9040 /* copysign(X,X) is X. */
9041 if (operand_equal_p (arg1, arg2, 0))
9042 return fold_convert_loc (loc, type, arg1);
9044 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9045 if (TREE_CODE (arg1) == REAL_CST
9046 && TREE_CODE (arg2) == REAL_CST
9047 && !TREE_OVERFLOW (arg1)
9048 && !TREE_OVERFLOW (arg2))
9050 REAL_VALUE_TYPE c1, c2;
9052 c1 = TREE_REAL_CST (arg1);
9053 c2 = TREE_REAL_CST (arg2);
9054 /* c1.sign := c2.sign. */
9055 real_copysign (&c1, &c2);
9056 return build_real (type, c1);
9059 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9060 Remember to evaluate Y for side-effects. */
9061 if (tree_expr_nonnegative_p (arg2))
9062 return omit_one_operand_loc (loc, type,
9063 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9064 arg2);
9066 /* Strip sign changing operations for the first argument. */
9067 tem = fold_strip_sign_ops (arg1);
9068 if (tem)
9069 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9071 return NULL_TREE;
9074 /* Fold a call to builtin isascii with argument ARG. */
9076 static tree
9077 fold_builtin_isascii (location_t loc, tree arg)
9079 if (!validate_arg (arg, INTEGER_TYPE))
9080 return NULL_TREE;
9081 else
9083 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9084 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9085 build_int_cst (NULL_TREE,
9086 ~ (unsigned HOST_WIDE_INT) 0x7f));
9087 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9088 arg, integer_zero_node);
9092 /* Fold a call to builtin toascii with argument ARG. */
9094 static tree
9095 fold_builtin_toascii (location_t loc, tree arg)
9097 if (!validate_arg (arg, INTEGER_TYPE))
9098 return NULL_TREE;
9100 /* Transform toascii(c) -> (c & 0x7f). */
9101 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9102 build_int_cst (NULL_TREE, 0x7f));
9105 /* Fold a call to builtin isdigit with argument ARG. */
9107 static tree
9108 fold_builtin_isdigit (location_t loc, tree arg)
9110 if (!validate_arg (arg, INTEGER_TYPE))
9111 return NULL_TREE;
9112 else
9114 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9115 /* According to the C standard, isdigit is unaffected by locale.
9116 However, it definitely is affected by the target character set. */
9117 unsigned HOST_WIDE_INT target_digit0
9118 = lang_hooks.to_target_charset ('0');
9120 if (target_digit0 == 0)
9121 return NULL_TREE;
9123 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9124 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9125 build_int_cst (unsigned_type_node, target_digit0));
9126 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9127 build_int_cst (unsigned_type_node, 9));
9131 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9133 static tree
9134 fold_builtin_fabs (location_t loc, tree arg, tree type)
9136 if (!validate_arg (arg, REAL_TYPE))
9137 return NULL_TREE;
9139 arg = fold_convert_loc (loc, type, arg);
9140 if (TREE_CODE (arg) == REAL_CST)
9141 return fold_abs_const (arg, type);
9142 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9145 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9147 static tree
9148 fold_builtin_abs (location_t loc, tree arg, tree type)
9150 if (!validate_arg (arg, INTEGER_TYPE))
9151 return NULL_TREE;
9153 arg = fold_convert_loc (loc, type, arg);
9154 if (TREE_CODE (arg) == INTEGER_CST)
9155 return fold_abs_const (arg, type);
9156 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9159 /* Fold a call to builtin fmin or fmax. */
9161 static tree
9162 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9163 tree type, bool max)
9165 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9167 /* Calculate the result when the argument is a constant. */
9168 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9170 if (res)
9171 return res;
9173 /* If either argument is NaN, return the other one. Avoid the
9174 transformation if we get (and honor) a signalling NaN. Using
9175 omit_one_operand() ensures we create a non-lvalue. */
9176 if (TREE_CODE (arg0) == REAL_CST
9177 && real_isnan (&TREE_REAL_CST (arg0))
9178 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9179 || ! TREE_REAL_CST (arg0).signalling))
9180 return omit_one_operand_loc (loc, type, arg1, arg0);
9181 if (TREE_CODE (arg1) == REAL_CST
9182 && real_isnan (&TREE_REAL_CST (arg1))
9183 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9184 || ! TREE_REAL_CST (arg1).signalling))
9185 return omit_one_operand_loc (loc, type, arg0, arg1);
9187 /* Transform fmin/fmax(x,x) -> x. */
9188 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9189 return omit_one_operand_loc (loc, type, arg0, arg1);
9191 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9192 functions to return the numeric arg if the other one is NaN.
9193 These tree codes don't honor that, so only transform if
9194 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9195 handled, so we don't have to worry about it either. */
9196 if (flag_finite_math_only)
9197 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9198 fold_convert_loc (loc, type, arg0),
9199 fold_convert_loc (loc, type, arg1));
9201 return NULL_TREE;
9204 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9206 static tree
9207 fold_builtin_carg (location_t loc, tree arg, tree type)
9209 if (validate_arg (arg, COMPLEX_TYPE)
9210 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9212 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9214 if (atan2_fn)
9216 tree new_arg = builtin_save_expr (arg);
9217 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9218 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9219 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9223 return NULL_TREE;
9226 /* Fold a call to builtin logb/ilogb. */
9228 static tree
9229 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9231 if (! validate_arg (arg, REAL_TYPE))
9232 return NULL_TREE;
9234 STRIP_NOPS (arg);
9236 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9238 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9240 switch (value->cl)
9242 case rvc_nan:
9243 case rvc_inf:
9244 /* If arg is Inf or NaN and we're logb, return it. */
9245 if (TREE_CODE (rettype) == REAL_TYPE)
9246 return fold_convert_loc (loc, rettype, arg);
9247 /* Fall through... */
9248 case rvc_zero:
9249 /* Zero may set errno and/or raise an exception for logb, also
9250 for ilogb we don't know FP_ILOGB0. */
9251 return NULL_TREE;
9252 case rvc_normal:
9253 /* For normal numbers, proceed iff radix == 2. In GCC,
9254 normalized significands are in the range [0.5, 1.0). We
9255 want the exponent as if they were [1.0, 2.0) so get the
9256 exponent and subtract 1. */
9257 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9258 return fold_convert_loc (loc, rettype,
9259 build_int_cst (NULL_TREE,
9260 REAL_EXP (value)-1));
9261 break;
9265 return NULL_TREE;
9268 /* Fold a call to builtin significand, if radix == 2. */
9270 static tree
9271 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9273 if (! validate_arg (arg, REAL_TYPE))
9274 return NULL_TREE;
9276 STRIP_NOPS (arg);
9278 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9280 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9282 switch (value->cl)
9284 case rvc_zero:
9285 case rvc_nan:
9286 case rvc_inf:
9287 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9288 return fold_convert_loc (loc, rettype, arg);
9289 case rvc_normal:
9290 /* For normal numbers, proceed iff radix == 2. */
9291 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9293 REAL_VALUE_TYPE result = *value;
9294 /* In GCC, normalized significands are in the range [0.5,
9295 1.0). We want them to be [1.0, 2.0) so set the
9296 exponent to 1. */
9297 SET_REAL_EXP (&result, 1);
9298 return build_real (rettype, result);
9300 break;
9304 return NULL_TREE;
9307 /* Fold a call to builtin frexp, we can assume the base is 2. */
9309 static tree
9310 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9312 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9313 return NULL_TREE;
9315 STRIP_NOPS (arg0);
9317 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9318 return NULL_TREE;
9320 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9322 /* Proceed if a valid pointer type was passed in. */
9323 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9325 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9326 tree frac, exp;
9328 switch (value->cl)
9330 case rvc_zero:
9331 /* For +-0, return (*exp = 0, +-0). */
9332 exp = integer_zero_node;
9333 frac = arg0;
9334 break;
9335 case rvc_nan:
9336 case rvc_inf:
9337 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9338 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9339 case rvc_normal:
9341 /* Since the frexp function always expects base 2, and in
9342 GCC normalized significands are already in the range
9343 [0.5, 1.0), we have exactly what frexp wants. */
9344 REAL_VALUE_TYPE frac_rvt = *value;
9345 SET_REAL_EXP (&frac_rvt, 0);
9346 frac = build_real (rettype, frac_rvt);
9347 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9349 break;
9350 default:
9351 gcc_unreachable ();
9354 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9355 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9356 TREE_SIDE_EFFECTS (arg1) = 1;
9357 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9360 return NULL_TREE;
9363 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9364 then we can assume the base is two. If it's false, then we have to
9365 check the mode of the TYPE parameter in certain cases. */
9367 static tree
9368 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9369 tree type, bool ldexp)
9371 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9373 STRIP_NOPS (arg0);
9374 STRIP_NOPS (arg1);
9376 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9377 if (real_zerop (arg0) || integer_zerop (arg1)
9378 || (TREE_CODE (arg0) == REAL_CST
9379 && !real_isfinite (&TREE_REAL_CST (arg0))))
9380 return omit_one_operand_loc (loc, type, arg0, arg1);
9382 /* If both arguments are constant, then try to evaluate it. */
9383 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9384 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9385 && host_integerp (arg1, 0))
9387 /* Bound the maximum adjustment to twice the range of the
9388 mode's valid exponents. Use abs to ensure the range is
9389 positive as a sanity check. */
9390 const long max_exp_adj = 2 *
9391 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9392 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9394 /* Get the user-requested adjustment. */
9395 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9397 /* The requested adjustment must be inside this range. This
9398 is a preliminary cap to avoid things like overflow, we
9399 may still fail to compute the result for other reasons. */
9400 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9402 REAL_VALUE_TYPE initial_result;
9404 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9406 /* Ensure we didn't overflow. */
9407 if (! real_isinf (&initial_result))
9409 const REAL_VALUE_TYPE trunc_result
9410 = real_value_truncate (TYPE_MODE (type), initial_result);
9412 /* Only proceed if the target mode can hold the
9413 resulting value. */
9414 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9415 return build_real (type, trunc_result);
9421 return NULL_TREE;
9424 /* Fold a call to builtin modf. */
9426 static tree
9427 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9429 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9430 return NULL_TREE;
9432 STRIP_NOPS (arg0);
9434 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9435 return NULL_TREE;
9437 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9439 /* Proceed if a valid pointer type was passed in. */
9440 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9442 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9443 REAL_VALUE_TYPE trunc, frac;
9445 switch (value->cl)
9447 case rvc_nan:
9448 case rvc_zero:
9449 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9450 trunc = frac = *value;
9451 break;
9452 case rvc_inf:
9453 /* For +-Inf, return (*arg1 = arg0, +-0). */
9454 frac = dconst0;
9455 frac.sign = value->sign;
9456 trunc = *value;
9457 break;
9458 case rvc_normal:
9459 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9460 real_trunc (&trunc, VOIDmode, value);
9461 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9462 /* If the original number was negative and already
9463 integral, then the fractional part is -0.0. */
9464 if (value->sign && frac.cl == rvc_zero)
9465 frac.sign = value->sign;
9466 break;
9469 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9470 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9471 build_real (rettype, trunc));
9472 TREE_SIDE_EFFECTS (arg1) = 1;
9473 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9474 build_real (rettype, frac));
9477 return NULL_TREE;
9480 /* Given a location LOC, an interclass builtin function decl FNDECL
9481 and its single argument ARG, return an folded expression computing
9482 the same, or NULL_TREE if we either couldn't or didn't want to fold
9483 (the latter happen if there's an RTL instruction available). */
9485 static tree
9486 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9488 enum machine_mode mode;
9490 if (!validate_arg (arg, REAL_TYPE))
9491 return NULL_TREE;
9493 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9494 return NULL_TREE;
9496 mode = TYPE_MODE (TREE_TYPE (arg));
9498 /* If there is no optab, try generic code. */
9499 switch (DECL_FUNCTION_CODE (fndecl))
9501 tree result;
9503 CASE_FLT_FN (BUILT_IN_ISINF):
9505 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9506 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9507 tree const type = TREE_TYPE (arg);
9508 REAL_VALUE_TYPE r;
9509 char buf[128];
9511 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9512 real_from_string (&r, buf);
9513 result = build_call_expr (isgr_fn, 2,
9514 fold_build1_loc (loc, ABS_EXPR, type, arg),
9515 build_real (type, r));
9516 return result;
9518 CASE_FLT_FN (BUILT_IN_FINITE):
9519 case BUILT_IN_ISFINITE:
9521 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9522 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9523 tree const type = TREE_TYPE (arg);
9524 REAL_VALUE_TYPE r;
9525 char buf[128];
9527 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9528 real_from_string (&r, buf);
9529 result = build_call_expr (isle_fn, 2,
9530 fold_build1_loc (loc, ABS_EXPR, type, arg),
9531 build_real (type, r));
9532 /*result = fold_build2_loc (loc, UNGT_EXPR,
9533 TREE_TYPE (TREE_TYPE (fndecl)),
9534 fold_build1_loc (loc, ABS_EXPR, type, arg),
9535 build_real (type, r));
9536 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9537 TREE_TYPE (TREE_TYPE (fndecl)),
9538 result);*/
9539 return result;
9541 case BUILT_IN_ISNORMAL:
9543 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9544 islessequal(fabs(x),DBL_MAX). */
9545 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9546 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9547 tree const type = TREE_TYPE (arg);
9548 REAL_VALUE_TYPE rmax, rmin;
9549 char buf[128];
9551 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9552 real_from_string (&rmax, buf);
9553 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9554 real_from_string (&rmin, buf);
9555 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9556 result = build_call_expr (isle_fn, 2, arg,
9557 build_real (type, rmax));
9558 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9559 build_call_expr (isge_fn, 2, arg,
9560 build_real (type, rmin)));
9561 return result;
9563 default:
9564 break;
9567 return NULL_TREE;
9570 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9571 ARG is the argument for the call. */
9573 static tree
9574 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9576 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9577 REAL_VALUE_TYPE r;
9579 if (!validate_arg (arg, REAL_TYPE))
9580 return NULL_TREE;
9582 switch (builtin_index)
9584 case BUILT_IN_ISINF:
9585 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9586 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9588 if (TREE_CODE (arg) == REAL_CST)
9590 r = TREE_REAL_CST (arg);
9591 if (real_isinf (&r))
9592 return real_compare (GT_EXPR, &r, &dconst0)
9593 ? integer_one_node : integer_minus_one_node;
9594 else
9595 return integer_zero_node;
9598 return NULL_TREE;
9600 case BUILT_IN_ISINF_SIGN:
9602 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9603 /* In a boolean context, GCC will fold the inner COND_EXPR to
9604 1. So e.g. "if (isinf_sign(x))" would be folded to just
9605 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9606 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9607 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9608 tree tmp = NULL_TREE;
9610 arg = builtin_save_expr (arg);
9612 if (signbit_fn && isinf_fn)
9614 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9615 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9617 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9618 signbit_call, integer_zero_node);
9619 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9620 isinf_call, integer_zero_node);
9622 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9623 integer_minus_one_node, integer_one_node);
9624 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9625 isinf_call, tmp,
9626 integer_zero_node);
9629 return tmp;
9632 case BUILT_IN_ISFINITE:
9633 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9634 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9635 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9637 if (TREE_CODE (arg) == REAL_CST)
9639 r = TREE_REAL_CST (arg);
9640 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9643 return NULL_TREE;
9645 case BUILT_IN_ISNAN:
9646 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9647 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9649 if (TREE_CODE (arg) == REAL_CST)
9651 r = TREE_REAL_CST (arg);
9652 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9655 arg = builtin_save_expr (arg);
9656 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9658 default:
9659 gcc_unreachable ();
9663 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9664 This builtin will generate code to return the appropriate floating
9665 point classification depending on the value of the floating point
9666 number passed in. The possible return values must be supplied as
9667 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9668 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9669 one floating point argument which is "type generic". */
9671 static tree
9672 fold_builtin_fpclassify (location_t loc, tree exp)
9674 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9675 arg, type, res, tmp;
9676 enum machine_mode mode;
9677 REAL_VALUE_TYPE r;
9678 char buf[128];
9680 /* Verify the required arguments in the original call. */
9681 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9682 INTEGER_TYPE, INTEGER_TYPE,
9683 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9684 return NULL_TREE;
9686 fp_nan = CALL_EXPR_ARG (exp, 0);
9687 fp_infinite = CALL_EXPR_ARG (exp, 1);
9688 fp_normal = CALL_EXPR_ARG (exp, 2);
9689 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9690 fp_zero = CALL_EXPR_ARG (exp, 4);
9691 arg = CALL_EXPR_ARG (exp, 5);
9692 type = TREE_TYPE (arg);
9693 mode = TYPE_MODE (type);
9694 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9696 /* fpclassify(x) ->
9697 isnan(x) ? FP_NAN :
9698 (fabs(x) == Inf ? FP_INFINITE :
9699 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9700 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9702 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9703 build_real (type, dconst0));
9704 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9705 tmp, fp_zero, fp_subnormal);
9707 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9708 real_from_string (&r, buf);
9709 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9710 arg, build_real (type, r));
9711 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9713 if (HONOR_INFINITIES (mode))
9715 real_inf (&r);
9716 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9717 build_real (type, r));
9718 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9719 fp_infinite, res);
9722 if (HONOR_NANS (mode))
9724 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9725 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9728 return res;
9731 /* Fold a call to an unordered comparison function such as
9732 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9733 being called and ARG0 and ARG1 are the arguments for the call.
9734 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9735 the opposite of the desired result. UNORDERED_CODE is used
9736 for modes that can hold NaNs and ORDERED_CODE is used for
9737 the rest. */
9739 static tree
9740 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9741 enum tree_code unordered_code,
9742 enum tree_code ordered_code)
9744 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9745 enum tree_code code;
9746 tree type0, type1;
9747 enum tree_code code0, code1;
9748 tree cmp_type = NULL_TREE;
9750 type0 = TREE_TYPE (arg0);
9751 type1 = TREE_TYPE (arg1);
9753 code0 = TREE_CODE (type0);
9754 code1 = TREE_CODE (type1);
9756 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9757 /* Choose the wider of two real types. */
9758 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9759 ? type0 : type1;
9760 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9761 cmp_type = type0;
9762 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9763 cmp_type = type1;
9765 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9766 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9768 if (unordered_code == UNORDERED_EXPR)
9770 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9771 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9772 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9775 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9776 : ordered_code;
9777 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9778 fold_build2_loc (loc, code, type, arg0, arg1));
9781 /* Fold a call to built-in function FNDECL with 0 arguments.
9782 IGNORE is true if the result of the function call is ignored. This
9783 function returns NULL_TREE if no simplification was possible. */
9785 static tree
9786 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9788 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9789 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9790 switch (fcode)
9792 CASE_FLT_FN (BUILT_IN_INF):
9793 case BUILT_IN_INFD32:
9794 case BUILT_IN_INFD64:
9795 case BUILT_IN_INFD128:
9796 return fold_builtin_inf (loc, type, true);
9798 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9799 return fold_builtin_inf (loc, type, false);
9801 case BUILT_IN_CLASSIFY_TYPE:
9802 return fold_builtin_classify_type (NULL_TREE);
9804 default:
9805 break;
9807 return NULL_TREE;
9810 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9811 IGNORE is true if the result of the function call is ignored. This
9812 function returns NULL_TREE if no simplification was possible. */
9814 static tree
9815 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9817 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9818 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9819 switch (fcode)
9821 case BUILT_IN_CONSTANT_P:
9823 tree val = fold_builtin_constant_p (arg0);
9825 /* Gimplification will pull the CALL_EXPR for the builtin out of
9826 an if condition. When not optimizing, we'll not CSE it back.
9827 To avoid link error types of regressions, return false now. */
9828 if (!val && !optimize)
9829 val = integer_zero_node;
9831 return val;
9834 case BUILT_IN_CLASSIFY_TYPE:
9835 return fold_builtin_classify_type (arg0);
9837 case BUILT_IN_STRLEN:
9838 return fold_builtin_strlen (loc, type, arg0);
9840 CASE_FLT_FN (BUILT_IN_FABS):
9841 return fold_builtin_fabs (loc, arg0, type);
9843 case BUILT_IN_ABS:
9844 case BUILT_IN_LABS:
9845 case BUILT_IN_LLABS:
9846 case BUILT_IN_IMAXABS:
9847 return fold_builtin_abs (loc, arg0, type);
9849 CASE_FLT_FN (BUILT_IN_CONJ):
9850 if (validate_arg (arg0, COMPLEX_TYPE)
9851 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9852 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9853 break;
9855 CASE_FLT_FN (BUILT_IN_CREAL):
9856 if (validate_arg (arg0, COMPLEX_TYPE)
9857 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9858 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9859 break;
9861 CASE_FLT_FN (BUILT_IN_CIMAG):
9862 if (validate_arg (arg0, COMPLEX_TYPE)
9863 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9864 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9865 break;
9867 CASE_FLT_FN (BUILT_IN_CCOS):
9868 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9870 CASE_FLT_FN (BUILT_IN_CCOSH):
9871 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9873 CASE_FLT_FN (BUILT_IN_CPROJ):
9874 return fold_builtin_cproj(loc, arg0, type);
9876 CASE_FLT_FN (BUILT_IN_CSIN):
9877 if (validate_arg (arg0, COMPLEX_TYPE)
9878 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9879 return do_mpc_arg1 (arg0, type, mpc_sin);
9880 break;
9882 CASE_FLT_FN (BUILT_IN_CSINH):
9883 if (validate_arg (arg0, COMPLEX_TYPE)
9884 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9885 return do_mpc_arg1 (arg0, type, mpc_sinh);
9886 break;
9888 CASE_FLT_FN (BUILT_IN_CTAN):
9889 if (validate_arg (arg0, COMPLEX_TYPE)
9890 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9891 return do_mpc_arg1 (arg0, type, mpc_tan);
9892 break;
9894 CASE_FLT_FN (BUILT_IN_CTANH):
9895 if (validate_arg (arg0, COMPLEX_TYPE)
9896 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9897 return do_mpc_arg1 (arg0, type, mpc_tanh);
9898 break;
9900 CASE_FLT_FN (BUILT_IN_CLOG):
9901 if (validate_arg (arg0, COMPLEX_TYPE)
9902 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9903 return do_mpc_arg1 (arg0, type, mpc_log);
9904 break;
9906 CASE_FLT_FN (BUILT_IN_CSQRT):
9907 if (validate_arg (arg0, COMPLEX_TYPE)
9908 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9909 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9910 break;
9912 CASE_FLT_FN (BUILT_IN_CASIN):
9913 if (validate_arg (arg0, COMPLEX_TYPE)
9914 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9915 return do_mpc_arg1 (arg0, type, mpc_asin);
9916 break;
9918 CASE_FLT_FN (BUILT_IN_CACOS):
9919 if (validate_arg (arg0, COMPLEX_TYPE)
9920 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9921 return do_mpc_arg1 (arg0, type, mpc_acos);
9922 break;
9924 CASE_FLT_FN (BUILT_IN_CATAN):
9925 if (validate_arg (arg0, COMPLEX_TYPE)
9926 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9927 return do_mpc_arg1 (arg0, type, mpc_atan);
9928 break;
9930 CASE_FLT_FN (BUILT_IN_CASINH):
9931 if (validate_arg (arg0, COMPLEX_TYPE)
9932 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9933 return do_mpc_arg1 (arg0, type, mpc_asinh);
9934 break;
9936 CASE_FLT_FN (BUILT_IN_CACOSH):
9937 if (validate_arg (arg0, COMPLEX_TYPE)
9938 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9939 return do_mpc_arg1 (arg0, type, mpc_acosh);
9940 break;
9942 CASE_FLT_FN (BUILT_IN_CATANH):
9943 if (validate_arg (arg0, COMPLEX_TYPE)
9944 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9945 return do_mpc_arg1 (arg0, type, mpc_atanh);
9946 break;
9948 CASE_FLT_FN (BUILT_IN_CABS):
9949 return fold_builtin_cabs (loc, arg0, type, fndecl);
9951 CASE_FLT_FN (BUILT_IN_CARG):
9952 return fold_builtin_carg (loc, arg0, type);
9954 CASE_FLT_FN (BUILT_IN_SQRT):
9955 return fold_builtin_sqrt (loc, arg0, type);
9957 CASE_FLT_FN (BUILT_IN_CBRT):
9958 return fold_builtin_cbrt (loc, arg0, type);
9960 CASE_FLT_FN (BUILT_IN_ASIN):
9961 if (validate_arg (arg0, REAL_TYPE))
9962 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9963 &dconstm1, &dconst1, true);
9964 break;
9966 CASE_FLT_FN (BUILT_IN_ACOS):
9967 if (validate_arg (arg0, REAL_TYPE))
9968 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9969 &dconstm1, &dconst1, true);
9970 break;
9972 CASE_FLT_FN (BUILT_IN_ATAN):
9973 if (validate_arg (arg0, REAL_TYPE))
9974 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9975 break;
9977 CASE_FLT_FN (BUILT_IN_ASINH):
9978 if (validate_arg (arg0, REAL_TYPE))
9979 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9980 break;
9982 CASE_FLT_FN (BUILT_IN_ACOSH):
9983 if (validate_arg (arg0, REAL_TYPE))
9984 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9985 &dconst1, NULL, true);
9986 break;
9988 CASE_FLT_FN (BUILT_IN_ATANH):
9989 if (validate_arg (arg0, REAL_TYPE))
9990 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9991 &dconstm1, &dconst1, false);
9992 break;
9994 CASE_FLT_FN (BUILT_IN_SIN):
9995 if (validate_arg (arg0, REAL_TYPE))
9996 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9997 break;
9999 CASE_FLT_FN (BUILT_IN_COS):
10000 return fold_builtin_cos (loc, arg0, type, fndecl);
10002 CASE_FLT_FN (BUILT_IN_TAN):
10003 return fold_builtin_tan (arg0, type);
10005 CASE_FLT_FN (BUILT_IN_CEXP):
10006 return fold_builtin_cexp (loc, arg0, type);
10008 CASE_FLT_FN (BUILT_IN_CEXPI):
10009 if (validate_arg (arg0, REAL_TYPE))
10010 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10011 break;
10013 CASE_FLT_FN (BUILT_IN_SINH):
10014 if (validate_arg (arg0, REAL_TYPE))
10015 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10016 break;
10018 CASE_FLT_FN (BUILT_IN_COSH):
10019 return fold_builtin_cosh (loc, arg0, type, fndecl);
10021 CASE_FLT_FN (BUILT_IN_TANH):
10022 if (validate_arg (arg0, REAL_TYPE))
10023 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10024 break;
10026 CASE_FLT_FN (BUILT_IN_ERF):
10027 if (validate_arg (arg0, REAL_TYPE))
10028 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10029 break;
10031 CASE_FLT_FN (BUILT_IN_ERFC):
10032 if (validate_arg (arg0, REAL_TYPE))
10033 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10034 break;
10036 CASE_FLT_FN (BUILT_IN_TGAMMA):
10037 if (validate_arg (arg0, REAL_TYPE))
10038 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10039 break;
10041 CASE_FLT_FN (BUILT_IN_EXP):
10042 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10044 CASE_FLT_FN (BUILT_IN_EXP2):
10045 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10047 CASE_FLT_FN (BUILT_IN_EXP10):
10048 CASE_FLT_FN (BUILT_IN_POW10):
10049 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10051 CASE_FLT_FN (BUILT_IN_EXPM1):
10052 if (validate_arg (arg0, REAL_TYPE))
10053 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10054 break;
10056 CASE_FLT_FN (BUILT_IN_LOG):
10057 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10059 CASE_FLT_FN (BUILT_IN_LOG2):
10060 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10062 CASE_FLT_FN (BUILT_IN_LOG10):
10063 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10065 CASE_FLT_FN (BUILT_IN_LOG1P):
10066 if (validate_arg (arg0, REAL_TYPE))
10067 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10068 &dconstm1, NULL, false);
10069 break;
10071 CASE_FLT_FN (BUILT_IN_J0):
10072 if (validate_arg (arg0, REAL_TYPE))
10073 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10074 NULL, NULL, 0);
10075 break;
10077 CASE_FLT_FN (BUILT_IN_J1):
10078 if (validate_arg (arg0, REAL_TYPE))
10079 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10080 NULL, NULL, 0);
10081 break;
10083 CASE_FLT_FN (BUILT_IN_Y0):
10084 if (validate_arg (arg0, REAL_TYPE))
10085 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10086 &dconst0, NULL, false);
10087 break;
10089 CASE_FLT_FN (BUILT_IN_Y1):
10090 if (validate_arg (arg0, REAL_TYPE))
10091 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10092 &dconst0, NULL, false);
10093 break;
10095 CASE_FLT_FN (BUILT_IN_NAN):
10096 case BUILT_IN_NAND32:
10097 case BUILT_IN_NAND64:
10098 case BUILT_IN_NAND128:
10099 return fold_builtin_nan (arg0, type, true);
10101 CASE_FLT_FN (BUILT_IN_NANS):
10102 return fold_builtin_nan (arg0, type, false);
10104 CASE_FLT_FN (BUILT_IN_FLOOR):
10105 return fold_builtin_floor (loc, fndecl, arg0);
10107 CASE_FLT_FN (BUILT_IN_CEIL):
10108 return fold_builtin_ceil (loc, fndecl, arg0);
10110 CASE_FLT_FN (BUILT_IN_TRUNC):
10111 return fold_builtin_trunc (loc, fndecl, arg0);
10113 CASE_FLT_FN (BUILT_IN_ROUND):
10114 return fold_builtin_round (loc, fndecl, arg0);
10116 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10117 CASE_FLT_FN (BUILT_IN_RINT):
10118 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10120 CASE_FLT_FN (BUILT_IN_LCEIL):
10121 CASE_FLT_FN (BUILT_IN_LLCEIL):
10122 CASE_FLT_FN (BUILT_IN_LFLOOR):
10123 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10124 CASE_FLT_FN (BUILT_IN_LROUND):
10125 CASE_FLT_FN (BUILT_IN_LLROUND):
10126 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10128 CASE_FLT_FN (BUILT_IN_LRINT):
10129 CASE_FLT_FN (BUILT_IN_LLRINT):
10130 return fold_fixed_mathfn (loc, fndecl, arg0);
10132 case BUILT_IN_BSWAP32:
10133 case BUILT_IN_BSWAP64:
10134 return fold_builtin_bswap (fndecl, arg0);
10136 CASE_INT_FN (BUILT_IN_FFS):
10137 CASE_INT_FN (BUILT_IN_CLZ):
10138 CASE_INT_FN (BUILT_IN_CTZ):
10139 CASE_INT_FN (BUILT_IN_POPCOUNT):
10140 CASE_INT_FN (BUILT_IN_PARITY):
10141 return fold_builtin_bitop (fndecl, arg0);
10143 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10144 return fold_builtin_signbit (loc, arg0, type);
10146 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10147 return fold_builtin_significand (loc, arg0, type);
10149 CASE_FLT_FN (BUILT_IN_ILOGB):
10150 CASE_FLT_FN (BUILT_IN_LOGB):
10151 return fold_builtin_logb (loc, arg0, type);
10153 case BUILT_IN_ISASCII:
10154 return fold_builtin_isascii (loc, arg0);
10156 case BUILT_IN_TOASCII:
10157 return fold_builtin_toascii (loc, arg0);
10159 case BUILT_IN_ISDIGIT:
10160 return fold_builtin_isdigit (loc, arg0);
10162 CASE_FLT_FN (BUILT_IN_FINITE):
10163 case BUILT_IN_FINITED32:
10164 case BUILT_IN_FINITED64:
10165 case BUILT_IN_FINITED128:
10166 case BUILT_IN_ISFINITE:
10168 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10169 if (ret)
10170 return ret;
10171 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10174 CASE_FLT_FN (BUILT_IN_ISINF):
10175 case BUILT_IN_ISINFD32:
10176 case BUILT_IN_ISINFD64:
10177 case BUILT_IN_ISINFD128:
10179 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10180 if (ret)
10181 return ret;
10182 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10185 case BUILT_IN_ISNORMAL:
10186 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10188 case BUILT_IN_ISINF_SIGN:
10189 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10191 CASE_FLT_FN (BUILT_IN_ISNAN):
10192 case BUILT_IN_ISNAND32:
10193 case BUILT_IN_ISNAND64:
10194 case BUILT_IN_ISNAND128:
10195 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10197 case BUILT_IN_PRINTF:
10198 case BUILT_IN_PRINTF_UNLOCKED:
10199 case BUILT_IN_VPRINTF:
10200 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10202 case BUILT_IN_FREE:
10203 if (integer_zerop (arg0))
10204 return build_empty_stmt (loc);
10205 break;
10207 default:
10208 break;
10211 return NULL_TREE;
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    /* Bessel functions: fold to a constant via MPFR when both
       arguments are compile-time constants.  */
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
                                 &dconst0, false);
      break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
      break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, POINTER_TYPE))
        return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
      break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
      break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
          && validate_arg (arg1, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
        return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
      break;

    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
                                         type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_BZERO:
      return fold_builtin_bzero (loc, arg0, arg1, ignore);

    case BUILT_IN_FPUTS:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);

    case BUILT_IN_FPUTS_UNLOCKED:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRCAT:
      return fold_builtin_strcat (loc, arg0, arg1);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (loc, arg0, arg1, type);

    case BUILT_IN_STRCPY:
      return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);

    case BUILT_IN_STPCPY:
      /* When the result is unused, stpcpy degenerates to strcpy.  */
      if (ignore)
        {
          tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
          if (!fn)
            break;

          return build_call_expr_loc (loc, fn, 2, arg0, arg1);
        }
      else
        return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
      break;

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);

    /* The unordered comparisons are folded in terms of the matching
       UN* tree code and its ordinary inverse.  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNORDERED_EXPR,
                                         NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      /* ARG0 is the __printf_chk flag; only fold when it is a plain
         integer constant expression with no side effects.  */
      if (!validate_arg (arg0, INTEGER_TYPE)
          || TREE_SIDE_EFFECTS (arg0))
        return NULL_TREE;
      else
        return fold_builtin_printf (loc, fndecl,
                                    arg1, NULL_TREE, ignore, fcode);
    break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
                                   ignore, fcode);

    default:
      break;
    }
  return NULL_TREE;
}
10416 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10417 and ARG2. IGNORE is true if the result of the function call is ignored.
10418 This function returns NULL_TREE if no simplification was possible. */
10420 static tree
10421 fold_builtin_3 (location_t loc, tree fndecl,
10422 tree arg0, tree arg1, tree arg2, bool ignore)
10424 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10425 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10426 switch (fcode)
10429 CASE_FLT_FN (BUILT_IN_SINCOS):
10430 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10432 CASE_FLT_FN (BUILT_IN_FMA):
10433 if (validate_arg (arg0, REAL_TYPE)
10434 && validate_arg(arg1, REAL_TYPE)
10435 && validate_arg(arg2, REAL_TYPE))
10436 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10437 break;
10439 CASE_FLT_FN (BUILT_IN_REMQUO):
10440 if (validate_arg (arg0, REAL_TYPE)
10441 && validate_arg(arg1, REAL_TYPE)
10442 && validate_arg(arg2, POINTER_TYPE))
10443 return do_mpfr_remquo (arg0, arg1, arg2);
10444 break;
10446 case BUILT_IN_MEMSET:
10447 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10449 case BUILT_IN_BCOPY:
10450 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10451 void_type_node, true, /*endp=*/3);
10453 case BUILT_IN_MEMCPY:
10454 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10455 type, ignore, /*endp=*/0);
10457 case BUILT_IN_MEMPCPY:
10458 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10459 type, ignore, /*endp=*/1);
10461 case BUILT_IN_MEMMOVE:
10462 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10463 type, ignore, /*endp=*/3);
10465 case BUILT_IN_STRNCAT:
10466 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10468 case BUILT_IN_STRNCPY:
10469 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10471 case BUILT_IN_STRNCMP:
10472 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10474 case BUILT_IN_MEMCHR:
10475 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10477 case BUILT_IN_BCMP:
10478 case BUILT_IN_MEMCMP:
10479 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10481 case BUILT_IN_SPRINTF:
10482 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10484 case BUILT_IN_STRCPY_CHK:
10485 case BUILT_IN_STPCPY_CHK:
10486 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10487 ignore, fcode);
10489 case BUILT_IN_STRCAT_CHK:
10490 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10492 case BUILT_IN_PRINTF_CHK:
10493 case BUILT_IN_VPRINTF_CHK:
10494 if (!validate_arg (arg0, INTEGER_TYPE)
10495 || TREE_SIDE_EFFECTS (arg0))
10496 return NULL_TREE;
10497 else
10498 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10499 break;
10501 case BUILT_IN_FPRINTF:
10502 case BUILT_IN_FPRINTF_UNLOCKED:
10503 case BUILT_IN_VFPRINTF:
10504 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10505 ignore, fcode);
10507 case BUILT_IN_FPRINTF_CHK:
10508 case BUILT_IN_VFPRINTF_CHK:
10509 if (!validate_arg (arg1, INTEGER_TYPE)
10510 || TREE_SIDE_EFFECTS (arg1))
10511 return NULL_TREE;
10512 else
10513 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10514 ignore, fcode);
10516 default:
10517 break;
10519 return NULL_TREE;
10522 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10523 ARG2, and ARG3. IGNORE is true if the result of the function call is
10524 ignored. This function returns NULL_TREE if no simplification was
10525 possible. */
10527 static tree
10528 fold_builtin_4 (location_t loc, tree fndecl,
10529 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10531 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10533 switch (fcode)
10535 case BUILT_IN_MEMCPY_CHK:
10536 case BUILT_IN_MEMPCPY_CHK:
10537 case BUILT_IN_MEMMOVE_CHK:
10538 case BUILT_IN_MEMSET_CHK:
10539 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10540 NULL_TREE, ignore,
10541 DECL_FUNCTION_CODE (fndecl));
10543 case BUILT_IN_STRNCPY_CHK:
10544 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10546 case BUILT_IN_STRNCAT_CHK:
10547 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10549 case BUILT_IN_FPRINTF_CHK:
10550 case BUILT_IN_VFPRINTF_CHK:
10551 if (!validate_arg (arg1, INTEGER_TYPE)
10552 || TREE_SIDE_EFFECTS (arg1))
10553 return NULL_TREE;
10554 else
10555 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10556 ignore, fcode);
10557 break;
10559 default:
10560 break;
10562 return NULL_TREE;
/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments, where NARGS <= 4.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  Note that this only folds builtins with
   fixed argument patterns.  Foldings that do varargs-to-varargs
   transformations, or that match calls with more than 4 arguments,
   need to be handled with fold_builtin_varargs instead.  */

#define MAX_ARGS_TO_FOLD_BUILTIN 4

static tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
{
  tree ret = NULL_TREE;

  /* Dispatch on the fixed argument count.  */
  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl, ignore);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
      break;
    case 4:
      ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
                            ignore);
      break;
    default:
      break;
    }
  if (ret)
    {
      /* Wrap the result in a NOP_EXPR carrying the call's location and
         suppress warnings on it, so removing the call does not trigger
         "statement with no effect" diagnostics later.  */
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
   result of the function call is ignored.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
                      bool ignore ATTRIBUTE_UNUSED)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      ret = fold_builtin_sprintf_chk (loc, exp, fcode);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
      break;

    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, exp);
      break;

    default:
      break;
    }
  if (ret)
    {
      /* Same wrapping as fold_builtin_n: mark the folded result so the
         vanished call does not produce spurious warnings.  */
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
10654 /* Return true if FNDECL shouldn't be folded right now.
10655 If a built-in function has an inline attribute always_inline
10656 wrapper, defer folding it after always_inline functions have
10657 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10658 might not be performed. */
10660 static bool
10661 avoid_folding_inline_builtin (tree fndecl)
10663 return (DECL_DECLARED_INLINE_P (fndecl)
10664 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10665 && cfun
10666 && !cfun->always_inline_functions_inlined
10667 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  Returns the folded
   replacement tree, or NULL_TREE if EXP is not a foldable builtin call
   or folding must be deferred.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
         yet.  Defer folding until we see all the arguments
         (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
         instead last argument is __builtin_va_arg_pack ().  Defer folding
         even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
        {
          tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
          if (fndecl2
              && TREE_CODE (fndecl2) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
            return NULL_TREE;
        }

      /* Defer folding of always_inline fortify wrappers (see
         avoid_folding_inline_builtin).  */
      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;

      /* Machine-dependent builtins are handed to the target hook.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
                                     CALL_EXPR_ARGP (exp), ignore);
      else
        {
          /* Fixed-arity folders first, then the varargs folders.  */
          if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
            {
              tree *args = CALL_EXPR_ARGP (exp);
              ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
            }
          if (!ret)
            ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
          if (ret)
            return ret;
        }
    }
  return NULL_TREE;
}
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called and ARGLIST is a TREE_LIST of arguments.
   The call is built and then folded via fold_builtin_call_array.  */

tree
build_function_call_expr (location_t loc, tree fndecl, tree arglist)
{
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
  int n = list_length (arglist);
  /* Flatten the TREE_LIST into a stack-allocated array, the form
     fold_builtin_call_array expects.  */
  tree *argarray = (tree *) alloca (n * sizeof (tree));
  int i;

  for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
    argarray[i] = TREE_VALUE (arglist);
  return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
}
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  The resulting call is
   folded via fold_builtin_call_array.  */

tree
build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
  /* Collect the variadic tree arguments into a stack array.  */
  tree *argarray = (tree *) alloca (n * sizeof (tree));
  int i;

  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
}
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  If FN is a builtin, try
   to fold the call first; otherwise (or if folding fails) return the
   plain CALL_EXPR.  */

tree
fold_builtin_call_array (location_t loc, tree type,
                         tree fn,
                         int n,
                         tree *argarray)
{
  tree ret = NULL_TREE;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
          && DECL_BUILT_IN (fndecl))
        {
          /* If last argument is __builtin_va_arg_pack (), arguments to this
             function are not finalized yet.  Defer folding until they are.  */
          if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
            {
              tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
              if (fndecl2
                  && TREE_CODE (fndecl2) == FUNCTION_DECL
                  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
                  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
                return build_call_array_loc (loc, type, fn, n, argarray);
            }
          /* Defer folding of always_inline fortify wrappers.  */
          if (avoid_folding_inline_builtin (fndecl))
            return build_call_array_loc (loc, type, fn, n, argarray);
          if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
            {
              /* Machine-dependent builtins go through the target hook.  */
              ret = targetm.fold_builtin (fndecl, n, argarray, false);
              if (ret)
                return ret;

              return build_call_array_loc (loc, type, fn, n, argarray);
            }
          else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
            {
              /* First try the transformations that don't require consing up
                 an exp.  */
              ret = fold_builtin_n (loc, fndecl, argarray, n, false);
              if (ret)
                return ret;
            }

          /* If we got this far, we need to build an exp.  */
          exp = build_call_array_loc (loc, type, fn, n, argarray);
          ret = fold_builtin_varargs (loc, fndecl, exp, false);
          return ret ? ret : exp;
        }
    }

  return build_call_array_loc (loc, type, fn, n, argarray);
}
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  int oldnargs = call_expr_nargs (exp);
  int nargs = oldnargs - skip + n;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
  tree *buffer;

  if (n > 0)
    {
      int i, j;
      va_list ap;

      /* Copy the N new leading arguments, then the surviving tail of
         EXP's arguments, into a fresh stack array.  */
      buffer = XALLOCAVEC (tree, nargs);
      va_start (ap, n);
      for (i = 0; i < n; i++)
        buffer[i] = va_arg (ap, tree);
      va_end (ap);
      for (j = skip; j < oldnargs; j++, i++)
        buffer[i] = CALL_EXPR_ARG (exp, j);
    }
  else
    /* No new arguments: point directly into EXP's argument vector
       (no copy is made).  */
    buffer = CALL_EXPR_ARGP (exp) + skip;

  return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
}
10852 /* Validate a single argument ARG against a tree code CODE representing
10853 a type. */
10855 static bool
10856 validate_arg (const_tree arg, enum tree_code code)
10858 if (!arg)
10859 return false;
10860 else if (code == POINTER_TYPE)
10861 return POINTER_TYPE_P (TREE_TYPE (arg));
10862 else if (code == INTEGER_TYPE)
10863 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10864 return code == TREE_CODE (TREE_TYPE (arg));
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const_gimple call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipses, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = (i == gimple_call_num_args (call));
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = gimple_call_arg (call, i++);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.  (Tree-based twin of validate_gimple_arglist above.)  */

bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipses, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = !more_const_call_expr_args_p (&iter);
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = next_const_call_expr_arg (&iter);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
/* Default target-specific builtin expander that does nothing.
   Returning NULL_RTX tells the caller the builtin was not expanded.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
                        rtx target ATTRIBUTE_UNUSED,
                        rtx subtarget ATTRIBUTE_UNUSED,
                        enum machine_mode mode ATTRIBUTE_UNUSED,
                        int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
/* Returns true is EXP represents data that would potentially reside
   in a readonly section.  */

static bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  /* Only the address of an object can name data in a section.  */
  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     understand).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}
/* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return
   the simplified form of the call as a tree: a constant, an offset into
   S1, or a call to the cheaper strchr.  */

static tree
fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      /* The needle must be a known constant string to fold anything.  */
      p2 = c_getstr (s2);
      if (p2 == NULL)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          /* Both strings constant: compute the result at compile time
             using the host strstr.  */
          const char *r = strstr (p1, p2);
          tree tem;

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
                                 s1, size_int (r - p1));
          return fold_convert_loc (loc, type, tem);
        }

      /* The argument is const char *, and the result is char *, so we need
         a type conversion here to avoid a warning.  */
      if (p2[0] == '\0')
        return fold_convert_loc (loc, type, s1);

      /* Only a single-character needle can become strchr.  */
      if (p2[1] != '\0')
        return NULL_TREE;

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
        return NULL_TREE;

      /* New argument list transforming strstr(s1, s2) to
         strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
    }
}
/* Simplify a call to the strchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return
   the simplified form of the call (a constant or an offset into S1).  */

static tree
fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      /* The character searched for must be a compile-time constant.  */
      if (TREE_CODE (s2) != INTEGER_CST)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          char c;
          const char *r;
          tree tem;

          /* Convert S2 to a host char; fails if it does not fit.  */
          if (target_char_cast (s2, &c))
            return NULL_TREE;

          r = strchr (p1, c);

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
                                 s1, size_int (r - p1));
          return fold_convert_loc (loc, type, tem);
        }
      return NULL_TREE;
    }
}
/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return
   the simplified form of the call: a constant, an offset into S1, or a
   call to strchr when searching for the terminating NUL.  */

static tree
fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      /* The character searched for must be a compile-time constant.  */
      if (TREE_CODE (s2) != INTEGER_CST)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          char c;
          const char *r;
          tree tem;

          if (target_char_cast (s2, &c))
            return NULL_TREE;

          r = strrchr (p1, c);

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
                                 s1, size_int (r - p1));
          return fold_convert_loc (loc, type, tem);
        }

      /* Non-constant haystack: only the search for '\0' can be
         simplified, since first and last occurrence then coincide.  */
      if (! integer_zerop (s2))
        return NULL_TREE;

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
        return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr_loc (loc, fn, 2, s1, s2);
    }
}
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return
   the simplified form of the call: a constant, an offset into S1, or a
   call to the cheaper strchr for a one-character accept set.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      /* The accept set must be a known constant string.  */
      p2 = c_getstr (s2);
      if (p2 == NULL)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          /* Both constant: evaluate at compile time via host strpbrk.  */
          const char *r = strpbrk (p1, p2);
          tree tem;

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
                                 s1, size_int (r - p1));
          return fold_convert_loc (loc, type, tem);
        }

      if (p2[0] == '\0')
        /* strpbrk(x, "") == NULL.
           Evaluate and ignore s1 in case it had side-effects.  */
        return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
        return NULL_TREE;  /* Really call strpbrk.  */

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
        return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
         strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
    }
}
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return
   the simplified form of the call as a tree, possibly a COMPOUND_EXPR
   chain whose last RHS is the value of the call (DST).  */

static tree
fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the string length is zero, return the dst parameter.  */
      if (p && *p == '\0')
        return dst;

      if (optimize_insn_for_speed_p ())
        {
          /* See if we can store by pieces into (dst + strlen(dst)).  */
          tree newdst, call;
          tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
          tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];

          if (!strlen_fn || !strcpy_fn)
            return NULL_TREE;

          /* If we don't have a movstr we don't want to emit an strcpy
             call.  We have to do that if the length of the source string
             isn't computable (in that case we can use memcpy probably
             later expanding to a sequence of mov instructions).  If we
             have movstr instructions we can emit strcpy calls.  */
          if (!HAVE_movstr)
            {
              tree len = c_strlen (src, 1);
              if (! len || TREE_SIDE_EFFECTS (len))
                return NULL_TREE;
            }

          /* Stabilize the argument list.  */
          dst = builtin_save_expr (dst);

          /* Create strlen (dst).  */
          newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
          /* Create (dst p+ strlen (dst)).  */

          newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
                                    TREE_TYPE (dst), dst, newdst);
          newdst = builtin_save_expr (newdst);

          /* Emit strcpy (dst + strlen (dst), src), then yield DST as the
             value of the whole expression.  */
          call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
          return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
        }
      return NULL_TREE;
    }
}
11335 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11336 arguments to the call.
11338 Return NULL_TREE if no simplification was possible, otherwise return the
11339 simplified form of the call as a tree.
11341 The simplified form may be a constant or other expression which
11342 computes the same value, but in a more efficient manner (including
11343 calls to other builtin functions).
11345 The call may contain arguments which need to be evaluated, but
11346 which are not useful to determine the result of the call. In
11347 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11348 COMPOUND_EXPR will be an argument which must be evaluated.
11349 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11350 COMPOUND_EXPR in the chain will contain the tree for the simplified
11351 form of the builtin function call. */
11353 static tree
11354 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11356 if (!validate_arg (dst, POINTER_TYPE)
11357 || !validate_arg (src, POINTER_TYPE)
11358 || !validate_arg (len, INTEGER_TYPE))
11359 return NULL_TREE;
11360 else
11362 const char *p = c_getstr (src);
11364 /* If the requested length is zero, or the src parameter string
11365 length is zero, return the dst parameter. */
11366 if (integer_zerop (len) || (p && *p == '\0'))
11367 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11369 /* If the requested len is greater than or equal to the string
11370 length, call strcat. */
11371 if (TREE_CODE (len) == INTEGER_CST && p
11372 && compare_tree_int (len, strlen (p)) >= 0)
11374 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11376 /* If the replacement _DECL isn't initialized, don't do the
11377 transformation. */
11378 if (!fn)
11379 return NULL_TREE;
/* strncat (dst, src, len) with len >= strlen (src) behaves exactly
   like strcat (dst, src) when SRC is a known string constant.  */
11381 return build_call_expr_loc (loc, fn, 2, dst, src);
11383 return NULL_TREE;
11387 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11388 to the call.
11390 Return NULL_TREE if no simplification was possible, otherwise return the
11391 simplified form of the call as a tree.
11393 The simplified form may be a constant or other expression which
11394 computes the same value, but in a more efficient manner (including
11395 calls to other builtin functions).
11397 The call may contain arguments which need to be evaluated, but
11398 which are not useful to determine the result of the call. In
11399 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11400 COMPOUND_EXPR will be an argument which must be evaluated.
11401 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11402 COMPOUND_EXPR in the chain will contain the tree for the simplified
11403 form of the builtin function call. */
11405 static tree
11406 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11408 if (!validate_arg (s1, POINTER_TYPE)
11409 || !validate_arg (s2, POINTER_TYPE))
11410 return NULL_TREE;
11411 else
11413 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11415 /* If both arguments are constants, evaluate at compile-time. */
11416 if (p1 && p2)
/* Use the host strspn on the two constant strings.  */
11418 const size_t r = strspn (p1, p2);
11419 return size_int (r);
11422 /* If either argument is "", return NULL_TREE. */
11423 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11424 /* Evaluate and ignore both arguments in case either one has
11425 side-effects. */
11426 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11427 s1, s2);
11428 return NULL_TREE;
11432 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11433 to the call.
11435 Return NULL_TREE if no simplification was possible, otherwise return the
11436 simplified form of the call as a tree.
11438 The simplified form may be a constant or other expression which
11439 computes the same value, but in a more efficient manner (including
11440 calls to other builtin functions).
11442 The call may contain arguments which need to be evaluated, but
11443 which are not useful to determine the result of the call. In
11444 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11445 COMPOUND_EXPR will be an argument which must be evaluated.
11446 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11447 COMPOUND_EXPR in the chain will contain the tree for the simplified
11448 form of the builtin function call. */
11450 static tree
11451 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11453 if (!validate_arg (s1, POINTER_TYPE)
11454 || !validate_arg (s2, POINTER_TYPE))
11455 return NULL_TREE;
11456 else
11458 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11460 /* If both arguments are constants, evaluate at compile-time. */
11461 if (p1 && p2)
/* Use the host strcspn on the two constant strings.  */
11463 const size_t r = strcspn (p1, p2);
11464 return size_int (r);
11467 /* If the first argument is "", return NULL_TREE. */
11468 if (p1 && *p1 == '\0')
11470 /* Evaluate and ignore argument s2 in case it has
11471 side-effects. */
11472 return omit_one_operand_loc (loc, size_type_node,
11473 size_zero_node, s2);
11476 /* If the second argument is "", return __builtin_strlen(s1). */
11477 if (p2 && *p2 == '\0')
11479 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11481 /* If the replacement _DECL isn't initialized, don't do the
11482 transformation. */
11483 if (!fn)
11484 return NULL_TREE;
/* strcspn (s1, "") scans the whole of S1, i.e. strlen (s1).  */
11486 return build_call_expr_loc (loc, fn, 1, s1);
11488 return NULL_TREE;
11492 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11493 to the call. IGNORE is true if the value returned
11494 by the builtin will be ignored. UNLOCKED is true is true if this
11495 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11496 the known length of the string. Return NULL_TREE if no simplification
11497 was possible. */
11499 tree
11500 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11501 bool ignore, bool unlocked, tree len)
11503 /* If we're using an unlocked function, assume the other unlocked
11504 functions exist explicitly. */
11505 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11506 : implicit_built_in_decls[BUILT_IN_FPUTC];
11507 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11508 : implicit_built_in_decls[BUILT_IN_FWRITE];
11510 /* If the return value is used, don't do the transformation. */
11511 if (!ignore)
11512 return NULL_TREE;
11514 /* Verify the arguments in the original call. */
11515 if (!validate_arg (arg0, POINTER_TYPE)
11516 || !validate_arg (arg1, POINTER_TYPE))
11517 return NULL_TREE;
11519 if (! len)
11520 len = c_strlen (arg0, 0);
11522 /* Get the length of the string passed to fputs. If the length
11523 can't be determined, punt. */
11524 if (!len
11525 || TREE_CODE (len) != INTEGER_CST)
11526 return NULL_TREE;
11528 switch (compare_tree_int (len, 1))
11530 case -1: /* length is 0, delete the call entirely . */
11531 return omit_one_operand_loc (loc, integer_type_node,
11532 integer_zero_node, arg1);;
11534 case 0: /* length is 1, call fputc. */
11536 const char *p = c_getstr (arg0);
11538 if (p != NULL)
11540 if (fn_fputc)
11541 return build_call_expr_loc (loc, fn_fputc, 2,
11542 build_int_cst (NULL_TREE, p[0]), arg1);
11543 else
11544 return NULL_TREE;
11547 /* FALLTHROUGH */
11548 case 1: /* length is greater than 1, call fwrite. */
11550 /* If optimizing for size keep fputs. */
11551 if (optimize_function_for_size_p (cfun))
11552 return NULL_TREE;
11553 /* New argument list transforming fputs(string, stream) to
11554 fwrite(string, 1, len, stream). */
11555 if (fn_fwrite)
11556 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11557 size_one_node, len, arg1);
11558 else
11559 return NULL_TREE;
11561 default:
11562 gcc_unreachable ();
11564 return NULL_TREE;
11567 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11568 produced. False otherwise. This is done so that we don't output the error
11569 or warning twice or three times. */
11571 bool
11572 fold_builtin_next_arg (tree exp, bool va_start_p)
11574 tree fntype = TREE_TYPE (current_function_decl);
11575 int nargs = call_expr_nargs (exp);
11576 tree arg;
/* va_start is only meaningful in a varargs function: reject a fixed
   argument list (no args, or one ending in void).  */
11578 if (TYPE_ARG_TYPES (fntype) == 0
11579 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11580 == void_type_node))
11582 error ("%<va_start%> used in function with fixed args");
11583 return true;
11586 if (va_start_p)
/* NOTE(review): the "va_start_p &&" in the next condition is redundant
   inside this branch; kept as-is for byte-identical code.  */
11588 if (va_start_p && (nargs != 2))
11590 error ("wrong number of arguments to function %<va_start%>");
11591 return true;
11593 arg = CALL_EXPR_ARG (exp, 1);
11595 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11596 when we checked the arguments and if needed issued a warning. */
11597 else
11599 if (nargs == 0)
11601 /* Evidently an out of date version of <stdarg.h>; can't validate
11602 va_start's second argument, but can still work as intended. */
11603 warning (0, "%<__builtin_next_arg%> called without an argument");
11604 return true;
11606 else if (nargs > 1)
11608 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11609 return true;
11611 arg = CALL_EXPR_ARG (exp, 0);
/* Look through an SSA name to the underlying variable.  */
11614 if (TREE_CODE (arg) == SSA_NAME)
11615 arg = SSA_NAME_VAR (arg);
11617 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11618 or __builtin_next_arg (0) the first time we see it, after checking
11619 the arguments and if needed issuing a warning. */
11620 if (!integer_zerop (arg))
11622 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11624 /* Strip off all nops for the sake of the comparison. This
11625 is not quite the same as STRIP_NOPS. It does more.
11626 We must also strip off INDIRECT_EXPR for C++ reference
11627 parameters. */
11628 while (CONVERT_EXPR_P (arg)
11629 || TREE_CODE (arg) == INDIRECT_REF)
11630 arg = TREE_OPERAND (arg, 0);
11631 if (arg != last_parm)
11633 /* FIXME: Sometimes with the tree optimizers we can get the
11634 not the last argument even though the user used the last
11635 argument. We just warn and set the arg to be the last
11636 argument so that we will get wrong-code because of
11637 it. */
11638 warning (0, "second parameter of %<va_start%> not last named argument");
11641 /* Undefined by C99 7.15.1.4p4 (va_start):
11642 "If the parameter parmN is declared with the register storage
11643 class, with a function or array type, or with a type that is
11644 not compatible with the type that results after application of
11645 the default argument promotions, the behavior is undefined."
11647 else if (DECL_REGISTER (arg))
11648 warning (0, "undefined behaviour when second parameter of "
11649 "%<va_start%> is declared with %<register%> storage");
11651 /* We want to verify the second parameter just once before the tree
11652 optimizers are run and then avoid keeping it in the tree,
11653 as otherwise we could warn even for correct code like:
11654 void foo (int i, ...)
11655 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11656 if (va_start_p)
11657 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11658 else
11659 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11661 return false;
11665 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11666 ORIG may be null if this is a 2-argument call. We don't attempt to
11667 simplify calls with more than 3 arguments.
11669 Return NULL_TREE if no simplification was possible, otherwise return the
11670 simplified form of the call as a tree. If IGNORED is true, it means that
11671 the caller does not use the returned value of the function. */
11673 static tree
11674 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11675 tree orig, int ignored)
11677 tree call, retval;
11678 const char *fmt_str = NULL;
11680 /* Verify the required arguments in the original call. We deal with two
11681 types of sprintf() calls: 'sprintf (str, fmt)' and
11682 'sprintf (dest, "%s", orig)'. */
11683 if (!validate_arg (dest, POINTER_TYPE)
11684 || !validate_arg (fmt, POINTER_TYPE))
11685 return NULL_TREE;
11686 if (orig && !validate_arg (orig, POINTER_TYPE))
11687 return NULL_TREE;
11689 /* Check whether the format is a literal string constant. */
11690 fmt_str = c_getstr (fmt);
11691 if (fmt_str == NULL)
11692 return NULL_TREE;
11694 call = NULL_TREE;
11695 retval = NULL_TREE;
/* target_percent etc. are the target's character values; bail out if
   they cannot be initialized.  */
11697 if (!init_target_chars ())
11698 return NULL_TREE;
11700 /* If the format doesn't contain % args or %%, use strcpy. */
11701 if (strchr (fmt_str, target_percent) == NULL)
11703 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11705 if (!fn)
11706 return NULL_TREE;
11708 /* Don't optimize sprintf (buf, "abc", ptr++). */
11709 if (orig)
11710 return NULL_TREE;
11712 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11713 'format' is known to contain no % formats. */
11714 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11715 if (!ignored)
11716 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11719 /* If the format is "%s", use strcpy if the result isn't used. */
11720 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11722 tree fn;
11723 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11725 if (!fn)
11726 return NULL_TREE;
11728 /* Don't crash on sprintf (str1, "%s"). */
11729 if (!orig)
11730 return NULL_TREE;
11732 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11733 if (!ignored)
/* If the result is used, we need the length of ORIG as the return
   value; punt unless it is a compile-time constant.  */
11735 retval = c_strlen (orig, 1);
11736 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11737 return NULL_TREE;
11739 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11742 if (call && retval)
/* Chain "strcpy (...), length" so the expression has sprintf's
   return type and value.  */
11744 retval = fold_convert_loc
11745 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11746 retval);
11747 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11749 else
11750 return call;
11753 /* Expand a call EXP to __builtin_object_size. */
/* NOTE(review): the function's return-type line is not visible in this
   view; the visible returns (const0_rtx/constm1_rtx) show it yields an
   rtx -- confirm against the full file.  */
11756 expand_builtin_object_size (tree exp)
11758 tree ost;
11759 int object_size_type;
11760 tree fndecl = get_callee_fndecl (exp);
11762 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11764 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11765 exp, fndecl);
11766 expand_builtin_trap ();
11767 return const0_rtx;
11770 ost = CALL_EXPR_ARG (exp, 1);
11771 STRIP_NOPS (ost);
/* The second argument must be a constant 0..3 selecting the object
   size type.  */
11773 if (TREE_CODE (ost) != INTEGER_CST
11774 || tree_int_cst_sgn (ost) < 0
11775 || compare_tree_int (ost, 3) > 0)
11777 error ("%Klast argument of %D is not integer constant between 0 and 3",
11778 exp, fndecl);
11779 expand_builtin_trap ();
11780 return const0_rtx;
11783 object_size_type = tree_low_cst (ost, 0);
/* Unknown at expansion time: (size_t) -1 for types 0 and 1,
   (size_t) 0 for types 2 and 3.  */
11785 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11788 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11789 FCODE is the BUILT_IN_* to use.
11790 Return NULL_RTX if we failed; the caller should emit a normal call,
11791 otherwise try to get the result in TARGET, if convenient (and in
11792 mode MODE if that's convenient). */
11794 static rtx
11795 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11796 enum built_in_function fcode)
11798 tree dest, src, len, size;
/* For __memset_chk the second argument is the fill value (integer),
   otherwise a source pointer.  */
11800 if (!validate_arglist (exp,
11801 POINTER_TYPE,
11802 fcode == BUILT_IN_MEMSET_CHK
11803 ? INTEGER_TYPE : POINTER_TYPE,
11804 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11805 return NULL_RTX;
11807 dest = CALL_EXPR_ARG (exp, 0);
11808 src = CALL_EXPR_ARG (exp, 1);
11809 len = CALL_EXPR_ARG (exp, 2);
11810 size = CALL_EXPR_ARG (exp, 3);
11812 if (! host_integerp (size, 1))
11813 return NULL_RTX;
11815 if (host_integerp (len, 1) || integer_all_onesp (size))
11817 tree fn;
11819 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11821 warning_at (tree_nonartificial_location (exp),
11822 0, "%Kcall to %D will always overflow destination buffer",
11823 exp, get_callee_fndecl (exp));
11824 return NULL_RTX;
11827 fn = NULL_TREE;
11828 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11829 mem{cpy,pcpy,move,set} is available. */
11830 switch (fcode)
11832 case BUILT_IN_MEMCPY_CHK:
11833 fn = built_in_decls[BUILT_IN_MEMCPY];
11834 break;
11835 case BUILT_IN_MEMPCPY_CHK:
11836 fn = built_in_decls[BUILT_IN_MEMPCPY];
11837 break;
11838 case BUILT_IN_MEMMOVE_CHK:
11839 fn = built_in_decls[BUILT_IN_MEMMOVE];
11840 break;
11841 case BUILT_IN_MEMSET_CHK:
11842 fn = built_in_decls[BUILT_IN_MEMSET];
11843 break;
11844 default:
11845 break;
11848 if (! fn)
11849 return NULL_RTX;
/* Emit the unchecked variant, preserving the tail-call flag.  */
11851 fn = build_call_nofold (fn, 3, dest, src, len);
11852 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11853 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11854 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11856 else if (fcode == BUILT_IN_MEMSET_CHK)
11857 return NULL_RTX;
11858 else
11860 unsigned int dest_align
11861 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11863 /* If DEST is not a pointer type, call the normal function. */
11864 if (dest_align == 0)
11865 return NULL_RTX;
11867 /* If SRC and DEST are the same (and not volatile), do nothing. */
11868 if (operand_equal_p (src, dest, 0))
11870 tree expr;
11872 if (fcode != BUILT_IN_MEMPCPY_CHK)
11874 /* Evaluate and ignore LEN in case it has side-effects. */
11875 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11876 return expand_expr (dest, target, mode, EXPAND_NORMAL);
/* __mempcpy_chk returns DEST + LEN.  */
11879 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11880 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11883 /* __memmove_chk special case. */
11884 if (fcode == BUILT_IN_MEMMOVE_CHK)
11886 unsigned int src_align
11887 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11889 if (src_align == 0)
11890 return NULL_RTX;
11892 /* If src is categorized for a readonly section we can use
11893 normal __memcpy_chk. */
11894 if (readonly_data_expr (src))
11896 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11897 if (!fn)
11898 return NULL_RTX;
11899 fn = build_call_nofold (fn, 4, dest, src, len, size);
11900 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11901 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11902 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11905 return NULL_RTX;
11909 /* Emit warning if a buffer overflow is detected at compile time. */
11911 static void
11912 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11914 int is_strlen = 0;
11915 tree len, size;
11916 location_t loc = tree_nonartificial_location (exp);
/* Pick out the length and object-size arguments; their positions vary
   per builtin.  */
11918 switch (fcode)
11920 case BUILT_IN_STRCPY_CHK:
11921 case BUILT_IN_STPCPY_CHK:
11922 /* For __strcat_chk the warning will be emitted only if overflowing
11923 by at least strlen (dest) + 1 bytes. */
11924 case BUILT_IN_STRCAT_CHK:
11925 len = CALL_EXPR_ARG (exp, 1);
11926 size = CALL_EXPR_ARG (exp, 2);
11927 is_strlen = 1;
11928 break;
11929 case BUILT_IN_STRNCAT_CHK:
11930 case BUILT_IN_STRNCPY_CHK:
11931 len = CALL_EXPR_ARG (exp, 2);
11932 size = CALL_EXPR_ARG (exp, 3);
11933 break;
11934 case BUILT_IN_SNPRINTF_CHK:
11935 case BUILT_IN_VSNPRINTF_CHK:
11936 len = CALL_EXPR_ARG (exp, 1);
11937 size = CALL_EXPR_ARG (exp, 3);
11938 break;
11939 default:
11940 gcc_unreachable ();
11943 if (!len || !size)
11944 return;
/* Size of (size_t) -1 means "unknown"; nothing to check.  */
11946 if (! host_integerp (size, 1) || integer_all_onesp (size))
11947 return;
11949 if (is_strlen)
11951 len = c_strlen (len, 1);
11952 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11953 return;
11955 else if (fcode == BUILT_IN_STRNCAT_CHK)
11957 tree src = CALL_EXPR_ARG (exp, 1);
11958 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11959 return;
11960 src = c_strlen (src, 1);
11961 if (! src || ! host_integerp (src, 1))
/* Source length unknown: overflow is possible but not certain.  */
11963 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11964 exp, get_callee_fndecl (exp));
11965 return;
11967 else if (tree_int_cst_lt (src, size))
11968 return;
11970 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11971 return;
11973 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11974 exp, get_callee_fndecl (exp));
11977 /* Emit warning if a buffer overflow is detected at compile time
11978 in __sprintf_chk/__vsprintf_chk calls. */
11980 static void
11981 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11983 tree size, len, fmt;
11984 const char *fmt_str;
11985 int nargs = call_expr_nargs (exp);
11987 /* Verify the required arguments in the original call. */
11989 if (nargs < 4)
11990 return;
11991 size = CALL_EXPR_ARG (exp, 2);
11992 fmt = CALL_EXPR_ARG (exp, 3);
/* Size of (size_t) -1 means "unknown"; nothing to check.  */
11994 if (! host_integerp (size, 1) || integer_all_onesp (size))
11995 return;
11997 /* Check whether the format is a literal string constant. */
11998 fmt_str = c_getstr (fmt);
11999 if (fmt_str == NULL)
12000 return;
12002 if (!init_target_chars ())
12003 return;
12005 /* If the format doesn't contain % args or %%, we know its size. */
12006 if (strchr (fmt_str, target_percent) == 0)
12007 len = build_int_cstu (size_type_node, strlen (fmt_str));
12008 /* If the format is "%s" and first ... argument is a string literal,
12009 we know it too. */
12010 else if (fcode == BUILT_IN_SPRINTF_CHK
12011 && strcmp (fmt_str, target_percent_s) == 0)
12013 tree arg;
12015 if (nargs < 5)
12016 return;
12017 arg = CALL_EXPR_ARG (exp, 4);
12018 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12019 return;
12021 len = c_strlen (arg, 1);
12022 if (!len || ! host_integerp (len, 1))
12023 return;
12025 else
12026 return;
/* LEN here excludes the terminating NUL, hence the non-strict
   comparison against SIZE.  */
12028 if (! tree_int_cst_lt (len, size))
12029 warning_at (tree_nonartificial_location (exp),
12030 0, "%Kcall to %D will always overflow destination buffer",
12031 exp, get_callee_fndecl (exp));
12034 /* Emit warning if a free is called with address of a variable. */
12036 static void
12037 maybe_emit_free_warning (tree exp)
12039 tree arg = CALL_EXPR_ARG (exp, 0);
/* Only &object arguments can be diagnosed statically.  */
12041 STRIP_NOPS (arg);
12042 if (TREE_CODE (arg) != ADDR_EXPR)
12043 return;
12045 arg = get_base_address (TREE_OPERAND (arg, 0));
12046 if (arg == NULL || INDIRECT_REF_P (arg))
12047 return;
/* Name the variable in the diagnostic when we have a declaration.  */
12049 if (SSA_VAR_P (arg))
12050 warning_at (tree_nonartificial_location (exp),
12051 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12052 else
12053 warning_at (tree_nonartificial_location (exp),
12054 0, "%Kattempt to free a non-heap object", exp);
12057 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12058 if possible. */
12060 tree
12061 fold_builtin_object_size (tree ptr, tree ost)
12063 tree ret = NULL_TREE;
12064 int object_size_type;
12066 if (!validate_arg (ptr, POINTER_TYPE)
12067 || !validate_arg (ost, INTEGER_TYPE))
12068 return NULL_TREE;
12070 STRIP_NOPS (ost);
/* OST must be a constant 0..3 selecting the object size type.  */
12072 if (TREE_CODE (ost) != INTEGER_CST
12073 || tree_int_cst_sgn (ost) < 0
12074 || compare_tree_int (ost, 3) > 0)
12075 return NULL_TREE;
12077 object_size_type = tree_low_cst (ost, 0);
12079 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12080 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12081 and (size_t) 0 for types 2 and 3. */
12082 if (TREE_SIDE_EFFECTS (ptr))
12083 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12085 if (TREE_CODE (ptr) == ADDR_EXPR)
12086 ret = build_int_cstu (size_type_node,
12087 compute_builtin_object_size (ptr, object_size_type));
12089 else if (TREE_CODE (ptr) == SSA_NAME)
12091 unsigned HOST_WIDE_INT bytes;
12093 /* If object size is not known yet, delay folding until
12094 later. Maybe subsequent passes will help determining
12095 it. */
12096 bytes = compute_builtin_object_size (ptr, object_size_type);
12097 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12098 ? -1 : 0))
12099 ret = build_int_cstu (size_type_node, bytes);
/* Drop the result if it does not fit in size_type_node.  */
12102 if (ret)
12104 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12105 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12106 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12107 ret = NULL_TREE;
12110 return ret;
12113 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12114 DEST, SRC, LEN, and SIZE are the arguments to the call.
12115 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12116 code of the builtin. If MAXLEN is not NULL, it is maximum length
12117 passed as third argument. */
12119 tree
12120 fold_builtin_memory_chk (location_t loc, tree fndecl,
12121 tree dest, tree src, tree len, tree size,
12122 tree maxlen, bool ignore,
12123 enum built_in_function fcode)
12125 tree fn;
/* For __memset_chk the second argument is the fill value (integer),
   otherwise a source pointer.  */
12127 if (!validate_arg (dest, POINTER_TYPE)
12128 || !validate_arg (src,
12129 (fcode == BUILT_IN_MEMSET_CHK
12130 ? INTEGER_TYPE : POINTER_TYPE))
12131 || !validate_arg (len, INTEGER_TYPE)
12132 || !validate_arg (size, INTEGER_TYPE))
12133 return NULL_TREE;
12135 /* If SRC and DEST are the same (and not volatile), return DEST
12136 (resp. DEST+LEN for __mempcpy_chk). */
12137 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12139 if (fcode != BUILT_IN_MEMPCPY_CHK)
12140 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12141 dest, len);
12142 else
12144 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
12145 dest, len);
12146 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
/* SIZE must be a known constant to reason about overflow.  */
12150 if (! host_integerp (size, 1))
12151 return NULL_TREE;
12153 if (! integer_all_onesp (size))
12155 if (! host_integerp (len, 1))
12157 /* If LEN is not constant, try MAXLEN too.
12158 For MAXLEN only allow optimizing into non-_ocs function
12159 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12160 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12162 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12164 /* (void) __mempcpy_chk () can be optimized into
12165 (void) __memcpy_chk (). */
12166 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12167 if (!fn)
12168 return NULL_TREE;
12170 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12172 return NULL_TREE;
12175 else
12176 maxlen = len;
/* Possible overflow (SIZE < MAXLEN): keep the checked call.  */
12178 if (tree_int_cst_lt (size, maxlen))
12179 return NULL_TREE;
12182 fn = NULL_TREE;
12183 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12184 mem{cpy,pcpy,move,set} is available. */
12185 switch (fcode)
12187 case BUILT_IN_MEMCPY_CHK:
12188 fn = built_in_decls[BUILT_IN_MEMCPY];
12189 break;
12190 case BUILT_IN_MEMPCPY_CHK:
12191 fn = built_in_decls[BUILT_IN_MEMPCPY];
12192 break;
12193 case BUILT_IN_MEMMOVE_CHK:
12194 fn = built_in_decls[BUILT_IN_MEMMOVE];
12195 break;
12196 case BUILT_IN_MEMSET_CHK:
12197 fn = built_in_decls[BUILT_IN_MEMSET];
12198 break;
12199 default:
12200 break;
12203 if (!fn)
12204 return NULL_TREE;
12206 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12209 /* Fold a call to the __st[rp]cpy_chk builtin.
12210 DEST, SRC, and SIZE are the arguments to the call.
12211 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12212 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12213 strings passed as second argument. */
12215 tree
12216 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12217 tree src, tree size,
12218 tree maxlen, bool ignore,
12219 enum built_in_function fcode)
12221 tree len, fn;
12223 if (!validate_arg (dest, POINTER_TYPE)
12224 || !validate_arg (src, POINTER_TYPE)
12225 || !validate_arg (size, INTEGER_TYPE))
12226 return NULL_TREE;
12228 /* If SRC and DEST are the same (and not volatile), return DEST. */
12229 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12230 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
/* SIZE must be a known constant to reason about overflow.  */
12232 if (! host_integerp (size, 1))
12233 return NULL_TREE;
12235 if (! integer_all_onesp (size))
12237 len = c_strlen (src, 1);
12238 if (! len || ! host_integerp (len, 1))
12240 /* If LEN is not constant, try MAXLEN too.
12241 For MAXLEN only allow optimizing into non-_ocs function
12242 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12243 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12245 if (fcode == BUILT_IN_STPCPY_CHK)
12247 if (! ignore)
12248 return NULL_TREE;
12250 /* If return value of __stpcpy_chk is ignored,
12251 optimize into __strcpy_chk. */
12252 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12253 if (!fn)
12254 return NULL_TREE;
12256 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12259 if (! len || TREE_SIDE_EFFECTS (len))
12260 return NULL_TREE;
12262 /* If c_strlen returned something, but not a constant,
12263 transform __strcpy_chk into __memcpy_chk. */
12264 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12265 if (!fn)
12266 return NULL_TREE;
/* Copy LEN + 1 bytes to include the terminating NUL.  */
12268 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12269 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12270 build_call_expr_loc (loc, fn, 4,
12271 dest, src, len, size));
12274 else
12275 maxlen = len;
/* Possible overflow (MAXLEN >= SIZE): keep the checked call.  */
12277 if (! tree_int_cst_lt (maxlen, size))
12278 return NULL_TREE;
12281 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12282 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12283 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12284 if (!fn)
12285 return NULL_TREE;
12287 return build_call_expr_loc (loc, fn, 2, dest, src);
12290 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12291 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12292 length passed as third argument. */
12294 tree
12295 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12296 tree len, tree size, tree maxlen)
12298 tree fn;
12300 if (!validate_arg (dest, POINTER_TYPE)
12301 || !validate_arg (src, POINTER_TYPE)
12302 || !validate_arg (len, INTEGER_TYPE)
12303 || !validate_arg (size, INTEGER_TYPE))
12304 return NULL_TREE;
/* SIZE must be a known constant to reason about overflow.  */
12306 if (! host_integerp (size, 1))
12307 return NULL_TREE;
12309 if (! integer_all_onesp (size))
12311 if (! host_integerp (len, 1))
12313 /* If LEN is not constant, try MAXLEN too.
12314 For MAXLEN only allow optimizing into non-_ocs function
12315 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12316 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12317 return NULL_TREE;
12319 else
12320 maxlen = len;
/* Possible overflow (SIZE < MAXLEN): keep the checked call.  */
12322 if (tree_int_cst_lt (size, maxlen))
12323 return NULL_TREE;
12326 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12327 fn = built_in_decls[BUILT_IN_STRNCPY];
12328 if (!fn)
12329 return NULL_TREE;
12331 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12334 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12335 are the arguments to the call. */
12337 static tree
12338 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12339 tree src, tree size)
12341 tree fn;
12342 const char *p;
12344 if (!validate_arg (dest, POINTER_TYPE)
12345 || !validate_arg (src, POINTER_TYPE)
12346 || !validate_arg (size, INTEGER_TYPE))
12347 return NULL_TREE;
12349 p = c_getstr (src);
12350 /* If the SRC parameter is "", return DEST. */
12351 if (p && *p == '\0')
12352 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
/* Only drop the check when SIZE is the "unknown" value (size_t) -1.  */
12354 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12355 return NULL_TREE;
12357 /* If __builtin_strcat_chk is used, assume strcat is available. */
12358 fn = built_in_decls[BUILT_IN_STRCAT];
12359 if (!fn)
12360 return NULL_TREE;
12362 return build_call_expr_loc (loc, fn, 2, dest, src);
12365 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12366 LEN, and SIZE. */
12368 static tree
12369 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12370 tree dest, tree src, tree len, tree size)
12372 tree fn;
12373 const char *p;
12375 if (!validate_arg (dest, POINTER_TYPE)
12376 || !validate_arg (src, POINTER_TYPE)
12377 || !validate_arg (size, INTEGER_TYPE)
12378 || !validate_arg (size, INTEGER_TYPE))
12379 return NULL_TREE;
12381 p = c_getstr (src);
12382 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12383 if (p && *p == '\0')
12384 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12385 else if (integer_zerop (len))
12386 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12388 if (! host_integerp (size, 1))
12389 return NULL_TREE;
12391 if (! integer_all_onesp (size))
12393 tree src_len = c_strlen (src, 1);
12394 if (src_len
12395 && host_integerp (src_len, 1)
12396 && host_integerp (len, 1)
12397 && ! tree_int_cst_lt (len, src_len))
12399 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12400 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12401 if (!fn)
12402 return NULL_TREE;
12404 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12406 return NULL_TREE;
12409 /* If __builtin_strncat_chk is used, assume strncat is available. */
12410 fn = built_in_decls[BUILT_IN_STRNCAT];
12411 if (!fn)
12412 return NULL_TREE;
12414 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12417 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12418 a normal call should be emitted rather than expanding the function
12419 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.
   The _chk argument layout is (dest, flag, size, fmt, ...).  */
12421 static tree
12422 fold_builtin_sprintf_chk (location_t loc, tree exp,
12423 enum built_in_function fcode)
12425 tree dest, size, len, fn, fmt, flag;
12426 const char *fmt_str;
12427 int nargs = call_expr_nargs (exp);
12429 /* Verify the required arguments in the original call. */
12430 if (nargs < 4)
12431 return NULL_TREE;
12432 dest = CALL_EXPR_ARG (exp, 0);
12433 if (!validate_arg (dest, POINTER_TYPE))
12434 return NULL_TREE;
12435 flag = CALL_EXPR_ARG (exp, 1);
12436 if (!validate_arg (flag, INTEGER_TYPE))
12437 return NULL_TREE;
12438 size = CALL_EXPR_ARG (exp, 2);
12439 if (!validate_arg (size, INTEGER_TYPE))
12440 return NULL_TREE;
12441 fmt = CALL_EXPR_ARG (exp, 3);
12442 if (!validate_arg (fmt, POINTER_TYPE))
12443 return NULL_TREE;
12445 if (! host_integerp (size, 1))
12446 return NULL_TREE;
/* LEN is the compile-time output length if determinable, else NULL_TREE.  */
12448 len = NULL_TREE;
12450 if (!init_target_chars ())
12451 return NULL_TREE;
12453 /* Check whether the format is a literal string constant. */
12454 fmt_str = c_getstr (fmt);
12455 if (fmt_str != NULL)
12457 /* If the format doesn't contain % args or %%, we know the size. */
12458 if (strchr (fmt_str, target_percent) == 0)
12460 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12461 len = build_int_cstu (size_type_node, strlen (fmt_str));
12463 /* If the format is "%s" and first ... argument is a string literal,
12464 we know the size too. */
12465 else if (fcode == BUILT_IN_SPRINTF_CHK
12466 && strcmp (fmt_str, target_percent_s) == 0)
12468 tree arg;
12470 if (nargs == 5)
12472 arg = CALL_EXPR_ARG (exp, 4);
12473 if (validate_arg (arg, POINTER_TYPE))
12475 len = c_strlen (arg, 1);
12476 if (! len || ! host_integerp (len, 1))
12477 len = NULL_TREE;
/* With a known object size, fold only if the output provably fits
   (LEN < SIZE, leaving room for the terminating NUL).  */
12483 if (! integer_all_onesp (size))
12485 if (! len || ! tree_int_cst_lt (len, size))
12486 return NULL_TREE;
12489 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12490 or if format doesn't contain % chars or is "%s". */
12491 if (! integer_zerop (flag))
12493 if (fmt_str == NULL)
12494 return NULL_TREE;
12495 if (strchr (fmt_str, target_percent) != NULL
12496 && strcmp (fmt_str, target_percent_s))
12497 return NULL_TREE;
12500 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12501 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12502 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12503 if (!fn)
12504 return NULL_TREE;
/* Rebuild the call, dropping the FLAG and SIZE arguments.  */
12506 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
12509 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12510 a normal call should be emitted rather than expanding the function
12511 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12512 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12513 passed as second argument.
   The _chk argument layout is (dest, len, flag, size, fmt, ...).  */
12515 tree
12516 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12517 enum built_in_function fcode)
12519 tree dest, size, len, fn, fmt, flag;
12520 const char *fmt_str;
12522 /* Verify the required arguments in the original call. */
12523 if (call_expr_nargs (exp) < 5)
12524 return NULL_TREE;
12525 dest = CALL_EXPR_ARG (exp, 0);
12526 if (!validate_arg (dest, POINTER_TYPE))
12527 return NULL_TREE;
12528 len = CALL_EXPR_ARG (exp, 1);
12529 if (!validate_arg (len, INTEGER_TYPE))
12530 return NULL_TREE;
12531 flag = CALL_EXPR_ARG (exp, 2);
12532 if (!validate_arg (flag, INTEGER_TYPE))
12533 return NULL_TREE;
12534 size = CALL_EXPR_ARG (exp, 3);
12535 if (!validate_arg (size, INTEGER_TYPE))
12536 return NULL_TREE;
12537 fmt = CALL_EXPR_ARG (exp, 4);
12538 if (!validate_arg (fmt, POINTER_TYPE))
12539 return NULL_TREE;
/* Only fold when the destination object size is a compile-time constant.  */
12541 if (! host_integerp (size, 1))
12542 return NULL_TREE;
12544 if (! integer_all_onesp (size))
12546 if (! host_integerp (len, 1))
12548 /* If LEN is not constant, try MAXLEN too.
12549 For MAXLEN only allow optimizing into non-_ocs function
12550 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12551 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12552 return NULL_TREE;
12554 else
12555 maxlen = len;
/* Keep the checking variant when the write could exceed DEST.  */
12557 if (tree_int_cst_lt (size, maxlen))
12558 return NULL_TREE;
12561 if (!init_target_chars ())
12562 return NULL_TREE;
12564 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12565 or if format doesn't contain % chars or is "%s". */
12566 if (! integer_zerop (flag))
12568 fmt_str = c_getstr (fmt);
12569 if (fmt_str == NULL)
12570 return NULL_TREE;
12571 if (strchr (fmt_str, target_percent) != NULL
12572 && strcmp (fmt_str, target_percent_s))
12573 return NULL_TREE;
12576 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12577 available. */
12578 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12579 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12580 if (!fn)
12581 return NULL_TREE;
/* Rebuild the call, dropping the FLAG and SIZE arguments.  */
12583 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
12586 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12587 FMT and ARG are the arguments to the call; we don't fold cases with
12588 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12590 Return NULL_TREE if no simplification was possible, otherwise return the
12591 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12592 code of the function to be simplified.
   Only valid when the call's return value is unused (IGNORE), since
   putchar/puts return values differ from printf's.  */
12594 static tree
12595 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12596 tree arg, bool ignore,
12597 enum built_in_function fcode)
12599 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12600 const char *fmt_str = NULL;
12602 /* If the return value is used, don't do the transformation. */
12603 if (! ignore)
12604 return NULL_TREE;
12606 /* Verify the required arguments in the original call. */
12607 if (!validate_arg (fmt, POINTER_TYPE))
12608 return NULL_TREE;
12610 /* Check whether the format is a literal string constant. */
12611 fmt_str = c_getstr (fmt);
12612 if (fmt_str == NULL)
12613 return NULL_TREE;
12615 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12617 /* If we're using an unlocked function, assume the other
12618 unlocked functions exist explicitly. */
12619 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12620 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12622 else
12624 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12625 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12628 if (!init_target_chars ())
12629 return NULL_TREE;
/* Case 1: the output string is known at compile time — either the
   format is "%s" with a literal argument, or it contains no '%'.  */
12631 if (strcmp (fmt_str, target_percent_s) == 0
12632 || strchr (fmt_str, target_percent) == NULL)
12634 const char *str;
12636 if (strcmp (fmt_str, target_percent_s) == 0)
12638 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12639 return NULL_TREE;
12641 if (!arg || !validate_arg (arg, POINTER_TYPE))
12642 return NULL_TREE;
12644 str = c_getstr (arg);
12645 if (str == NULL)
12646 return NULL_TREE;
12648 else
12650 /* The format specifier doesn't contain any '%' characters. */
12651 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12652 && arg)
12653 return NULL_TREE;
12654 str = fmt_str;
12657 /* If the string was "", printf does nothing. */
12658 if (str[0] == '\0')
12659 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12661 /* If the string has length of 1, call putchar. */
12662 if (str[1] == '\0')
12664 /* Given printf("c"), (where c is any one character,)
12665 convert "c"[0] to an int and pass that to the replacement
12666 function. */
12667 newarg = build_int_cst (NULL_TREE, str[0]);
12668 if (fn_putchar)
12669 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12671 else
12673 /* If the string was "string\n", call puts("string"). */
12674 size_t len = strlen (str);
12675 if ((unsigned char)str[len - 1] == target_newline)
12677 /* Create a NUL-terminated string that's one char shorter
12678 than the original, stripping off the trailing '\n'. */
12679 char *newstr = XALLOCAVEC (char, len);
12680 memcpy (newstr, str, len - 1);
12681 newstr[len - 1] = 0;
12683 newarg = build_string_literal (len, newstr);
12684 if (fn_puts)
12685 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12687 else
12688 /* We'd like to arrange to call fputs(string,stdout) here,
12689 but we need stdout and don't have a way to get it yet. */
12690 return NULL_TREE;
12694 /* The other optimizations can be done only on the non-va_list variants. */
12695 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12696 return NULL_TREE;
12698 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12699 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12701 if (!arg || !validate_arg (arg, POINTER_TYPE))
12702 return NULL_TREE;
12703 if (fn_puts)
12704 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12707 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12708 else if (strcmp (fmt_str, target_percent_c) == 0)
12710 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12711 return NULL_TREE;
12712 if (fn_putchar)
12713 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
/* CALL stays NULL_TREE if the needed replacement builtin is missing.  */
12716 if (!call)
12717 return NULL_TREE;
12719 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12722 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12723 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12724 more than 3 arguments, and ARG may be null in the 2-argument case.
12726 Return NULL_TREE if no simplification was possible, otherwise return the
12727 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12728 code of the function to be simplified.
   Only valid when the call's return value is unused (IGNORE).  */
12730 static tree
12731 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12732 tree fmt, tree arg, bool ignore,
12733 enum built_in_function fcode)
12735 tree fn_fputc, fn_fputs, call = NULL_TREE;
12736 const char *fmt_str = NULL;
12738 /* If the return value is used, don't do the transformation. */
12739 if (! ignore)
12740 return NULL_TREE;
12742 /* Verify the required arguments in the original call. */
12743 if (!validate_arg (fp, POINTER_TYPE))
12744 return NULL_TREE;
12745 if (!validate_arg (fmt, POINTER_TYPE))
12746 return NULL_TREE;
12748 /* Check whether the format is a literal string constant. */
12749 fmt_str = c_getstr (fmt);
12750 if (fmt_str == NULL)
12751 return NULL_TREE;
12753 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12755 /* If we're using an unlocked function, assume the other
12756 unlocked functions exist explicitly. */
12757 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12758 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12760 else
12762 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12763 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12766 if (!init_target_chars ())
12767 return NULL_TREE;
12769 /* If the format doesn't contain % args or %%, use strcpy. */
12770 if (strchr (fmt_str, target_percent) == NULL)
12772 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12773 && arg)
12774 return NULL_TREE;
12776 /* If the format specifier was "", fprintf does nothing. */
12777 if (fmt_str[0] == '\0')
12779 /* If FP has side-effects, just wait until gimplification is
12780 done. */
12781 if (TREE_SIDE_EFFECTS (fp))
12782 return NULL_TREE;
12784 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12787 /* When "string" doesn't contain %, replace all cases of
12788 fprintf (fp, string) with fputs (string, fp). The fputs
12789 builtin will take care of special cases like length == 1. */
12790 if (fn_fputs)
12791 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12794 /* The other optimizations can be done only on the non-va_list variants. */
12795 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12796 return NULL_TREE;
12798 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12799 else if (strcmp (fmt_str, target_percent_s) == 0)
12801 if (!arg || !validate_arg (arg, POINTER_TYPE))
12802 return NULL_TREE;
12803 if (fn_fputs)
12804 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12807 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12808 else if (strcmp (fmt_str, target_percent_c) == 0)
12810 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12811 return NULL_TREE;
12812 if (fn_fputc)
12813 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
/* CALL stays NULL_TREE if the needed replacement builtin is missing.  */
12816 if (!call)
12817 return NULL_TREE;
12818 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12821 /* Initialize format string characters in the target charset.
   Results are cached in the target_* file-scope variables after the
   first successful call.  Returns false if any required character has
   no target-charset equivalent.  Note that INIT is only set after
   success, so a failing conversion is re-attempted on every call.  */
12823 static bool
12824 init_target_chars (void)
12826 static bool init;
12827 if (!init)
12829 target_newline = lang_hooks.to_target_charset ('\n');
12830 target_percent = lang_hooks.to_target_charset ('%');
12831 target_c = lang_hooks.to_target_charset ('c');
12832 target_s = lang_hooks.to_target_charset ('s');
12833 if (target_newline == 0 || target_percent == 0 || target_c == 0
12834 || target_s == 0)
12835 return false;
/* Precompose the "%c", "%s" and "%s\n" strings used by the printf
   folders above.  */
12837 target_percent_c[0] = target_percent;
12838 target_percent_c[1] = target_c;
12839 target_percent_c[2] = '\0';
12841 target_percent_s[0] = target_percent;
12842 target_percent_s[1] = target_s;
12843 target_percent_s[2] = '\0';
12845 target_percent_s_newline[0] = target_percent;
12846 target_percent_s_newline[1] = target_s;
12847 target_percent_s_newline[2] = target_newline;
12848 target_percent_s_newline[3] = '\0';
12850 init = true;
12852 return true;
12855 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12856 and no overflow/underflow occurred. INEXACT is true if M was not
12857 exactly calculated. TYPE is the tree type for the result. This
12858 function assumes that you cleared the MPFR flags and then
12859 calculated M to see if anything subsequently set a flag prior to
12860 entering this function. Return NULL_TREE if any checks fail. */
12862 static tree
12863 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12865 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12866 overflow/underflow occurred. If -frounding-math, proceed iff the
12867 result of calling FUNC was exact. */
12868 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12869 && (!flag_rounding_math || !inexact))
12871 REAL_VALUE_TYPE rr;
12873 real_from_mpfr (&rr, m, type, GMP_RNDN);
12874 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12875 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12876 but the mpft_t is not, then we underflowed in the
12877 conversion. */
12878 if (real_isfinite (&rr)
12879 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12881 REAL_VALUE_TYPE rmode;
12883 real_convert (&rmode, TYPE_MODE (type), &rr);
12884 /* Proceed iff the specified mode can hold the value.
   The round-trip through real_convert detects any double rounding:
   a value representable in the wide internal format but not in
   TYPE's mode will compare unequal and be rejected.  */
12885 if (real_identical (&rmode, &rr))
12886 return build_real (type, rmode);
/* Any failed check falls through to "no fold".  */
12889 return NULL_TREE;
12892 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12893 number and no overflow/underflow occurred. INEXACT is true if M
12894 was not exactly calculated. TYPE is the tree type for the result.
12895 This function assumes that you cleared the MPFR flags and then
12896 calculated M to see if anything subsequently set a flag prior to
12897 entering this function. Return NULL_TREE if any checks fail, if
12898 FORCE_CONVERT is true, then bypass the checks.
   This is the complex-valued analogue of do_mpfr_ckconv: the same
   checks are applied independently to the real and imaginary parts.  */
12900 static tree
12901 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12903 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12904 overflow/underflow occurred. If -frounding-math, proceed iff the
12905 result of calling FUNC was exact. */
12906 if (force_convert
12907 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12908 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12909 && (!flag_rounding_math || !inexact)))
12911 REAL_VALUE_TYPE re, im;
/* TYPE is the complex tree type; TREE_TYPE (type) is its scalar
   component type.  */
12913 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12914 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12915 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12916 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12917 but the mpft_t is not, then we underflowed in the
12918 conversion. */
12919 if (force_convert
12920 || (real_isfinite (&re) && real_isfinite (&im)
12921 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12922 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12924 REAL_VALUE_TYPE re_mode, im_mode;
12926 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12927 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12928 /* Proceed iff the specified mode can hold the value. */
12929 if (force_convert
12930 || (real_identical (&re_mode, &re)
12931 && real_identical (&im_mode, &im)))
12932 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12933 build_real (TREE_TYPE (type), im_mode));
12936 return NULL_TREE;
12939 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12940 FUNC on it and return the resulting value as a tree with type TYPE.
12941 If MIN and/or MAX are not NULL, then the supplied ARG must be
12942 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12943 acceptable values, otherwise they are not. The mpfr precision is
12944 set to the precision of TYPE. We assume that function FUNC returns
12945 zero if the result could be calculated exactly within the requested
12946 precision. */
12948 static tree
12949 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12950 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12951 bool inclusive)
12953 tree result = NULL_TREE;
12955 STRIP_NOPS (arg);
12957 /* To proceed, MPFR must exactly represent the target floating point
12958 format, which only happens when the target base equals two. */
12959 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12960 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12962 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
/* Reject NaN/Inf and enforce the optional [MIN, MAX] domain.  */
12964 if (real_isfinite (ra)
12965 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12966 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12968 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12969 const int prec = fmt->p;
12970 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12971 int inexact;
12972 mpfr_t m;
/* Compute FUNC at exactly the target precision; do_mpfr_ckconv
   validates the result and converts it back to a REAL_CST.  */
12974 mpfr_init2 (m, prec);
12975 mpfr_from_real (m, ra, GMP_RNDN);
12976 mpfr_clear_flags ();
12977 inexact = func (m, m, rnd);
12978 result = do_mpfr_ckconv (m, type, inexact);
12979 mpfr_clear (m);
12983 return result;
12986 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12987 FUNC on it and return the resulting value as a tree with type TYPE.
12988 The mpfr precision is set to the precision of TYPE. We assume that
12989 function FUNC returns zero if the result could be calculated
12990 exactly within the requested precision.
   Both ARG1 and ARG2 must be finite REAL_CSTs or NULL_TREE is
   returned.  */
12992 static tree
12993 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12994 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12996 tree result = NULL_TREE;
12998 STRIP_NOPS (arg1);
12999 STRIP_NOPS (arg2);
13001 /* To proceed, MPFR must exactly represent the target floating point
13002 format, which only happens when the target base equals two. */
13003 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13004 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13005 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13007 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13008 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13010 if (real_isfinite (ra1) && real_isfinite (ra2))
13012 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13013 const int prec = fmt->p;
13014 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13015 int inexact;
13016 mpfr_t m1, m2;
/* FUNC writes its result into M1 in place (first operand reused).  */
13018 mpfr_inits2 (prec, m1, m2, NULL);
13019 mpfr_from_real (m1, ra1, GMP_RNDN);
13020 mpfr_from_real (m2, ra2, GMP_RNDN);
13021 mpfr_clear_flags ();
13022 inexact = func (m1, m1, m2, rnd);
13023 result = do_mpfr_ckconv (m1, type, inexact);
13024 mpfr_clears (m1, m2, NULL);
13028 return result;
13031 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13032 FUNC on it and return the resulting value as a tree with type TYPE.
13033 The mpfr precision is set to the precision of TYPE. We assume that
13034 function FUNC returns zero if the result could be calculated
13035 exactly within the requested precision.
   All three arguments must be finite REAL_CSTs or NULL_TREE is
   returned.  Used e.g. for fma-style builtins.  */
13037 static tree
13038 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13039 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13041 tree result = NULL_TREE;
13043 STRIP_NOPS (arg1);
13044 STRIP_NOPS (arg2);
13045 STRIP_NOPS (arg3);
13047 /* To proceed, MPFR must exactly represent the target floating point
13048 format, which only happens when the target base equals two. */
13049 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13050 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13051 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13052 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13054 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13055 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13056 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13058 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13060 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13061 const int prec = fmt->p;
13062 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13063 int inexact;
13064 mpfr_t m1, m2, m3;
/* FUNC writes its result into M1 in place (first operand reused).  */
13066 mpfr_inits2 (prec, m1, m2, m3, NULL);
13067 mpfr_from_real (m1, ra1, GMP_RNDN);
13068 mpfr_from_real (m2, ra2, GMP_RNDN);
13069 mpfr_from_real (m3, ra3, GMP_RNDN);
13070 mpfr_clear_flags ();
13071 inexact = func (m1, m1, m2, m3, rnd);
13072 result = do_mpfr_ckconv (m1, type, inexact);
13073 mpfr_clears (m1, m2, m3, NULL);
13077 return result;
13080 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13081 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13082 If ARG_SINP and ARG_COSP are NULL then the result is returned
13083 as a complex value.
13084 The type is taken from the type of ARG and is used for setting the
13085 precision of the calculation and results.
   Returns the folded tree, or NULL_TREE if folding is not possible.  */
13087 static tree
13088 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13090 tree const type = TREE_TYPE (arg);
13091 tree result = NULL_TREE;
13093 STRIP_NOPS (arg);
13095 /* To proceed, MPFR must exactly represent the target floating point
13096 format, which only happens when the target base equals two. */
13097 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13098 && TREE_CODE (arg) == REAL_CST
13099 && !TREE_OVERFLOW (arg))
13101 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13103 if (real_isfinite (ra))
13105 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13106 const int prec = fmt->p;
13107 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13108 tree result_s, result_c;
13109 int inexact;
13110 mpfr_t m, ms, mc;
/* mpfr_sin_cos computes both results in one call; the shared
   INEXACT flag is used to validate each conversion below.  */
13112 mpfr_inits2 (prec, m, ms, mc, NULL);
13113 mpfr_from_real (m, ra, GMP_RNDN);
13114 mpfr_clear_flags ();
13115 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13116 result_s = do_mpfr_ckconv (ms, type, inexact);
13117 result_c = do_mpfr_ckconv (mc, type, inexact);
13118 mpfr_clears (m, ms, mc, NULL);
13119 if (result_s && result_c)
13121 /* If we are to return in a complex value do so. */
13122 if (!arg_sinp && !arg_cosp)
13123 return build_complex (build_complex_type (type),
13124 result_c, result_s);
13126 /* Dereference the sin/cos pointer arguments. */
13127 arg_sinp = build_fold_indirect_ref (arg_sinp);
13128 arg_cosp = build_fold_indirect_ref (arg_cosp);
13129 /* Proceed if valid pointer type were passed in. */
13130 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13131 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13133 /* Set the values. */
13134 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13135 result_s);
13136 TREE_SIDE_EFFECTS (result_s) = 1;
13137 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13138 result_c);
13139 TREE_SIDE_EFFECTS (result_c) = 1;
13140 /* Combine the assignments into a compound expr. */
13141 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13142 result_s, result_c));
13147 return result;
13150 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13151 two-argument mpfr order N Bessel function FUNC on them and return
13152 the resulting value as a tree with type TYPE. The mpfr precision
13153 is set to the precision of TYPE. We assume that function FUNC
13154 returns zero if the result could be calculated exactly within the
13155 requested precision.
   If MIN is non-NULL it is a lower bound on ARG2 (inclusive when
   INCLUSIVE is true).  */
13156 static tree
13157 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13158 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13159 const REAL_VALUE_TYPE *min, bool inclusive)
13161 tree result = NULL_TREE;
13163 STRIP_NOPS (arg1);
13164 STRIP_NOPS (arg2);
13166 /* To proceed, MPFR must exactly represent the target floating point
13167 format, which only happens when the target base equals two. */
13168 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13169 && host_integerp (arg1, 0)
13170 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13172 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13173 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
/* N must survive the narrowing to the host "long" that MPFR takes,
   since HOST_WIDE_INT may be wider than long.  */
13175 if (n == (long)n
13176 && real_isfinite (ra)
13177 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13179 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13180 const int prec = fmt->p;
13181 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13182 int inexact;
13183 mpfr_t m;
13185 mpfr_init2 (m, prec);
13186 mpfr_from_real (m, ra, GMP_RNDN);
13187 mpfr_clear_flags ();
13188 inexact = func (m, n, m, rnd);
13189 result = do_mpfr_ckconv (m, type, inexact);
13190 mpfr_clear (m);
13194 return result;
13197 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13198 the pointer *(ARG_QUO) and return the result. The type is taken
13199 from the type of ARG0 and is used for setting the precision of the
13200 calculation and results.
   Returns a COMPOUND_EXPR assigning the quotient through ARG_QUO and
   yielding the remainder, or NULL_TREE if folding is not possible.  */
13202 static tree
13203 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13205 tree const type = TREE_TYPE (arg0);
13206 tree result = NULL_TREE;
13208 STRIP_NOPS (arg0);
13209 STRIP_NOPS (arg1);
13211 /* To proceed, MPFR must exactly represent the target floating point
13212 format, which only happens when the target base equals two. */
13213 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13214 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13215 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13217 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13218 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13220 if (real_isfinite (ra0) && real_isfinite (ra1))
13222 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13223 const int prec = fmt->p;
13224 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13225 tree result_rem;
13226 long integer_quo;
13227 mpfr_t m0, m1;
13229 mpfr_inits2 (prec, m0, m1, NULL);
13230 mpfr_from_real (m0, ra0, GMP_RNDN);
13231 mpfr_from_real (m1, ra1, GMP_RNDN);
13232 mpfr_clear_flags ();
13233 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13234 /* Remquo is independent of the rounding mode, so pass
13235 inexact=0 to do_mpfr_ckconv(). */
13236 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13237 mpfr_clears (m0, m1, NULL);
13238 if (result_rem)
13240 /* MPFR calculates quo in the host's long so it may
13241 return more bits in quo than the target int can hold
13242 if sizeof(host long) > sizeof(target int). This can
13243 happen even for native compilers in LP64 mode. In
13244 these cases, modulo the quo value with the largest
13245 number that the target int can hold while leaving one
13246 bit for the sign. */
13247 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13248 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13250 /* Dereference the quo pointer argument. */
13251 arg_quo = build_fold_indirect_ref (arg_quo);
13252 /* Proceed iff a valid pointer type was passed in. */
13253 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13255 /* Set the value. */
13256 tree result_quo = fold_build2 (MODIFY_EXPR,
13257 TREE_TYPE (arg_quo), arg_quo,
13258 build_int_cst (NULL, integer_quo));
13259 TREE_SIDE_EFFECTS (result_quo) = 1;
13260 /* Combine the quo assignment with the rem. */
13261 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13262 result_quo, result_rem));
13267 return result;
13270 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13271 resulting value as a tree with type TYPE. The mpfr precision is
13272 set to the precision of TYPE. We assume that this mpfr function
13273 returns zero if the result could be calculated exactly within the
13274 requested precision. In addition, the integer pointer represented
13275 by ARG_SG will be dereferenced and set to the appropriate signgam
13276 (-1,1) value.
   Returns a COMPOUND_EXPR assigning the sign through ARG_SG and
   yielding the lgamma value, or NULL_TREE if folding is not
   possible.  */
13278 static tree
13279 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13281 tree result = NULL_TREE;
13283 STRIP_NOPS (arg);
13285 /* To proceed, MPFR must exactly represent the target floating point
13286 format, which only happens when the target base equals two. Also
13287 verify ARG is a constant and that ARG_SG is an int pointer. */
13288 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13289 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13290 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13291 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13293 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13295 /* In addition to NaN and Inf, the argument cannot be zero or a
13296 negative integer.
   (lgamma has poles at 0 and the negative integers.)  */
13297 if (real_isfinite (ra)
13298 && ra->cl != rvc_zero
13299 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13301 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13302 const int prec = fmt->p;
13303 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13304 int inexact, sg;
13305 mpfr_t m;
13306 tree result_lg;
13308 mpfr_init2 (m, prec);
13309 mpfr_from_real (m, ra, GMP_RNDN);
13310 mpfr_clear_flags ();
13311 inexact = mpfr_lgamma (m, &sg, m, rnd);
13312 result_lg = do_mpfr_ckconv (m, type, inexact);
13313 mpfr_clear (m);
13314 if (result_lg)
13316 tree result_sg;
13318 /* Dereference the arg_sg pointer argument. */
13319 arg_sg = build_fold_indirect_ref (arg_sg);
13320 /* Assign the signgam value into *arg_sg. */
13321 result_sg = fold_build2 (MODIFY_EXPR,
13322 TREE_TYPE (arg_sg), arg_sg,
13323 build_int_cst (NULL, sg));
13324 TREE_SIDE_EFFECTS (result_sg) = 1;
13325 /* Combine the signgam assignment with the lgamma result. */
13326 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13327 result_sg, result_lg));
13332 return result;
13335 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13336 function FUNC on it and return the resulting value as a tree with
13337 type TYPE. The mpfr precision is set to the precision of TYPE. We
13338 assume that function FUNC returns zero if the result could be
13339 calculated exactly within the requested precision. */
13341 static tree
13342 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13344 tree result = NULL_TREE;
13346 STRIP_NOPS (arg);
13348 /* To proceed, MPFR must exactly represent the target floating point
13349 format, which only happens when the target base equals two. */
13350 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13351 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13352 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13354 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13355 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
/* Only fold when both parts are finite: no Inf/NaN handling here.  */
13357 if (real_isfinite (re) && real_isfinite (im))
13359 const struct real_format *const fmt =
13360 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13361 const int prec = fmt->p;
/* Pick real (mp_rnd_t) and complex (mpc_rnd_t) rounding modes that
   mirror the target format's rounding behavior.  */
13362 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13363 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13364 int inexact;
13365 mpc_t m;
13367 mpc_init2 (m, prec);
13368 mpfr_from_real (mpc_realref(m), re, rnd);
13369 mpfr_from_real (mpc_imagref(m), im, rnd);
13370 mpfr_clear_flags ();
/* Compute FUNC in place; INEXACT is zero iff the result was exact.  */
13371 inexact = func (m, m, crnd);
13372 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13373 mpc_clear (m);
13377 return result;
13380 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13381 mpc function FUNC on it and return the resulting value as a tree
13382 with type TYPE. The mpfr precision is set to the precision of
13383 TYPE. We assume that function FUNC returns zero if the result
13384 could be calculated exactly within the requested precision. If
13385 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13386 in the arguments and/or results. */
13388 tree
13389 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13390 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13392 tree result = NULL_TREE;
13394 STRIP_NOPS (arg0);
13395 STRIP_NOPS (arg1);
13397 /* To proceed, MPFR must exactly represent the target floating point
13398 format, which only happens when the target base equals two. */
13399 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13400 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13401 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13402 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13403 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13405 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13406 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13407 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13408 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
/* With DO_NONFINITE the Inf/NaN filter is bypassed and nonfinite
   values flow through to do_mpc_ckconv.  */
13410 if (do_nonfinite
13411 || (real_isfinite (re0) && real_isfinite (im0)
13412 && real_isfinite (re1) && real_isfinite (im1)))
13414 const struct real_format *const fmt =
13415 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13416 const int prec = fmt->p;
13417 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13418 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13419 int inexact;
13420 mpc_t m0, m1;
13422 mpc_init2 (m0, prec);
13423 mpc_init2 (m1, prec);
13424 mpfr_from_real (mpc_realref(m0), re0, rnd);
13425 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13426 mpfr_from_real (mpc_realref(m1), re1, rnd);
13427 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13428 mpfr_clear_flags ();
/* Compute FUNC (m0, m1) in place into m0.  */
13429 inexact = func (m0, m0, m1, crnd);
13430 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13431 mpc_clear (m0);
13432 mpc_clear (m1);
13436 return result;
13439 /* FIXME tuples.
13440 The functions below provide an alternate interface for folding
13441 builtin function calls presented as GIMPLE_CALL statements rather
13442 than as CALL_EXPRs. The folded result is still expressed as a
13443 tree. There is too much code duplication in the handling of
13444 varargs functions, and a more intrusive re-factoring would permit
13445 better sharing of code between the tree and statement-based
13446 versions of these functions. */
13448 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13449 along with N new arguments specified as the "..." parameters. SKIP
13450 is the number of arguments in STMT to be omitted. This function is used
13451 to do varargs-to-varargs transformations. */
13453 static tree
13454 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13456 int oldnargs = gimple_call_num_args (stmt);
13457 int nargs = oldnargs - skip + n;
13458 tree fntype = TREE_TYPE (fndecl);
/* Build the callee address expression for the new CALL_EXPR.  */
13459 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13460 tree *buffer;
13461 int i, j;
13462 va_list ap;
13463 location_t loc = gimple_location (stmt);
13465 buffer = XALLOCAVEC (tree, nargs);
/* First the N explicit replacement arguments ...  */
13466 va_start (ap, n);
13467 for (i = 0; i < n; i++)
13468 buffer[i] = va_arg (ap, tree);
13469 va_end (ap);
/* ... then the tail of STMT's arguments, skipping the first SKIP.  */
13470 for (j = skip; j < oldnargs; j++, i++)
13471 buffer[i] = gimple_call_arg (stmt, j);
13473 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13476 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13477 a normal call should be emitted rather than expanding the function
13478 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13480 static tree
13481 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13483 tree dest, size, len, fn, fmt, flag;
13484 const char *fmt_str;
13485 int nargs = gimple_call_num_args (stmt);
13487 /* Verify the required arguments in the original call. */
13488 if (nargs < 4)
13489 return NULL_TREE;
/* Argument layout: __sprintf_chk (dest, flag, size, fmt, ...).  */
13490 dest = gimple_call_arg (stmt, 0);
13491 if (!validate_arg (dest, POINTER_TYPE))
13492 return NULL_TREE;
13493 flag = gimple_call_arg (stmt, 1);
13494 if (!validate_arg (flag, INTEGER_TYPE))
13495 return NULL_TREE;
13496 size = gimple_call_arg (stmt, 2);
13497 if (!validate_arg (size, INTEGER_TYPE))
13498 return NULL_TREE;
13499 fmt = gimple_call_arg (stmt, 3);
13500 if (!validate_arg (fmt, POINTER_TYPE))
13501 return NULL_TREE;
/* SIZE must be a known compile-time constant to reason about.  */
13503 if (! host_integerp (size, 1))
13504 return NULL_TREE;
13506 len = NULL_TREE;
13508 if (!init_target_chars ())
13509 return NULL_TREE;
13511 /* Check whether the format is a literal string constant. */
13512 fmt_str = c_getstr (fmt);
13513 if (fmt_str != NULL)
13515 /* If the format doesn't contain % args or %%, we know the size. */
13516 if (strchr (fmt_str, target_percent) == 0)
13518 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13519 len = build_int_cstu (size_type_node, strlen (fmt_str));
13521 /* If the format is "%s" and first ... argument is a string literal,
13522 we know the size too. */
13523 else if (fcode == BUILT_IN_SPRINTF_CHK
13524 && strcmp (fmt_str, target_percent_s) == 0)
13526 tree arg;
13528 if (nargs == 5)
13530 arg = gimple_call_arg (stmt, 4);
13531 if (validate_arg (arg, POINTER_TYPE))
13533 len = c_strlen (arg, 1);
13534 if (! len || ! host_integerp (len, 1))
13535 len = NULL_TREE;
/* SIZE of all-ones means "unlimited buffer"; otherwise we must be able
   to prove the output fits (LEN strictly less than SIZE).  */
13541 if (! integer_all_onesp (size))
13543 if (! len || ! tree_int_cst_lt (len, size))
13544 return NULL_TREE;
13547 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13548 or if format doesn't contain % chars or is "%s". */
13549 if (! integer_zerop (flag))
13551 if (fmt_str == NULL)
13552 return NULL_TREE;
13553 if (strchr (fmt_str, target_percent) != NULL
13554 && strcmp (fmt_str, target_percent_s))
13555 return NULL_TREE;
13558 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13559 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13560 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13561 if (!fn)
13562 return NULL_TREE;
/* Drop the flag and size arguments: keep DEST and FMT, then the tail.  */
13564 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13567 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13568 a normal call should be emitted rather than expanding the function
13569 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13570 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13571 passed as second argument. */
13573 tree
13574 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13575 enum built_in_function fcode)
13577 tree dest, size, len, fn, fmt, flag;
13578 const char *fmt_str;
13580 /* Verify the required arguments in the original call. */
13581 if (gimple_call_num_args (stmt) < 5)
13582 return NULL_TREE;
/* Argument layout: __snprintf_chk (dest, len, flag, size, fmt, ...).  */
13583 dest = gimple_call_arg (stmt, 0);
13584 if (!validate_arg (dest, POINTER_TYPE))
13585 return NULL_TREE;
13586 len = gimple_call_arg (stmt, 1);
13587 if (!validate_arg (len, INTEGER_TYPE))
13588 return NULL_TREE;
13589 flag = gimple_call_arg (stmt, 2);
13590 if (!validate_arg (flag, INTEGER_TYPE))
13591 return NULL_TREE;
13592 size = gimple_call_arg (stmt, 3);
13593 if (!validate_arg (size, INTEGER_TYPE))
13594 return NULL_TREE;
13595 fmt = gimple_call_arg (stmt, 4);
13596 if (!validate_arg (fmt, POINTER_TYPE))
13597 return NULL_TREE;
13599 if (! host_integerp (size, 1))
13600 return NULL_TREE;
/* SIZE of all-ones means "unlimited buffer"; otherwise prove that the
   requested output bound (LEN, or its fallback MAXLEN) fits in SIZE.  */
13602 if (! integer_all_onesp (size))
13604 if (! host_integerp (len, 1))
13606 /* If LEN is not constant, try MAXLEN too.
13607 For MAXLEN only allow optimizing into non-_ocs function
13608 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13609 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13610 return NULL_TREE;
13612 else
13613 maxlen = len;
13615 if (tree_int_cst_lt (size, maxlen))
13616 return NULL_TREE;
13619 if (!init_target_chars ())
13620 return NULL_TREE;
13622 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13623 or if format doesn't contain % chars or is "%s". */
13624 if (! integer_zerop (flag))
13626 fmt_str = c_getstr (fmt);
13627 if (fmt_str == NULL)
13628 return NULL_TREE;
13629 if (strchr (fmt_str, target_percent) != NULL
13630 && strcmp (fmt_str, target_percent_s))
13631 return NULL_TREE;
13634 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13635 available. */
13636 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13637 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13638 if (!fn)
13639 return NULL_TREE;
/* Drop the flag and size arguments: keep DEST, LEN and FMT plus tail.  */
13641 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13644 /* Builtins with folding operations that operate on "..." arguments
13645 need special handling; we need to store the arguments in a convenient
13646 data structure before attempting any folding. Fortunately there are
13647 only a few builtins that fall into this category. FNDECL is the
13648 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13649 result of the function call is ignored. */
13651 static tree
13652 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13653 bool ignore ATTRIBUTE_UNUSED)
13655 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13656 tree ret = NULL_TREE;
13658 switch (fcode)
13660 case BUILT_IN_SPRINTF_CHK:
13661 case BUILT_IN_VSPRINTF_CHK:
13662 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13663 break;
13665 case BUILT_IN_SNPRINTF_CHK:
13666 case BUILT_IN_VSNPRINTF_CHK:
13667 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
/* NOTE(review): no break visible before default here; falling through
   is harmless since default only breaks, but confirm a break was not
   intended/lost.  */
13669 default:
13670 break;
13672 if (ret)
/* Wrap in a NOP_EXPR and suppress warnings so that replacing the call
   does not trigger "statement with no effect" diagnostics.  */
13674 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13675 TREE_NO_WARNING (ret) = 1;
13676 return ret;
13678 return NULL_TREE;
13681 /* A wrapper function for builtin folding that prevents warnings for
13682 "statement without effect" and the like, caused by removing the
13683 call node earlier than the warning is generated. */
13685 tree
13686 fold_call_stmt (gimple stmt, bool ignore)
13688 tree ret = NULL_TREE;
13689 tree fndecl = gimple_call_fndecl (stmt);
13690 location_t loc = gimple_location (stmt);
/* Only fold direct calls to builtins; calls using __builtin_va_arg_pack
   must be left alone for later expansion.  */
13691 if (fndecl
13692 && TREE_CODE (fndecl) == FUNCTION_DECL
13693 && DECL_BUILT_IN (fndecl)
13694 && !gimple_call_va_arg_pack_p (stmt))
13696 int nargs = gimple_call_num_args (stmt);
13698 if (avoid_folding_inline_builtin (fndecl))
13699 return NULL_TREE;
/* Machine-specific builtins are folded by the target hook.  */
13700 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13702 return targetm.fold_builtin (fndecl, nargs,
13703 (nargs > 0
13704 ? gimple_call_arg_ptr (stmt, 0)
13705 : &error_mark_node), ignore);
13707 else
13709 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13711 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13712 int i;
13713 for (i = 0; i < nargs; i++)
13714 args[i] = gimple_call_arg (stmt, i);
13715 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
/* Fall back to the varargs-specific folders (sprintf_chk etc.).  */
13717 if (!ret)
13718 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13719 if (ret)
13721 /* Propagate location information from original call to
13722 expansion of builtin. Otherwise things like
13723 maybe_emit_chk_warning, that operate on the expansion
13724 of a builtin, will use the wrong location information. */
13725 if (gimple_has_location (stmt))
13727 tree realret = ret;
/* Look through the warning-suppressing NOP_EXPR wrapper.  */
13728 if (TREE_CODE (ret) == NOP_EXPR)
13729 realret = TREE_OPERAND (ret, 0);
13730 if (CAN_HAVE_LOCATION_P (realret)
13731 && !EXPR_HAS_LOCATION (realret))
13732 SET_EXPR_LOCATION (realret, loc);
13733 return realret;
13735 return ret;
13739 return NULL_TREE;
13742 /* Look up the function in built_in_decls that corresponds to DECL
13743 and set ASMSPEC as its user assembler name. DECL must be a
13744 function decl that declares a builtin. */
13746 void
13747 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13749 tree builtin;
13750 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13751 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13752 && asmspec != 0);
13754 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13755 set_user_assembler_name (builtin, asmspec);
/* For builtins the compiler expands internally, the corresponding
   libfuncs must also be redirected to the user-specified name.  */
13756 switch (DECL_FUNCTION_CODE (decl))
13758 case BUILT_IN_MEMCPY:
13759 init_block_move_fn (asmspec);
13760 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13761 break;
13762 case BUILT_IN_MEMSET:
13763 init_block_clear_fn (asmspec);
13764 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13765 break;
13766 case BUILT_IN_MEMMOVE:
13767 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13768 break;
13769 case BUILT_IN_MEMCMP:
13770 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13771 break;
13772 case BUILT_IN_ABORT:
13773 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13774 break;
13775 case BUILT_IN_FFS:
/* ffs on sub-word ints goes through the ffs optab's libfunc, so that
   must be renamed too.  */
13776 if (INT_TYPE_SIZE < BITS_PER_WORD)
13778 set_user_assembler_libfunc ("ffs", asmspec);
13779 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13780 MODE_INT, 0), "ffs");
13782 break;
13783 default:
13784 break;